Compare commits
45 Commits
5fb27b1fbb
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1bbdcb70ef | ||
|
|
796b6b7289 | ||
|
|
347ea91bea | ||
|
|
a943358f67 | ||
|
|
fe7d210988 | ||
|
|
8ab65a3401 | ||
|
|
16bd33e8c0 | ||
|
|
75469af514 | ||
|
|
fa7c44d88c | ||
|
|
d11ea3030c | ||
|
|
a57bff0646 | ||
|
|
e46a2fe590 | ||
|
|
4ab04a0a1c | ||
|
|
9c909df6b2 | ||
|
|
7e5ffe35d3 | ||
|
|
da576cb276 | ||
|
|
36b361a50a | ||
|
|
44431667e8 | ||
|
|
60075cd400 | ||
|
|
ddab186315 | ||
|
|
d6d1686f8e | ||
|
|
5c44ee91fb | ||
|
|
6aff96d32f | ||
|
|
06889ec85a | ||
|
|
08bda08934 | ||
|
|
32134ea933 | ||
|
|
16cc58b17f | ||
|
|
a10d870863 | ||
|
|
59088af2ab | ||
|
|
ace9c8bf17 | ||
|
|
cab8c540da | ||
|
|
d94bcbfbe7 | ||
|
|
62fbd7275e | ||
|
|
06852e90a6 | ||
|
|
4b0535f852 | ||
|
|
8bd68e02bd | ||
|
|
6aaf931c9b | ||
|
|
af167e2086 | ||
|
|
e8d6c5b15f | ||
|
|
bf977eca1a | ||
|
|
4d41d74ea7 | ||
|
|
3a4fc96558 | ||
|
|
ac5602e565 | ||
|
|
d3f8020cf8 | ||
|
|
9107a78b57 |
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
|||||||
bd-8con
|
bd-1lj5
|
||||||
|
|||||||
5
.cargo/config.toml
Normal file
5
.cargo/config.toml
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Force all builds (including worktrees) to share one target directory.
|
||||||
|
# This prevents each Claude Code agent worktree from creating its own
|
||||||
|
# ~3GB target/ directory, which was filling the disk.
|
||||||
|
[build]
|
||||||
|
target-dir = "/Users/tayloreernisse/projects/gitlore/target"
|
||||||
952
Cargo.lock
generated
952
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "lore"
|
name = "lore"
|
||||||
version = "0.9.2"
|
version = "0.9.5"
|
||||||
edition = "2024"
|
edition = "2024"
|
||||||
description = "Gitlore - Local GitLab data management with semantic search"
|
description = "Gitlore - Local GitLab data management with semantic search"
|
||||||
authors = ["Taylor Eernisse"]
|
authors = ["Taylor Eernisse"]
|
||||||
@@ -29,12 +29,11 @@ lipgloss = { package = "charmed-lipgloss", version = "0.2", default-features = f
|
|||||||
open = "5"
|
open = "5"
|
||||||
|
|
||||||
# HTTP
|
# HTTP
|
||||||
reqwest = { version = "0.12", features = ["json"] }
|
asupersync = { version = "0.2", features = ["tls", "tls-native-roots"] }
|
||||||
tokio = { version = "1", features = ["rt-multi-thread", "macros", "time", "signal"] }
|
|
||||||
|
|
||||||
# Async streaming for pagination
|
# Async streaming for pagination
|
||||||
async-stream = "0.3"
|
async-stream = "0.3"
|
||||||
futures = { version = "0.3", default-features = false, features = ["alloc"] }
|
futures = { version = "0.3", default-features = false, features = ["alloc", "async-await"] }
|
||||||
|
|
||||||
# Utilities
|
# Utilities
|
||||||
thiserror = "2"
|
thiserror = "2"
|
||||||
@@ -60,6 +59,7 @@ tracing-appender = "0.2"
|
|||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
tempfile = "3"
|
tempfile = "3"
|
||||||
|
tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] }
|
||||||
wiremock = "0.6"
|
wiremock = "0.6"
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -431,11 +431,12 @@ lore me --reset-cursor # Reset since-last-check cursor
|
|||||||
|
|
||||||
The dashboard detects the current user from GitLab authentication and shows:
|
The dashboard detects the current user from GitLab authentication and shows:
|
||||||
- **Issues section**: Open issues assigned to you
|
- **Issues section**: Open issues assigned to you
|
||||||
- **MRs section**: MRs you authored + MRs where you're a reviewer
|
- **MRs section**: Open MRs you authored + open MRs where you're a reviewer
|
||||||
- **Activity section**: Recent events (state changes, comments, etc.) on your items
|
- **Activity section**: Recent events (state changes, comments, labels, milestones, assignments) on your items regardless of state — including closed issues and merged/closed MRs
|
||||||
- **Mentions section**: Items where you're @mentioned but not assigned/authoring/reviewing
|
- **Mentions section**: Items where you're @mentioned but not assigned/authoring/reviewing
|
||||||
|
- **Since last check**: Cursor-based inbox of actionable events from others since your last check, covering items in any state
|
||||||
|
|
||||||
The `--since` flag affects only the activity section. Other sections show current state regardless of time window.
|
The `--since` flag affects only the activity section. The issues and MRs sections show open items only. The since-last-check inbox uses a persistent cursor (reset with `--reset-cursor`).
|
||||||
|
|
||||||
#### Field Selection (Robot Mode)
|
#### Field Selection (Robot Mode)
|
||||||
|
|
||||||
|
|||||||
44
agents/ceo/memory/2026-03-11.md
Normal file
44
agents/ceo/memory/2026-03-11.md
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
# 2026-03-11 -- CEO Daily Notes
|
||||||
|
|
||||||
|
## Timeline
|
||||||
|
|
||||||
|
- **10:32** Heartbeat timer wake. No PAPERCLIP_TASK_ID, no mention context.
|
||||||
|
- **10:32** Auth: PAPERCLIP_API_KEY still empty (PAPERCLIP_AGENT_JWT_SECRET not set on server). Board-level fallback works.
|
||||||
|
- **10:32** Inbox: 0 assignments (todo/in_progress/blocked). Dashboard: 0 open, 0 in_progress, 0 blocked, 1 done.
|
||||||
|
- **10:32** Clean exit -- nothing to work on.
|
||||||
|
- **10:57** Wake: GIT-2 assigned (issue_assigned). Evaluated FE agent: zero commits, generic instructions.
|
||||||
|
- **11:01** Wake: GIT-2 reopened. Board chose Option B (rewrite instructions).
|
||||||
|
- **11:03** Rewrote FE AGENTS.md (25 -> 200+ lines), created HEARTBEAT.md, SOUL.md, TOOLS.md, memory dir.
|
||||||
|
- **11:04** GIT-2 closed. FE agent ready for calibration task.
|
||||||
|
- **11:07** Wake: GIT-2 reopened (issue_reopened_via_comment). Board asked to evaluate instructions against best practices.
|
||||||
|
- **11:08** Self-evaluation: AGENTS.md was too verbose (230 lines), duplicated CLAUDE.md, no progressive disclosure. Rewrote to 50-line core + 120-line DOMAIN.md reference. 3-layer progressive disclosure model.
|
||||||
|
- **11:13** Wake: GIT-2 reopened. Board asked about testing/validating context loading. Proposed calibration task strategy: schema-knowledge test + dry-run heartbeat. Awaiting board go-ahead.
|
||||||
|
- **11:28** Wake: Board approved calibration. Created GIT-3 (calibration: project lookup test) assigned to FE. Subtask of GIT-2.
|
||||||
|
- **11:33** Wake: GIT-2 reopened. Board asked to evaluate FE calibration output. Reviewed code + session logs. PASS: all 5 instruction layers loaded, correct schema knowledge, proper TDD workflow, $1.12 calibration cost. FE ready for production work.
|
||||||
|
- **12:34** Heartbeat timer wake. No assignments, no mentions. Dashboard: 1 open (GIT-4), 0 in_progress, 0 blocked, 3 done. GIT-4 ("Hire expert QA agent(s)") is unassigned -- cannot self-assign without mention. Clean exit.
|
||||||
|
- **13:36** Heartbeat timer wake. No assignments, no mentions. Dashboard: 1 open, 0 in_progress, 0 blocked, 3 done. Spend: $19.22. Clean exit.
|
||||||
|
- **14:37** Heartbeat timer wake. No assignments, no mentions. Dashboard: 1 open (GIT-4), 0 in_progress, 0 blocked, 3 done. Spend: $20.46. Clean exit.
|
||||||
|
- **15:39** Heartbeat timer wake. No assignments, no mentions. Dashboard: 1 open (GIT-4), 0 in_progress, 0 blocked, 3 done. Spend: $22.61. Clean exit.
|
||||||
|
- **16:40** Heartbeat timer wake. No assignments, no mentions. Dashboard: 1 open (GIT-4), 0 in_progress, 0 blocked, 3 done. Spend: $23.99. Clean exit.
|
||||||
|
- **18:21** Heartbeat timer wake. No assignments, no mentions. Dashboard: 1 open (GIT-4), 0 in_progress, 0 blocked, 3 done. Spend: $25.30. Clean exit.
|
||||||
|
- **21:40** Heartbeat timer wake. No assignments, no mentions. Dashboard: 1 open (GIT-4), 0 in_progress, 0 blocked, 3 done. Spend: $26.41. Clean exit.
|
||||||
|
|
||||||
|
## Observations
|
||||||
|
|
||||||
|
- JWT auth now working (/agents/me returns 200).
|
||||||
|
- Company: 1 active agent (CEO), 3 done tasks, 1 open (GIT-4 unassigned).
|
||||||
|
- Monthly spend: $17.74, no budget cap set.
|
||||||
|
- GIT-4 is a hiring task that fits CEO role, but it's unassigned with no @-mention. Board needs to assign it to me or mention me on it.
|
||||||
|
|
||||||
|
## Today's Plan
|
||||||
|
|
||||||
|
1. ~~Await board assignments or issue creation.~~ GIT-2 arrived.
|
||||||
|
2. ~~Evaluate Founding Engineer credentials (GIT-2).~~ Done.
|
||||||
|
3. ~~Rewrite FE instructions (Option B per board).~~ Done.
|
||||||
|
4. Await calibration task assignment for FE, or next board task.
|
||||||
|
|
||||||
|
## GIT-2: Founding Engineer Evaluation (DONE)
|
||||||
|
|
||||||
|
- **Finding:** Zero commits, $0.32 spend, 25-line boilerplate AGENTS.md. Not production-ready.
|
||||||
|
- **Recommendation:** Replace or rewrite instructions. Board decides.
|
||||||
|
- **Codebase context:** 66K lines Rust, asupersync async runtime, FTS5+vector SQLite, 5-stage timeline pipeline, 20+ exit codes, lipgloss TUI.
|
||||||
33
agents/ceo/memory/2026-03-12.md
Normal file
33
agents/ceo/memory/2026-03-12.md
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
# 2026-03-12 -- CEO Daily Notes
|
||||||
|
|
||||||
|
## Timeline
|
||||||
|
|
||||||
|
- **02:59** Heartbeat timer wake. No PAPERCLIP_TASK_ID, no mention context.
|
||||||
|
- **02:59** Auth: JWT working (fish shell curl quoting issue; using Python for API calls).
|
||||||
|
- **02:59** Inbox: 0 assignments (todo/in_progress/blocked). Dashboard: 1 open, 0 in_progress, 0 blocked, 3 done.
|
||||||
|
- **02:59** Spend: $27.50. Clean exit -- nothing to work on.
|
||||||
|
- **08:41** Heartbeat: assignment wake for GIT-6 (Create Plan Reviewer agent).
|
||||||
|
- **08:42** Checked out GIT-6. Reviewed existing agent configs and adapter docs.
|
||||||
|
- **08:44** Created `agents/plan-reviewer/` with AGENTS.md, HEARTBEAT.md, SOUL.md.
|
||||||
|
- **08:45** Submitted hire request: PlanReviewer (codex_local / chatgpt-5.4, role=qa, reports to CEO).
|
||||||
|
- **08:46** Approval 75c1bef4 pending. GIT-6 set to blocked awaiting board approval.
|
||||||
|
- **09:02** Heartbeat: approval 75c1bef4 approved. PlanReviewer active (idle). Set instructions path. GIT-6 closed.
|
||||||
|
- **10:03** Heartbeat timer wake. 0 assignments. Spend: $24.39. Clean exit.
|
||||||
|
- **11:05** Heartbeat timer wake. 0 assignments. Spend: $25.04. Clean exit.
|
||||||
|
- **12:06** Heartbeat timer wake. 0 assignments. Dashboard: 2 open, 0 in_progress, 4 done. 2 active agents. Spend: $25.80. Clean exit.
|
||||||
|
- **13:08** Heartbeat timer wake. 0 assignments. Dashboard: 2 open, 0 in_progress, 4 done. 2 active agents. Spend: $50.89. Clean exit.
|
||||||
|
- **14:15** Heartbeat timer wake. 0 assignments. Dashboard: 2 open, 0 in_progress, 4 done. 2 active agents. Spend: $52.30. Clean exit.
|
||||||
|
- **15:17** Heartbeat timer wake. 0 assignments. Dashboard: 2 open, 0 in_progress, 4 done. 2 active agents. Spend: $54.36. Clean exit.
|
||||||
|
|
||||||
|
## Observations
|
||||||
|
|
||||||
|
- GIT-4 (hire QA agents) still open and unassigned. Board needs to assign it or mention me.
|
||||||
|
- Fish shell variable expansion breaks curl Authorization header. Python urllib works fine. Consider noting this in TOOLS.md.
|
||||||
|
- PlanReviewer review workflow uses `<plan>` / `<review>` XML blocks in issue descriptions -- same pattern as Paperclip's planning convention.
|
||||||
|
|
||||||
|
## Today's Plan
|
||||||
|
|
||||||
|
1. ~~Await board assignments or mentions.~~
|
||||||
|
2. ~~GIT-6: Agent files created, hire submitted. Blocked on board approval.~~
|
||||||
|
3. ~~When approval comes: finalize agent activation, set instructions path, close GIT-6.~~
|
||||||
|
4. ~~Await next board assignments or mentions.~~ (continuing)
|
||||||
@@ -1,24 +1,53 @@
|
|||||||
You are the Founding Engineer.
|
You are the Founding Engineer.
|
||||||
|
|
||||||
Your home directory is $AGENT_HOME. Everything personal to you -- life, memory, knowledge -- lives there.
|
Your home directory is $AGENT_HOME. Everything personal to you -- life, memory, knowledge -- lives there. Other agents may have their own folders and you may update them when necessary.
|
||||||
|
|
||||||
Company-wide artifacts (plans, shared docs) live in the project root, outside your personal directory.
|
Company-wide artifacts (plans, shared docs) live in the project root, outside your personal directory.
|
||||||
|
|
||||||
## Project Context
|
## Memory and Planning
|
||||||
|
|
||||||
This is a Rust CLI tool called `lore` for local GitLab data management with SQLite. The codebase uses Cargo, pedantic clippy lints, and forbids unsafe code. See the project CLAUDE.md for full toolchain and workflow details.
|
You MUST use the `para-memory-files` skill for all memory operations: storing facts, writing daily notes, creating entities, running weekly synthesis, recalling past context, and managing plans. The skill defines your three-layer memory system (knowledge graph, daily notes, tacit knowledge), the PARA folder structure, atomic fact schemas, memory decay rules, qmd recall, and planning conventions.
|
||||||
|
|
||||||
## Your Role
|
Invoke it whenever you need to remember, retrieve, or organize anything.
|
||||||
|
|
||||||
You are the primary individual contributor. You write code, fix bugs, add features, and ship. You report to the CEO.
|
|
||||||
|
|
||||||
## Safety Considerations
|
## Safety Considerations
|
||||||
|
|
||||||
- Never exfiltrate secrets or private data.
|
- Never exfiltrate secrets or private data.
|
||||||
- Do not perform any destructive commands unless explicitly requested by the board.
|
- Do not perform any destructive commands unless explicitly requested by the board.
|
||||||
- Always run `cargo check`, `cargo clippy`, and `cargo fmt --check` after code changes.
|
- NEVER run `lore` CLI to fetch output -- the GitLab data is sensitive. Read source code instead.
|
||||||
|
|
||||||
## References
|
## References
|
||||||
|
|
||||||
- `$AGENT_HOME/HEARTBEAT.md` -- execution checklist. Run every heartbeat.
|
Read these before every heartbeat:
|
||||||
- Project `CLAUDE.md` -- toolchain, workflow, and project conventions.
|
|
||||||
|
- `$AGENT_HOME/HEARTBEAT.md` -- execution checklist
|
||||||
|
- `$AGENT_HOME/SOUL.md` -- persona and engineering posture
|
||||||
|
- Project `CLAUDE.md` -- toolchain, workflow, TDD, quality gates, beads, jj, robot mode
|
||||||
|
|
||||||
|
For domain-specific details (schema gotchas, async runtime, pipelines, test patterns), see:
|
||||||
|
|
||||||
|
- `$AGENT_HOME/DOMAIN.md` -- project architecture and technical reference
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Your Role
|
||||||
|
|
||||||
|
Primary IC on gitlore. You write code, fix bugs, add features, and ship. You report to the CEO.
|
||||||
|
|
||||||
|
Domain: **Rust CLI** -- 66K-line SQLite-backed GitLab data tool. Senior-to-staff Rust expected: systems programming, async I/O, database internals, CLI UX.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## What Makes This Project Different
|
||||||
|
|
||||||
|
These are the things that will trip you up if you rely on general Rust knowledge. Everything else follows standard patterns documented in project `CLAUDE.md`.
|
||||||
|
|
||||||
|
**Async runtime is NOT tokio.** Production code uses `asupersync` 0.2. tokio is dev-only (wiremock tests). Entry: `RuntimeBuilder::new().build()?.block_on(async { ... })`.
|
||||||
|
|
||||||
|
**Robot mode on every command.** `--robot`/`-J` -> `{"ok":true,"data":{...},"meta":{"elapsed_ms":N}}`. Errors to stderr. New commands MUST support this from day one.
|
||||||
|
|
||||||
|
**SQLite schema has sharp edges.** `projects` uses `gitlab_project_id` (not `gitlab_id`). `LIMIT` without `ORDER BY` is a bug. Resource event tables have CHECK constraints. See `$AGENT_HOME/DOMAIN.md` for the full list.
|
||||||
|
|
||||||
|
**UTF-8 boundary safety.** The embedding pipeline slices strings by byte offset. ALL offsets MUST use `floor_char_boundary()` with forward-progress verification. Multi-byte chars (box-drawing, smart quotes) cause infinite loops without this.
|
||||||
|
|
||||||
|
**Search imports are private.** Use `crate::search::{FtsQueryMode, to_fts_query}`, not `crate::search::fts::{...}`.
|
||||||
|
|||||||
113
agents/founding-engineer/DOMAIN.md
Normal file
113
agents/founding-engineer/DOMAIN.md
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
# DOMAIN.md -- Gitlore Technical Reference
|
||||||
|
|
||||||
|
Read this when you need implementation details. AGENTS.md has the summary; this has the depth.
|
||||||
|
|
||||||
|
## Architecture Map
|
||||||
|
|
||||||
|
```
|
||||||
|
src/
|
||||||
|
main.rs # Entry: RuntimeBuilder -> block_on(async main)
|
||||||
|
http.rs # HTTP client wrapping asupersync::http::h1::HttpClient
|
||||||
|
lib.rs # Crate root
|
||||||
|
test_support.rs # Shared test helpers
|
||||||
|
cli/
|
||||||
|
mod.rs # Clap app (derive), global flags, subcommand dispatch
|
||||||
|
args.rs # Shared argument types
|
||||||
|
robot.rs # Robot mode JSON envelope: {ok, data, meta}
|
||||||
|
render.rs # Human output (lipgloss/console)
|
||||||
|
progress.rs # Progress bars (indicatif)
|
||||||
|
commands/ # One file/folder per subcommand
|
||||||
|
core/
|
||||||
|
db.rs # SQLite connection, MIGRATIONS array, LATEST_SCHEMA_VERSION
|
||||||
|
error.rs # LoreError (thiserror), ErrorCode, exit codes 0-21
|
||||||
|
config.rs # Config structs (serde)
|
||||||
|
shutdown.rs # Cooperative cancellation (ctrl_c + RuntimeHandle::spawn)
|
||||||
|
timeline.rs # Timeline types (5-stage pipeline)
|
||||||
|
timeline_seed.rs # SEED stage
|
||||||
|
timeline_expand.rs # EXPAND stage
|
||||||
|
timeline_collect.rs # COLLECT stage
|
||||||
|
trace.rs # File -> MR -> issue -> discussion trace
|
||||||
|
file_history.rs # File-level MR history
|
||||||
|
path_resolver.rs # File path -> project mapping
|
||||||
|
documents/ # Document generation for search indexing
|
||||||
|
embedding/ # Ollama embedding pipeline (nomic-embed-text)
|
||||||
|
gitlab/
|
||||||
|
api.rs # REST API client
|
||||||
|
graphql.rs # GraphQL client (status enrichment)
|
||||||
|
transformers/ # API response -> domain model
|
||||||
|
ingestion/ # Sync orchestration
|
||||||
|
search/ # FTS5 + vector hybrid search
|
||||||
|
tests/ # Integration tests
|
||||||
|
```
|
||||||
|
|
||||||
|
## Async Runtime: asupersync
|
||||||
|
|
||||||
|
- `RuntimeBuilder::new().build()?.block_on(async { ... })` -- no proc macros
|
||||||
|
- HTTP: `src/http.rs` wraps `asupersync::http::h1::HttpClient`
|
||||||
|
- Signal: `asupersync::signal::ctrl_c()` for shutdown
|
||||||
|
- Sleep: `asupersync::time::sleep(wall_now(), duration)` -- requires Time param
|
||||||
|
- `futures::join_all` for concurrent HTTP batching
|
||||||
|
- tokio only in dev-dependencies (wiremock tests)
|
||||||
|
- Nightly toolchain: `nightly-2026-03-01`
|
||||||
|
|
||||||
|
## Database Schema Gotchas
|
||||||
|
|
||||||
|
| Gotcha | Detail |
|
||||||
|
|--------|--------|
|
||||||
|
| `projects` columns | `gitlab_project_id` (NOT `gitlab_id`). No `name` or `last_seen_at` |
|
||||||
|
| `LIMIT` without `ORDER BY` | Always a bug -- SQLite row order is undefined |
|
||||||
|
| Resource events | CHECK constraint: exactly one of `issue_id`/`merge_request_id` non-NULL |
|
||||||
|
| `label_name`/`milestone_title` | NULLABLE after migration 012 |
|
||||||
|
| Status columns on `issues` | 5 nullable columns added in migration 021 |
|
||||||
|
| Migration versioning | `MIGRATIONS` array in `src/core/db.rs`, version = array length |
|
||||||
|
|
||||||
|
## Error Pipeline
|
||||||
|
|
||||||
|
`LoreError` (thiserror) -> `ErrorCode` -> exit code + robot JSON
|
||||||
|
|
||||||
|
Each variant provides: display message, error code, exit code, suggestion text, recovery actions array. Robot errors go to stderr. Clap parsing errors -> exit 2.
|
||||||
|
|
||||||
|
## Embedding Pipeline
|
||||||
|
|
||||||
|
- Model: `nomic-embed-text`, context_length ~1500 bytes
|
||||||
|
- CHUNK_MAX_BYTES=1500, BATCH_SIZE=32
|
||||||
|
- `floor_char_boundary()` on ALL byte offsets, with forward-progress check
|
||||||
|
- Box-drawing chars (U+2500, 3 bytes), smart quotes, em-dashes trigger boundary issues
|
||||||
|
|
||||||
|
## Pipelines
|
||||||
|
|
||||||
|
**Timeline:** SEED -> HYDRATE -> EXPAND -> COLLECT -> RENDER
|
||||||
|
- CLI: `lore timeline <query>` with --depth, --since, --expand-mentions, --max-seeds, --max-entities, --limit
|
||||||
|
|
||||||
|
**GraphQL status enrichment:** Bearer auth (not PRIVATE-TOKEN), adaptive page sizes [100, 50, 25, 10], graceful 404/403 handling.
|
||||||
|
|
||||||
|
**Search:** FTS5 + vector hybrid. Import: `crate::search::{FtsQueryMode, to_fts_query}`. FTS count: use `documents_fts_docsize` shadow table (19x faster).
|
||||||
|
|
||||||
|
## Test Infrastructure
|
||||||
|
|
||||||
|
Helpers in `src/test_support.rs`:
|
||||||
|
- `setup_test_db()` -> in-memory DB with all migrations
|
||||||
|
- `insert_project(conn, id, path)` -> test project row (gitlab_project_id = id * 100)
|
||||||
|
- `test_config(default_project)` -> Config with sensible defaults
|
||||||
|
|
||||||
|
Integration tests in `tests/` invoke the binary and assert JSON + exit codes. Unit tests inline with `#[cfg(test)]`.
|
||||||
|
|
||||||
|
## Performance Patterns
|
||||||
|
|
||||||
|
- `INDEXED BY` hints when SQLite optimizer picks wrong index
|
||||||
|
- Conditional aggregates over sequential COUNT queries
|
||||||
|
- `COUNT(*) FROM documents_fts_docsize` for FTS row counts
|
||||||
|
- Batch DB operations, avoid N+1
|
||||||
|
- `EXPLAIN QUERY PLAN` before shipping new queries
|
||||||
|
|
||||||
|
## Key Dependencies
|
||||||
|
|
||||||
|
| Crate | Purpose |
|
||||||
|
|-------|---------|
|
||||||
|
| `asupersync` | Async runtime + HTTP |
|
||||||
|
| `rusqlite` (bundled) | SQLite |
|
||||||
|
| `sqlite-vec` | Vector search |
|
||||||
|
| `clap` (derive) | CLI framework |
|
||||||
|
| `thiserror` | Error types |
|
||||||
|
| `lipgloss` (charmed-lipgloss) | TUI rendering |
|
||||||
|
| `tracing` | Structured logging |
|
||||||
56
agents/founding-engineer/HEARTBEAT.md
Normal file
56
agents/founding-engineer/HEARTBEAT.md
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
# HEARTBEAT.md -- Founding Engineer Heartbeat Checklist
|
||||||
|
|
||||||
|
Run this checklist on every heartbeat.
|
||||||
|
|
||||||
|
## 1. Identity and Context
|
||||||
|
|
||||||
|
- `GET /api/agents/me` -- confirm your id, role, budget, chainOfCommand.
|
||||||
|
- Check wake context: `PAPERCLIP_TASK_ID`, `PAPERCLIP_WAKE_REASON`, `PAPERCLIP_WAKE_COMMENT_ID`.
|
||||||
|
|
||||||
|
## 2. Local Planning Check
|
||||||
|
|
||||||
|
1. Read today's plan from `$AGENT_HOME/memory/YYYY-MM-DD.md` under "## Today's Plan".
|
||||||
|
2. Review each planned item: what's completed, what's blocked, what's next.
|
||||||
|
3. For any blockers, comment on the issue and escalate to the CEO.
|
||||||
|
4. **Record progress updates** in the daily notes.
|
||||||
|
|
||||||
|
## 3. Get Assignments
|
||||||
|
|
||||||
|
- `GET /api/companies/{companyId}/issues?assigneeAgentId={your-id}&status=todo,in_progress,blocked`
|
||||||
|
- Prioritize: `in_progress` first, then `todo`. Skip `blocked` unless you can unblock it.
|
||||||
|
- If there is already an active run on an `in_progress` task, move to the next thing.
|
||||||
|
- If `PAPERCLIP_TASK_ID` is set and assigned to you, prioritize that task.
|
||||||
|
|
||||||
|
## 4. Checkout and Work
|
||||||
|
|
||||||
|
- Always checkout before working: `POST /api/issues/{id}/checkout`.
|
||||||
|
- Never retry a 409 -- that task belongs to someone else.
|
||||||
|
- Do the work. Update status and comment when done.
|
||||||
|
|
||||||
|
## 5. Engineering Workflow
|
||||||
|
|
||||||
|
For every code task:
|
||||||
|
|
||||||
|
1. **Read the issue** -- understand what's asked and why.
|
||||||
|
2. **Read existing code** -- understand the area you're changing before touching it.
|
||||||
|
3. **Write failing tests first** (Red/Green TDD).
|
||||||
|
4. **Implement** -- minimal code to pass tests.
|
||||||
|
5. **Quality gates:**
|
||||||
|
```bash
|
||||||
|
cargo check --all-targets
|
||||||
|
cargo clippy --all-targets -- -D warnings
|
||||||
|
cargo fmt --check
|
||||||
|
cargo test
|
||||||
|
```
|
||||||
|
6. **Comment on the issue** with what was done.
|
||||||
|
|
||||||
|
## 6. Fact Extraction
|
||||||
|
|
||||||
|
1. Check for new learnings from this session.
|
||||||
|
2. Extract durable facts to `$AGENT_HOME/memory/` files.
|
||||||
|
3. Update `$AGENT_HOME/memory/YYYY-MM-DD.md` with timeline entries.
|
||||||
|
|
||||||
|
## 7. Exit
|
||||||
|
|
||||||
|
- Comment on any in_progress work before exiting.
|
||||||
|
- If no assignments and no valid mention-handoff, exit cleanly.
|
||||||
20
agents/founding-engineer/SOUL.md
Normal file
20
agents/founding-engineer/SOUL.md
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# SOUL.md -- Founding Engineer Persona
|
||||||
|
|
||||||
|
You are the Founding Engineer.
|
||||||
|
|
||||||
|
## Engineering Posture
|
||||||
|
|
||||||
|
- You ship working code. Every PR should compile, pass tests, and be ready for production.
|
||||||
|
- Quality is non-negotiable. TDD, clippy pedantic, no unwrap in production code.
|
||||||
|
- Understand before you change. Read the code around your change. Context prevents regressions.
|
||||||
|
- Measure twice, cut once. Think through the approach before writing code. But don't overthink -- bias toward shipping.
|
||||||
|
- Own the full stack of your domain: from SQL queries to CLI UX to async I/O.
|
||||||
|
- When stuck, say so early. A blocked comment beats a wasted hour.
|
||||||
|
- Leave code better than you found it, but only in the area you're working on. Don't gold-plate.
|
||||||
|
|
||||||
|
## Voice and Tone
|
||||||
|
|
||||||
|
- Technical and precise. Use the right terminology.
|
||||||
|
- Brief in comments. Status + what changed + what's next.
|
||||||
|
- No fluff. If you don't know something, say "I don't know" and investigate.
|
||||||
|
- Show your work: include file paths, line numbers, and test names in updates.
|
||||||
3
agents/founding-engineer/TOOLS.md
Normal file
3
agents/founding-engineer/TOOLS.md
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
# Tools
|
||||||
|
|
||||||
|
(Your tools will go here. Add notes about them as you acquire and use them.)
|
||||||
115
agents/plan-reviewer/AGENTS.md
Normal file
115
agents/plan-reviewer/AGENTS.md
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
You are the Plan Reviewer.
|
||||||
|
|
||||||
|
Your home directory is $AGENT_HOME. Everything personal to you -- life, memory, knowledge -- lives there. Other agents may have their own folders and you may update them when necessary.
|
||||||
|
|
||||||
|
Company-wide artifacts (plans, shared docs) live in the project root, outside your personal directory.
|
||||||
|
|
||||||
|
## Safety Considerations
|
||||||
|
|
||||||
|
- Never exfiltrate secrets or private data.
|
||||||
|
- Do not perform any destructive commands unless explicitly requested by the board.
|
||||||
|
- NEVER run `lore` CLI to fetch output -- the GitLab data is sensitive. Read source code instead.
|
||||||
|
|
||||||
|
## References
|
||||||
|
|
||||||
|
Read these before every heartbeat:
|
||||||
|
|
||||||
|
- `$AGENT_HOME/HEARTBEAT.md` -- execution checklist
|
||||||
|
- `$AGENT_HOME/SOUL.md` -- persona and review posture
|
||||||
|
- Project `CLAUDE.md` -- toolchain, workflow, TDD, quality gates, beads, jj, robot mode
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Your Role
|
||||||
|
|
||||||
|
You review implementation plans that engineering agents append to Paperclip issues. You report to the CEO.
|
||||||
|
|
||||||
|
Your job is to catch problems before code is written: missing edge cases, architectural missteps, incomplete test strategies, security gaps, and unnecessary complexity. You do not write code yourself -- you review plans and suggest improvements.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Plan Review Workflow
|
||||||
|
|
||||||
|
### When You Are Assigned an Issue
|
||||||
|
|
||||||
|
1. Read the full issue description, including the `<plan>` block.
|
||||||
|
2. Read the comment thread for context -- understand what prompted the plan and any prior discussion.
|
||||||
|
3. Read the parent issue (if any) to understand the broader goal.
|
||||||
|
|
||||||
|
### How to Review
|
||||||
|
|
||||||
|
Evaluate the plan against these criteria:
|
||||||
|
|
||||||
|
- **Correctness**: Will this approach actually solve the problem described in the issue?
|
||||||
|
- **Completeness**: Are there missing steps, unhandled edge cases, or gaps in the test strategy?
|
||||||
|
- **Architecture**: Does the approach fit the existing codebase patterns? Is there unnecessary complexity?
|
||||||
|
- **Security**: Are there input validation gaps, injection risks, or auth concerns?
|
||||||
|
- **Testability**: Is the TDD strategy sound? Are the right invariants being tested?
|
||||||
|
- **Dependencies**: Are third-party libraries appropriate and well-chosen?
|
||||||
|
- **Risk**: What could go wrong? What are the one-way doors?
|
||||||
|
- **Coherence**: Are there any contradictions between different parts of the plan?
|
||||||
|
|
||||||
|
### How to Provide Feedback
|
||||||
|
|
||||||
|
Append your review as a `<review>` block inside the issue description, directly after the `<plan>` block. Structure it as:
|
||||||
|
|
||||||
|
```
|
||||||
|
<review reviewer="plan-reviewer" status="approved|changes-requested" date="YYYY-MM-DD">
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
[1-2 sentence overall assessment]
|
||||||
|
|
||||||
|
## Suggestions
|
||||||
|
|
||||||
|
Each suggestion is numbered and tagged with severity:
|
||||||
|
|
||||||
|
### S1 [must-fix|should-fix|consider] -- Title
|
||||||
|
[Explanation of the issue and suggested change]
|
||||||
|
|
||||||
|
### S2 [must-fix|should-fix|consider] -- Title
|
||||||
|
[Explanation]
|
||||||
|
|
||||||
|
## Verdict
|
||||||
|
|
||||||
|
[approved / changes-requested]
|
||||||
|
[If changes-requested: list which suggestions are blocking (must-fix)]
|
||||||
|
|
||||||
|
</review>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Severity Levels
|
||||||
|
|
||||||
|
- **must-fix**: Blocking. The plan should not proceed without addressing this. Correctness bugs, security issues, architectural mistakes.
|
||||||
|
- **should-fix**: Important but not blocking. Missing test cases, suboptimal approaches, incomplete error handling.
|
||||||
|
- **consider**: Optional improvement. Style, alternative approaches, nice-to-haves.
|
||||||
|
|
||||||
|
### After the Engineer Responds
|
||||||
|
|
||||||
|
When an engineer responds to your review (approving or denying suggestions):
|
||||||
|
|
||||||
|
1. Read their response in the comment thread.
|
||||||
|
2. For approved suggestions: update the `<plan>` block to integrate the changes. Update your `<review>` status to `approved`.
|
||||||
|
3. For denied suggestions: acknowledge in a comment. If you disagree on a must-fix, escalate to the CEO.
|
||||||
|
4. Mark the issue as `done` when the plan is finalized.
|
||||||
|
|
||||||
|
### What NOT to Do
|
||||||
|
|
||||||
|
- Do not rewrite entire plans. Suggest targeted changes.
|
||||||
|
- Do not block on `consider`-level suggestions. Only `must-fix` items are blocking.
|
||||||
|
- Do not review code -- you review plans. If you see code in a plan, evaluate the approach, not the syntax.
|
||||||
|
- Do not create subtasks. Flag issues to the engineer via comments.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Codebase Context
|
||||||
|
|
||||||
|
This is a Rust CLI project (gitlore / `lore`). Key things to know when reviewing plans:
|
||||||
|
|
||||||
|
- **Async runtime**: asupersync 0.2 (NOT tokio). Plans referencing tokio APIs are wrong.
|
||||||
|
- **Robot mode**: Every new command must support `--robot`/`-J` JSON output from day one.
|
||||||
|
- **TDD**: Red/green/refactor is mandatory. Plans without a test strategy are incomplete.
|
||||||
|
- **SQLite**: `LIMIT` without `ORDER BY` is a bug. Schema has sharp edges (see project CLAUDE.md).
|
||||||
|
- **Error pipeline**: `thiserror` derive, each variant maps to exit code + robot error code.
|
||||||
|
- **No unsafe code**: `#![forbid(unsafe_code)]` is enforced.
|
||||||
|
- **Clippy pedantic + nursery**: Plans should account for strict lint requirements.
|
||||||
37
agents/plan-reviewer/HEARTBEAT.md
Normal file
37
agents/plan-reviewer/HEARTBEAT.md
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
# HEARTBEAT.md -- Plan Reviewer Heartbeat Checklist
|
||||||
|
|
||||||
|
Run this checklist on every heartbeat.
|
||||||
|
|
||||||
|
## 1. Identity and Context
|
||||||
|
|
||||||
|
- `GET /api/agents/me` -- confirm your id, role, budget, chainOfCommand.
|
||||||
|
- Check wake context: `PAPERCLIP_TASK_ID`, `PAPERCLIP_WAKE_REASON`, `PAPERCLIP_WAKE_COMMENT_ID`.
|
||||||
|
|
||||||
|
## 2. Get Assignments
|
||||||
|
|
||||||
|
- `GET /api/companies/{companyId}/issues?assigneeAgentId={your-id}&status=todo,in_progress,blocked`
|
||||||
|
- Prioritize: `in_progress` first, then `todo`. Skip `blocked` unless you can unblock it.
|
||||||
|
- If there is already an active run on an `in_progress` task, move to the next thing.
|
||||||
|
- If `PAPERCLIP_TASK_ID` is set and assigned to you, prioritize that task.
|
||||||
|
|
||||||
|
## 3. Checkout and Work
|
||||||
|
|
||||||
|
- Always checkout before working: `POST /api/issues/{id}/checkout`.
|
||||||
|
- Never retry a 409 -- that task belongs to someone else.
|
||||||
|
- Do the review. Update status and comment when done.
|
||||||
|
|
||||||
|
## 4. Review Workflow
|
||||||
|
|
||||||
|
For every plan review task:
|
||||||
|
|
||||||
|
1. **Read the issue** -- understand the full description and `<plan>` block.
|
||||||
|
2. **Read comments** -- understand discussion context and engineer intent.
|
||||||
|
3. **Read parent issue** -- understand the broader goal.
|
||||||
|
4. **Read relevant source code** -- verify the plan's assumptions about existing code.
|
||||||
|
5. **Write your review** -- append `<review>` block to the issue description.
|
||||||
|
6. **Comment** -- leave a summary comment and reassign to the engineer.
|
||||||
|
|
||||||
|
## 5. Exit
|
||||||
|
|
||||||
|
- Comment on any in_progress work before exiting.
|
||||||
|
- If no assignments and no valid mention-handoff, exit cleanly.
|
||||||
21
agents/plan-reviewer/SOUL.md
Normal file
21
agents/plan-reviewer/SOUL.md
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# SOUL.md -- Plan Reviewer Persona
|
||||||
|
|
||||||
|
You are the Plan Reviewer.
|
||||||
|
|
||||||
|
## Review Posture
|
||||||
|
|
||||||
|
- You catch problems before they become code. Your value is preventing wasted engineering hours.
|
||||||
|
- Be specific. "This might have issues" is useless. "The LIMIT on line 3 of step 2 lacks ORDER BY, which produces nondeterministic results per SQLite docs" is useful.
|
||||||
|
- Calibrate severity honestly. Not everything is a must-fix. Reserve blocking status for real correctness, security, or architectural issues.
|
||||||
|
- Respect the engineer's judgment. They know the codebase better than you. Challenge their approach, but acknowledge when they have good reasons for unconventional choices.
|
||||||
|
- Focus on what matters: correctness, security, completeness, testability. Skip style nitpicks.
|
||||||
|
- Think adversarially. What inputs break this? What happens under load? What if the network fails mid-operation?
|
||||||
|
- Be fast. Engineers are waiting on your review to start coding. A good review in 5 minutes beats a perfect review in an hour.
|
||||||
|
|
||||||
|
## Voice and Tone
|
||||||
|
|
||||||
|
- Direct and technical. Lead with the finding, then explain why it matters.
|
||||||
|
- Constructive, not combative. "This misses X" not "You forgot X."
|
||||||
|
- Brief. A review should be scannable in under 2 minutes.
|
||||||
|
- No filler. Skip "great plan overall" unless it genuinely is and you have something specific to praise.
|
||||||
|
- When uncertain, say so. "I'm not sure if asupersync handles this case -- worth verifying" is better than either silence or false confidence.
|
||||||
388
command-restructure/CLI_AUDIT.md
Normal file
388
command-restructure/CLI_AUDIT.md
Normal file
@@ -0,0 +1,388 @@
|
|||||||
|
# Gitlore CLI Command Audit
|
||||||
|
|
||||||
|
## 1. Full Command Inventory
|
||||||
|
|
||||||
|
**29 visible + 3 deprecated + 2 stub = 34 total command surface**
|
||||||
|
|
||||||
|
| # | Command | Aliases | Args | Flags | Purpose |
|
||||||
|
|---|---------|---------|------|-------|---------|
|
||||||
|
| 1 | `issues` | `issue` | `[IID]` | 15 | List/show issues |
|
||||||
|
| 2 | `mrs` | `mr`, `merge-requests` | `[IID]` | 16 | List/show MRs |
|
||||||
|
| 3 | `notes` | `note` | — | 16 | List notes |
|
||||||
|
| 4 | `search` | `find`, `query` | `<QUERY>` | 13 | Hybrid FTS+vector search |
|
||||||
|
| 5 | `timeline` | — | `<QUERY>` | 11 | Chronological event reconstruction |
|
||||||
|
| 6 | `who` | — | `[TARGET]` | 16 | People intelligence (5 modes) |
|
||||||
|
| 7 | `me` | — | — | 10 | Personal dashboard |
|
||||||
|
| 8 | `file-history` | — | `<PATH>` | 6 | MRs that touched a file |
|
||||||
|
| 9 | `trace` | — | `<PATH>` | 5 | file->MR->issue->discussion chain |
|
||||||
|
| 10 | `drift` | — | `<TYPE> <IID>` | 3 | Discussion divergence detection |
|
||||||
|
| 11 | `related` | — | `<QUERY_OR_TYPE> [IID]` | 3 | Semantic similarity |
|
||||||
|
| 12 | `count` | — | `<ENTITY>` | 2 | Count entities |
|
||||||
|
| 13 | `sync` | — | — | 14 | Full pipeline: ingest+docs+embed |
|
||||||
|
| 14 | `ingest` | — | `[ENTITY]` | 5 | Fetch from GitLab API |
|
||||||
|
| 15 | `generate-docs` | — | — | 2 | Build searchable documents |
|
||||||
|
| 16 | `embed` | — | — | 2 | Generate vector embeddings |
|
||||||
|
| 17 | `status` | `st` | — | 0 | Last sync times per project |
|
||||||
|
| 18 | `health` | — | — | 0 | Quick pre-flight (exit code only) |
|
||||||
|
| 19 | `doctor` | — | — | 0 | Full environment diagnostic |
|
||||||
|
| 20 | `stats` | `stat` | — | 3 | Document/index statistics |
|
||||||
|
| 21 | `init` | — | — | 6 | Setup config + database |
|
||||||
|
| 22 | `auth` | — | — | 0 | Verify GitLab token |
|
||||||
|
| 23 | `token` | — | subcommand | 1-2 | Token CRUD (set/show) |
|
||||||
|
| 24 | `cron` | — | subcommand | 0-1 | Auto-sync scheduling |
|
||||||
|
| 25 | `migrate` | — | — | 0 | Apply DB migrations |
|
||||||
|
| 26 | `robot-docs` | — | — | 1 | Agent self-discovery manifest |
|
||||||
|
| 27 | `completions` | — | `<SHELL>` | 0 | Shell completions |
|
||||||
|
| 28 | `version` | — | — | 0 | Version info |
|
||||||
|
| 29 | *help* | — | — | — | (clap built-in) |
|
||||||
|
| | **Hidden/deprecated:** | | | | |
|
||||||
|
| 30 | `list` | — | `<ENTITY>` | 14 | deprecated, use issues/mrs |
|
||||||
|
| 31 | `auth-test` | — | — | 0 | deprecated, use auth |
|
||||||
|
| 32 | `sync-status` | — | — | 0 | deprecated, use status |
|
||||||
|
| 33 | `backup` | — | — | 0 | Stub (not implemented) |
|
||||||
|
| 34 | `reset` | — | — | 1 | Stub (not implemented) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Semantic Overlap Analysis
|
||||||
|
|
||||||
|
### Cluster A: "Is the system working?" (4 commands, 1 concept)
|
||||||
|
|
||||||
|
| Command | What it checks | Exit code semantics | Has flags? |
|
||||||
|
|---------|---------------|---------------------|------------|
|
||||||
|
| `health` | config exists, DB opens, schema version | 0=healthy, 19=unhealthy | No |
|
||||||
|
| `doctor` | config, token, database, Ollama | informational | No |
|
||||||
|
| `status` | last sync times per project | informational | No |
|
||||||
|
| `stats` | document counts, index size, integrity | informational | `--check`, `--repair` |
|
||||||
|
|
||||||
|
**Problem:** A user/agent asking "is lore working?" must choose among four commands. `health` is a strict subset of `doctor`. `status` and `stats` are near-homonyms that answer different questions -- sync recency vs. index health. `count` (Cluster E) also overlaps with what `stats` reports.
|
||||||
|
|
||||||
|
**Cognitive cost:** High. The CLI literature (Clig.dev, Heroku CLI design guide, 12-factor CLI) consistently warns against >2 "status" commands. Users build a mental model of "the status command" -- when there are four, they pick wrong or give up.
|
||||||
|
|
||||||
|
**Theoretical basis:**
|
||||||
|
|
||||||
|
- **Nielsen's "Recognition over Recall"** -- Four similar system-status commands force users to *recall* which one does what. One command with progressive disclosure (flags for depth) lets them *recognize* the option they need. This is doubly important for LLM agents, which perform better with fewer top-level choices and compositional flags.
|
||||||
|
|
||||||
|
- **Hick's Law for CLIs** -- Decision and discovery cost grows with the number of available options. Each additional top-level command adds scanning time for humans and token cost for robots.
|
||||||
|
|
||||||
|
### Cluster B: "Data pipeline stages" (4 commands, 1 pipeline)
|
||||||
|
|
||||||
|
| Command | Pipeline stage | Subsumed by `sync`? |
|
||||||
|
|---------|---------------|---------------------|
|
||||||
|
| `sync` | ingest -> generate-docs -> embed | -- (is the parent) |
|
||||||
|
| `ingest` | GitLab API fetch | `sync` without `--no-docs --no-embed` |
|
||||||
|
| `generate-docs` | Build FTS documents | `sync --no-embed` (after ingest) |
|
||||||
|
| `embed` | Vector embeddings via Ollama | (final stage) |
|
||||||
|
|
||||||
|
**Problem:** `sync` already has skip flags (`--no-embed`, `--no-docs`, `--no-events`, `--no-status`, `--no-file-changes`). The individual stage commands duplicate this with less control -- `ingest` has `--full`, `--force`, `--dry-run`, but `sync` also has all three.
|
||||||
|
|
||||||
|
The standalone commands exist for granular debugging, but in practice they are needed in fewer than 5% of cases. They inflate the help screen while `sync` handles 95% of use cases.
|
||||||
|
|
||||||
|
### Cluster C: "File-centric intelligence" (3 overlapping surfaces)
|
||||||
|
|
||||||
|
| Command | Input | Output | Key flags |
|
||||||
|
|---------|-------|--------|-----------|
|
||||||
|
| `file-history` | `<PATH>` | MRs that touched file | `-p`, `--discussions`, `--no-follow-renames`, `--merged`, `-n` |
|
||||||
|
| `trace` | `<PATH>` | file->MR->issue->discussion chains | `-p`, `--discussions`, `--no-follow-renames`, `-n` |
|
||||||
|
| `who --path <PATH>` | `<PATH>` via flag | experts for file area | `-p`, `--since`, `-n` |
|
||||||
|
| `who --overlap <PATH>` | `<PATH>` via flag | users touching same files | `-p`, `--since`, `-n` |
|
||||||
|
|
||||||
|
**Problem:** `trace` is a superset of `file-history` -- it follows the same MR chain but additionally links to closing issues and discussions. They share 4 of 5 filter flags. A user who wants "what happened to this file?" has to choose between two commands that sound nearly identical.
|
||||||
|
|
||||||
|
### Cluster D: "Semantic discovery" (3 commands, all need embeddings)
|
||||||
|
|
||||||
|
| Command | Input | Output |
|
||||||
|
|---------|-------|--------|
|
||||||
|
| `search` | free text query | ranked documents |
|
||||||
|
| `related` | entity ref OR free text | similar entities |
|
||||||
|
| `drift` | entity ref | divergence score per discussion |
|
||||||
|
|
||||||
|
`related "some text"` is functionally a vector-only `search "some text" --mode semantic`. The difference is that `related` can also seed from an entity (e.g. `related issues 42`), while `search` only accepts text.
|
||||||
|
|
||||||
|
`drift` is specialized enough to stand alone, but it's only used on issues and has a single non-project flag (`--threshold`).
|
||||||
|
|
||||||
|
### Cluster E: "Count" is an orphan
|
||||||
|
|
||||||
|
`count` is a standalone command for `SELECT COUNT(*) FROM <table>`. This could be:
|
||||||
|
- A `--count` flag on `issues`/`mrs`/`notes`
|
||||||
|
- A section in `stats` output (which already shows counts)
|
||||||
|
- Part of `status` output
|
||||||
|
|
||||||
|
It exists as its own top-level command primarily for robot convenience, but adds to the 29-command sprawl.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Flag Consistency Audit
|
||||||
|
|
||||||
|
### Consistent (good patterns)
|
||||||
|
|
||||||
|
| Flag | Meaning | Used in |
|
||||||
|
|------|---------|---------|
|
||||||
|
| `-p, --project` | Scope to project (fuzzy) | issues, mrs, notes, search, sync, ingest, generate-docs, timeline, who, me, file-history, trace, drift, related |
|
||||||
|
| `-n, --limit` | Max results | issues, mrs, notes, search, timeline, who, me, file-history, trace, related |
|
||||||
|
| `--since` | Temporal filter (7d, 2w, YYYY-MM-DD) | issues, mrs, notes, search, timeline, who, me |
|
||||||
|
| `--fields` | Field selection / `minimal` preset | issues, mrs, notes, search, timeline, who, me |
|
||||||
|
| `--full` | Reset cursors / full rebuild | sync, ingest, embed, generate-docs |
|
||||||
|
| `--force` | Override stale lock | sync, ingest |
|
||||||
|
| `--dry-run` | Preview without changes | sync, ingest, stats |
|
||||||
|
|
||||||
|
### Inconsistencies (problems)
|
||||||
|
|
||||||
|
| Issue | Details | Impact |
|
||||||
|
|-------|---------|--------|
|
||||||
|
| `-f` collision | `ingest -f` = `--force`, `count -f` = `--for` | Robot confusion; violates "same short flag = same semantics" |
|
||||||
|
| `-a` inconsistency | `issues -a` = `--author`, `me` has no `-a` (uses `--user` for analogous concept) | Minor |
|
||||||
|
| `-s` inconsistency | `issues -s` = `--state`, `search` has no `-s` short flag at all | Missed ergonomic shortcut |
|
||||||
|
| `--sort` availability | Present in issues/mrs/notes, absent from search/timeline/file-history | Inconsistent query power |
|
||||||
|
| `--discussions` | `file-history --discussions`, `trace --discussions`, but `issues 42` has no `--discussions` flag | Can't get discussions when showing an issue |
|
||||||
|
| `--open` (browser) | `issues -o`, `mrs -o`, `notes --open` (no `-o`) | Inconsistent short flag |
|
||||||
|
| `--merged` | Only on `file-history`, not on `mrs` (which uses `--state merged`) | Different filter mechanics for same concept |
|
||||||
|
| Entity type naming | `count` takes `issues, mrs, discussions, notes, events`; `search --type` takes `issue, mr, discussion, note` (singular) | Singular vs plural for same concept |
|
||||||
|
|
||||||
|
**Theoretical basis:**
|
||||||
|
|
||||||
|
- **Principle of Least Surprise (POLS)** -- When `-f` means `--force` in one command and `--for` in another, both humans and agents learn the wrong lesson from one interaction and apply it to the other. CLI design guides (GNU standards, POSIX conventions, clig.dev) are unanimous: short flags should have consistent semantics across all subcommands.
|
||||||
|
|
||||||
|
- **Singular/plural inconsistency** (`issues` vs `issue` as entity type values) is particularly harmful for LLM agents, which use pattern matching on prior successful invocations. If `lore count issues` works, the agent will try `lore search --type issues` -- and get a parse error.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Robot Ergonomics Assessment
|
||||||
|
|
||||||
|
### Strengths (well above average for a CLI)
|
||||||
|
|
||||||
|
| Feature | Rating | Notes |
|
||||||
|
|---------|--------|-------|
|
||||||
|
| Structured output | Excellent | Consistent `{ok, data, meta}` envelope |
|
||||||
|
| Auto-detection | Excellent | Non-TTY -> robot mode, `LORE_ROBOT` env var |
|
||||||
|
| Error output | Excellent | Structured JSON to stderr with `actions` array for recovery |
|
||||||
|
| Exit codes | Excellent | 20 distinct, well-documented codes |
|
||||||
|
| Self-discovery | Excellent | `robot-docs` manifest, `--brief` for token savings |
|
||||||
|
| Typo tolerance | Excellent | Autocorrect with confidence scores + structured warnings |
|
||||||
|
| Field selection | Good | `--fields minimal` saves ~60% tokens |
|
||||||
|
| No-args behavior | Good | Robot mode auto-outputs robot-docs |
|
||||||
|
|
||||||
|
### Weaknesses
|
||||||
|
|
||||||
|
| Issue | Severity | Recommendation |
|
||||||
|
|-------|----------|----------------|
|
||||||
|
| 29 commands in robot-docs manifest | High | Agents spend tokens evaluating which command to use. Grouping would reduce decision space. |
|
||||||
|
| `status`/`stats`/`stat` near-homonyms | High | LLMs are particularly susceptible to surface-level lexical confusion. `stat` is an alias for `stats` while `status` is a different command -- this guarantees agent errors. |
|
||||||
|
| Singular vs plural entity types | Medium | `count issues` works but `search --type issues` fails. Agents learn from one and apply to the other. |
|
||||||
|
| Overlapping file commands | Medium | Agent must decide between `trace`, `file-history`, and `who --path`. The decision tree isn't obvious from names alone. |
|
||||||
|
| `count` as separate command | Low | Could be a flag; standalone command inflates the decision space |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Human Ergonomics Assessment
|
||||||
|
|
||||||
|
### Strengths
|
||||||
|
|
||||||
|
| Feature | Rating | Notes |
|
||||||
|
|---------|--------|-------|
|
||||||
|
| Help text quality | Excellent | Every command has examples, help headings organize flags |
|
||||||
|
| Short flags | Good | `-p`, `-n`, `-s`, `-a`, `-J` cover 80% of common use |
|
||||||
|
| Alias coverage | Good | `issue`/`issues`, `mr`/`mrs`, `st`/`status`, `find`/`search` |
|
||||||
|
| Subcommand inference | Good | `lore issu` -> `issues` via clap infer |
|
||||||
|
| Color/icon system | Good | Auto, with overrides |
|
||||||
|
|
||||||
|
### Weaknesses
|
||||||
|
|
||||||
|
| Issue | Severity | Recommendation |
|
||||||
|
|-------|----------|----------------|
|
||||||
|
| 29 commands in flat help | High | Doesn't fit one terminal screen. No grouping -> overwhelming |
|
||||||
|
| `status` vs `stats` naming | High | Humans will type wrong one repeatedly |
|
||||||
|
| `health` vs `doctor` distinction | Medium | "Which one do I run?" -- unclear from names |
|
||||||
|
| `who` 5-mode overload | Medium | Help text is long; mode exclusions are complex |
|
||||||
|
| Pipeline stages as top-level | Low | `ingest`/`generate-docs`/`embed` rarely used directly but clutter help |
|
||||||
|
| `generate-docs` is 14 chars | Low | Longest command name; `gen-docs` or `gendocs` would help |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Proposals (Ranked by Impact x Feasibility)
|
||||||
|
|
||||||
|
### P1: Help Grouping (HIGH impact, LOW effort)
|
||||||
|
|
||||||
|
**Problem:** 29 flat commands -> information overload.
|
||||||
|
|
||||||
|
**Fix:** Use clap's `help_heading` on subcommands to group them:
|
||||||
|
|
||||||
|
```
|
||||||
|
Query:
|
||||||
|
issues List or show issues [aliases: issue]
|
||||||
|
mrs List or show merge requests [aliases: mr]
|
||||||
|
notes List notes from discussions [aliases: note]
|
||||||
|
search Search indexed documents [aliases: find]
|
||||||
|
count Count entities in local database
|
||||||
|
|
||||||
|
Intelligence:
|
||||||
|
timeline Chronological timeline of events
|
||||||
|
who People intelligence: experts, workload, overlap
|
||||||
|
me Personal work dashboard
|
||||||
|
|
||||||
|
File Analysis:
|
||||||
|
trace Trace why code was introduced
|
||||||
|
file-history Show MRs that touched a file
|
||||||
|
related Find semantically related entities
|
||||||
|
drift Detect discussion divergence
|
||||||
|
|
||||||
|
Data Pipeline:
|
||||||
|
sync Run full sync pipeline
|
||||||
|
ingest Ingest data from GitLab
|
||||||
|
generate-docs Generate searchable documents
|
||||||
|
embed Generate vector embeddings
|
||||||
|
|
||||||
|
System:
|
||||||
|
init Initialize configuration and database
|
||||||
|
status Show sync state [aliases: st]
|
||||||
|
health Quick health check
|
||||||
|
doctor Check environment health
|
||||||
|
stats Document and index statistics [aliases: stat]
|
||||||
|
auth Verify GitLab authentication
|
||||||
|
token Manage stored GitLab token
|
||||||
|
migrate Run pending database migrations
|
||||||
|
cron Manage automatic syncing
|
||||||
|
completions Generate shell completions
|
||||||
|
robot-docs Agent self-discovery manifest
|
||||||
|
version Show version information
|
||||||
|
```
|
||||||
|
|
||||||
|
**Effort:** ~20 lines of `#[command(help_heading = "...")]` annotations. No behavior changes.
|
||||||
|
|
||||||
|
### P2: Resolve `status`/`stats` Confusion (HIGH impact, LOW effort)
|
||||||
|
|
||||||
|
**Option A (recommended):** Rename `stats` -> `index`.
|
||||||
|
- `lore status` = when did I last sync? (pipeline state)
|
||||||
|
- `lore index` = how big is my index? (data inventory)
|
||||||
|
- The alias `stat` goes away (it was causing confusion anyway)
|
||||||
|
|
||||||
|
**Option B:** Rename `status` -> `sync-state` and `stats` -> `db-stats`. More descriptive but longer.
|
||||||
|
|
||||||
|
**Option C:** Merge both under `check` (see P4).
|
||||||
|
|
||||||
|
### P3: Fix Singular/Plural Entity Type Inconsistency (MEDIUM impact, TRIVIAL effort)
|
||||||
|
|
||||||
|
Accept both singular and plural forms everywhere:
|
||||||
|
- `count` already takes `issues` (plural) -- also accept `issue`
|
||||||
|
- `search --type` already takes `issue` (singular) -- also accept `issues`
|
||||||
|
- `drift` takes `issues` -- also accept `issue`
|
||||||
|
|
||||||
|
This is a ~10 line change in the value parsers and eliminates an entire class of agent errors.
|
||||||
|
|
||||||
|
### P4: Merge `health` + `doctor` (MEDIUM impact, LOW effort)
|
||||||
|
|
||||||
|
`health` is a fast subset of `doctor`. Merge:
|
||||||
|
- `lore doctor` = full diagnostic (current behavior)
|
||||||
|
- `lore doctor --quick` = fast pre-flight, exit-code-only (current `health`)
|
||||||
|
- Drop `health` as a separate command, add a hidden alias for backward compat
|
||||||
|
|
||||||
|
### P5: Fix `-f` Short Flag Collision (MEDIUM impact, TRIVIAL effort)
|
||||||
|
|
||||||
|
Change `count`'s `-f, --for` to just `--for` (no short flag). `-f` should mean `--force` project-wide, or nowhere.
|
||||||
|
|
||||||
|
### P6: Consolidate `trace` + `file-history` (MEDIUM impact, MEDIUM effort)
|
||||||
|
|
||||||
|
`trace` already does everything `file-history` does plus more. Options:
|
||||||
|
|
||||||
|
**Option A:** Make `file-history` an alias for `trace --flat` (shows MR list without issue/discussion linking).
|
||||||
|
|
||||||
|
**Option B:** Add `--mrs-only` to `trace` that produces `file-history` output. Deprecate `file-history` with a hidden alias.
|
||||||
|
|
||||||
|
Either way, one fewer top-level command and no lost functionality.
|
||||||
|
|
||||||
|
### P7: Hide Pipeline Sub-stages (LOW impact, TRIVIAL effort)
|
||||||
|
|
||||||
|
Move `ingest`, `generate-docs`, `embed` to `#[command(hide = true)]`. They remain usable but don't clutter `--help`. Direct users to `sync` with stage-skip flags.
|
||||||
|
|
||||||
|
For power users who need individual stages, document in `sync --help`:
|
||||||
|
```
|
||||||
|
To run individual stages:
|
||||||
|
lore ingest # Fetch from GitLab only
|
||||||
|
lore generate-docs # Rebuild documents only
|
||||||
|
lore embed # Re-embed only
|
||||||
|
```
|
||||||
|
|
||||||
|
### P8: Make `count` a Flag, Not a Command (LOW impact, MEDIUM effort)
|
||||||
|
|
||||||
|
Add `--count` to `issues` and `mrs`:
|
||||||
|
```bash
|
||||||
|
lore issues --count # replaces: lore count issues
|
||||||
|
lore mrs --count # replaces: lore count mrs
|
||||||
|
lore notes --count # replaces: lore count notes
|
||||||
|
```
|
||||||
|
|
||||||
|
Keep `count` as a hidden alias for backward compatibility. Removes one top-level command.
|
||||||
|
|
||||||
|
### P9: Consistent `--open` Short Flag (LOW impact, TRIVIAL effort)
|
||||||
|
|
||||||
|
`notes --open` lacks the `-o` shorthand that `issues` and `mrs` have. Add it.
|
||||||
|
|
||||||
|
### P10: Add `--sort` to `search` (LOW impact, LOW effort)
|
||||||
|
|
||||||
|
`search` returns ranked results but offers no `--sort` override. Adding `--sort=score,created,updated` would bring it in line with `issues`/`mrs`/`notes`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Summary: Proposed Command Tree (After All Changes)
|
||||||
|
|
||||||
|
If all proposals were adopted, the visible top-level shrinks from **29 -> 23** (six commands hidden or merged: `file-history`, `count`, `ingest`, `generate-docs`, `embed`, `health`):
|
||||||
|
|
||||||
|
| Before (29) | After (23) | Change |
|
||||||
|
|-------------|------------|--------|
|
||||||
|
| `issues` | `issues` | -- |
|
||||||
|
| `mrs` | `mrs` | -- |
|
||||||
|
| `notes` | `notes` | -- |
|
||||||
|
| `search` | `search` | -- |
|
||||||
|
| `timeline` | `timeline` | -- |
|
||||||
|
| `who` | `who` | -- |
|
||||||
|
| `me` | `me` | -- |
|
||||||
|
| `file-history` | *(hidden, alias for `trace --flat`)* | **merged into trace** |
|
||||||
|
| `trace` | `trace` | absorbs file-history |
|
||||||
|
| `drift` | `drift` | -- |
|
||||||
|
| `related` | `related` | -- |
|
||||||
|
| `count` | *(hidden, `issues --count` replaces)* | **absorbed** |
|
||||||
|
| `sync` | `sync` | -- |
|
||||||
|
| `ingest` | *(hidden)* | **hidden** |
|
||||||
|
| `generate-docs` | *(hidden)* | **hidden** |
|
||||||
|
| `embed` | *(hidden)* | **hidden** |
|
||||||
|
| `status` | `status` | -- |
|
||||||
|
| `health` | *(merged into doctor)* | **merged** |
|
||||||
|
| `doctor` | `doctor` | absorbs health |
|
||||||
|
| `stats` | `index` | **renamed** |
|
||||||
|
| `init` | `init` | -- |
|
||||||
|
| `auth` | `auth` | -- |
|
||||||
|
| `token` | `token` | -- |
|
||||||
|
| `migrate` | `migrate` | -- |
|
||||||
|
| `cron` | `cron` | -- |
|
||||||
|
| `robot-docs` | `robot-docs` | -- |
|
||||||
|
| `completions` | `completions` | -- |
|
||||||
|
| `version` | `version` | -- |
|
||||||
|
|
||||||
|
**Net reduction:** 29 -> 23 visible (-21%). The hidden commands remain fully functional and documented in `robot-docs` for agents that already use them.
|
||||||
|
|
||||||
|
**Theoretical basis:**
|
||||||
|
|
||||||
|
- **Miller's Law** -- Humans can hold 7+/-2 items in working memory. 29 commands far exceeds this. Even with help grouping (P1), the sheer count creates decision fatigue. The literature on CLI design (Heroku's "12-Factor CLI", clig.dev's "Command Line Interface Guidelines") recommends 10-15 top-level commands maximum, with grouping or nesting for anything beyond.
|
||||||
|
|
||||||
|
- **For LLM agents specifically:** Research on tool-use with large tool sets (Schick et al. 2023, Qin et al. 2023) shows that agent accuracy degrades as the tool count increases, roughly following an inverse log curve. Reducing from 29 to 21 commands in the robot-docs manifest would measurably improve agent command selection accuracy.
|
||||||
|
|
||||||
|
- **Backward compatibility is free:** Since AGENTS.md says "we don't care about backward compatibility," hidden aliases cost nothing and prevent breakage for agents with cached robot-docs.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. Priority Matrix
|
||||||
|
|
||||||
|
| Proposal | Impact | Effort | Risk | Recommended Order |
|
||||||
|
|----------|--------|--------|------|-------------------|
|
||||||
|
| P1: Help grouping | High | Trivial | None | **Do first** |
|
||||||
|
| P3: Singular/plural fix | Medium | Trivial | None | **Do first** |
|
||||||
|
| P5: Fix `-f` collision | Medium | Trivial | None | **Do first** |
|
||||||
|
| P9: `notes -o` shorthand | Low | Trivial | None | **Do first** |
|
||||||
|
| P2: Rename `stats`->`index` | High | Low | Alias needed | **Do second** |
|
||||||
|
| P4: Merge health->doctor | Medium | Low | Alias needed | **Do second** |
|
||||||
|
| P7: Hide pipeline stages | Low | Trivial | Needs docs update | **Do second** |
|
||||||
|
| P6: Merge file-history->trace | Medium | Medium | Flag design | **Plan carefully** |
|
||||||
|
| P8: count -> --count flag | Low | Medium | Compat shim | **Plan carefully** |
|
||||||
|
| P10: `--sort` on search | Low | Low | None | **When convenient** |
|
||||||
|
|
||||||
|
The "do first" tier is 4 changes that could ship in a single commit with zero risk and immediate ergonomic improvement for both humans and agents.
|
||||||
966
command-restructure/IMPLEMENTATION_PLAN.md
Normal file
966
command-restructure/IMPLEMENTATION_PLAN.md
Normal file
@@ -0,0 +1,966 @@
|
|||||||
|
# Command Restructure: Implementation Plan
|
||||||
|
|
||||||
|
**Reference:** `command-restructure/CLI_AUDIT.md`
|
||||||
|
**Scope:** 10 proposals, 3 implementation phases, estimated ~15 files touched
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 1: Zero-Risk Quick Wins (1 commit)
|
||||||
|
|
||||||
|
These four changes are purely additive -- no behavior changes, no renames, no removed commands.
|
||||||
|
|
||||||
|
### P1: Help Grouping
|
||||||
|
|
||||||
|
**Goal:** Group the 29 visible commands into 5 semantic clusters in `--help` output.
|
||||||
|
|
||||||
|
**File:** `src/cli/mod.rs` (lines 117-399, the `Commands` enum)
|
||||||
|
|
||||||
|
**Changes:** Add `#[command(help_heading = "...")]` to each variant:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[derive(Subcommand)]
|
||||||
|
#[allow(clippy::large_enum_variant)]
|
||||||
|
pub enum Commands {
|
||||||
|
// ── Query ──────────────────────────────────────────────
|
||||||
|
/// List or show issues
|
||||||
|
#[command(visible_alias = "issue", help_heading = "Query")]
|
||||||
|
Issues(IssuesArgs),
|
||||||
|
|
||||||
|
/// List or show merge requests
|
||||||
|
#[command(visible_alias = "mr", alias = "merge-requests", alias = "merge-request", help_heading = "Query")]
|
||||||
|
Mrs(MrsArgs),
|
||||||
|
|
||||||
|
/// List notes from discussions
|
||||||
|
#[command(visible_alias = "note", help_heading = "Query")]
|
||||||
|
Notes(NotesArgs),
|
||||||
|
|
||||||
|
/// Search indexed documents
|
||||||
|
#[command(visible_alias = "find", alias = "query", help_heading = "Query")]
|
||||||
|
Search(SearchArgs),
|
||||||
|
|
||||||
|
/// Count entities in local database
|
||||||
|
#[command(help_heading = "Query")]
|
||||||
|
Count(CountArgs),
|
||||||
|
|
||||||
|
// ── Intelligence ───────────────────────────────────────
|
||||||
|
/// Show a chronological timeline of events matching a query
|
||||||
|
#[command(help_heading = "Intelligence")]
|
||||||
|
Timeline(TimelineArgs),
|
||||||
|
|
||||||
|
/// People intelligence: experts, workload, active discussions, overlap
|
||||||
|
#[command(help_heading = "Intelligence")]
|
||||||
|
Who(WhoArgs),
|
||||||
|
|
||||||
|
/// Personal work dashboard: open issues, authored/reviewing MRs, activity
|
||||||
|
#[command(help_heading = "Intelligence")]
|
||||||
|
Me(MeArgs),
|
||||||
|
|
||||||
|
// ── File Analysis ──────────────────────────────────────
|
||||||
|
/// Trace why code was introduced: file -> MR -> issue -> discussion
|
||||||
|
#[command(help_heading = "File Analysis")]
|
||||||
|
Trace(TraceArgs),
|
||||||
|
|
||||||
|
/// Show MRs that touched a file, with linked discussions
|
||||||
|
#[command(name = "file-history", help_heading = "File Analysis")]
|
||||||
|
FileHistory(FileHistoryArgs),
|
||||||
|
|
||||||
|
/// Find semantically related entities via vector search
|
||||||
|
#[command(help_heading = "File Analysis", ...)]
|
||||||
|
Related { ... },
|
||||||
|
|
||||||
|
/// Detect discussion divergence from original intent
|
||||||
|
#[command(help_heading = "File Analysis", ...)]
|
||||||
|
Drift { ... },
|
||||||
|
|
||||||
|
// ── Data Pipeline ──────────────────────────────────────
|
||||||
|
/// Run full sync pipeline: ingest -> generate-docs -> embed
|
||||||
|
#[command(help_heading = "Data Pipeline")]
|
||||||
|
Sync(SyncArgs),
|
||||||
|
|
||||||
|
/// Ingest data from GitLab
|
||||||
|
#[command(help_heading = "Data Pipeline")]
|
||||||
|
Ingest(IngestArgs),
|
||||||
|
|
||||||
|
/// Generate searchable documents from ingested data
|
||||||
|
#[command(name = "generate-docs", help_heading = "Data Pipeline")]
|
||||||
|
GenerateDocs(GenerateDocsArgs),
|
||||||
|
|
||||||
|
/// Generate vector embeddings for documents via Ollama
|
||||||
|
#[command(help_heading = "Data Pipeline")]
|
||||||
|
Embed(EmbedArgs),
|
||||||
|
|
||||||
|
// ── System ─────────────────────────────────────────────
|
||||||
|
// (init, status, health, doctor, stats, auth, token, migrate, cron,
|
||||||
|
// completions, robot-docs, version -- all get help_heading = "System")
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Verification:**
|
||||||
|
- `lore --help` shows grouped output
|
||||||
|
- All existing commands still work identically
|
||||||
|
- `lore robot-docs` output unchanged (robot-docs is hand-crafted, not derived from clap)
|
||||||
|
|
||||||
|
**Files touched:** `src/cli/mod.rs` only
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### P3: Singular/Plural Entity Type Fix
|
||||||
|
|
||||||
|
**Goal:** Accept both `issue`/`issues`, `mr`/`mrs` everywhere entity types are value-parsed.
|
||||||
|
|
||||||
|
**File:** `src/cli/args.rs`
|
||||||
|
|
||||||
|
**Change 1 -- `CountArgs.entity` (line 819):**
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
#[arg(value_parser = ["issues", "mrs", "discussions", "notes", "events"])]
|
||||||
|
pub entity: String,
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
#[arg(value_parser = ["issue", "issues", "mr", "mrs", "discussion", "discussions", "note", "notes", "event", "events"])]
|
||||||
|
pub entity: String,
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/cli/args.rs`
|
||||||
|
|
||||||
|
**Change 2 -- `SearchArgs.source_type` (line 369):**
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
#[arg(long = "type", value_parser = ["issue", "mr", "discussion", "note"], ...)]
|
||||||
|
pub source_type: Option<String>,
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
#[arg(long = "type", value_parser = ["issue", "issues", "mr", "mrs", "discussion", "discussions", "note", "notes"], ...)]
|
||||||
|
pub source_type: Option<String>,
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/cli/mod.rs`
|
||||||
|
|
||||||
|
**Change 3 -- `Drift.entity_type` (line 287):**
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
#[arg(value_parser = ["issues"])]
|
||||||
|
pub entity_type: String,
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
#[arg(value_parser = ["issue", "issues"])]
|
||||||
|
pub entity_type: String,
|
||||||
|
```
|
||||||
|
|
||||||
|
**Normalization layer:** In the handlers that consume these values, normalize to the canonical form (plural for entity names, singular for source_type) so downstream code doesn't need changes:
|
||||||
|
|
||||||
|
**File:** `src/app/handlers.rs`
|
||||||
|
|
||||||
|
In `handle_count` (~line 409): Normalize entity string before passing to `run_count`:
|
||||||
|
```rust
|
||||||
|
let entity = match args.entity.as_str() {
|
||||||
|
"issue" => "issues",
|
||||||
|
"mr" => "mrs",
|
||||||
|
"discussion" => "discussions",
|
||||||
|
"note" => "notes",
|
||||||
|
"event" => "events",
|
||||||
|
other => other,
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
In `handle_search` (search handler): Normalize source_type:
|
||||||
|
```rust
|
||||||
|
let source_type = args.source_type.as_deref().map(|t| match t {
|
||||||
|
"issues" => "issue",
|
||||||
|
"mrs" => "mr",
|
||||||
|
"discussions" => "discussion",
|
||||||
|
"notes" => "note",
|
||||||
|
other => other,
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
In `handle_drift` (~line 225): Normalize entity_type:
|
||||||
|
```rust
|
||||||
|
let entity_type = if entity_type == "issue" { "issues" } else { &entity_type };
|
||||||
|
```
|
||||||
|
|
||||||
|
**Verification:**
|
||||||
|
- `lore count issue` works (same as `lore count issues`)
|
||||||
|
- `lore search --type issues 'foo'` works (same as `--type issue`)
|
||||||
|
- `lore drift issue 42` works (same as `drift issues 42`)
|
||||||
|
- All existing invocations unchanged
|
||||||
|
|
||||||
|
**Files touched:** `src/cli/args.rs`, `src/cli/mod.rs`, `src/app/handlers.rs`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### P5: Fix `-f` Short Flag Collision
|
||||||
|
|
||||||
|
**Goal:** Remove `-f` shorthand from `count --for` so `-f` consistently means `--force` across the CLI.
|
||||||
|
|
||||||
|
**File:** `src/cli/args.rs` (line 823)
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
#[arg(short = 'f', long = "for", value_parser = ["issue", "mr"])]
|
||||||
|
pub for_entity: Option<String>,
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
#[arg(long = "for", value_parser = ["issue", "mr"])]
|
||||||
|
pub for_entity: Option<String>,
|
||||||
|
```
|
||||||
|
|
||||||
|
**Also update the value_parser to accept both forms** (while we're here):
|
||||||
|
```rust
|
||||||
|
#[arg(long = "for", value_parser = ["issue", "issues", "mr", "mrs"])]
|
||||||
|
pub for_entity: Option<String>,
|
||||||
|
```
|
||||||
|
|
||||||
|
And normalize in `handle_count`:
|
||||||
|
```rust
|
||||||
|
let for_entity = args.for_entity.as_deref().map(|f| match f {
|
||||||
|
"issues" => "issue",
|
||||||
|
"mrs" => "mr",
|
||||||
|
other => other,
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/app/robot_docs.rs` (line 173) -- update the robot-docs entry:
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
"flags": ["<entity: issues|mrs|discussions|notes|events>", "-f/--for <issue|mr>"],
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
"flags": ["<entity: issues|mrs|discussions|notes|events>", "--for <issue|mr>"],
|
||||||
|
```
|
||||||
|
|
||||||
|
**Verification:**
|
||||||
|
- `lore count notes --for mr` still works
|
||||||
|
- `lore count notes -f mr` now fails with a clear error (unknown flag `-f`)
|
||||||
|
- `lore ingest -f` still works (means `--force`)
|
||||||
|
|
||||||
|
**Files touched:** `src/cli/args.rs`, `src/app/robot_docs.rs`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### P9: Consistent `--open` Short Flag on `notes`
|
||||||
|
|
||||||
|
**Goal:** Add `-o` shorthand to `notes --open`, matching `issues` and `mrs`.
|
||||||
|
|
||||||
|
**File:** `src/cli/args.rs` (line 292)
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
#[arg(long, help_heading = "Actions")]
|
||||||
|
pub open: bool,
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
#[arg(short = 'o', long, help_heading = "Actions", overrides_with = "no_open")]
|
||||||
|
pub open: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-open", hide = true, overrides_with = "open")]
|
||||||
|
pub no_open: bool,
|
||||||
|
```
|
||||||
|
|
||||||
|
**Verification:**
|
||||||
|
- `lore notes -o` opens first result in browser
|
||||||
|
- Matches behavior of `lore issues -o` and `lore mrs -o`
|
||||||
|
|
||||||
|
**Files touched:** `src/cli/args.rs`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 1 Commit Summary
|
||||||
|
|
||||||
|
**Files modified:**
|
||||||
|
1. `src/cli/mod.rs` -- help_heading on all Commands variants + drift value_parser
|
||||||
|
2. `src/cli/args.rs` -- singular/plural value_parsers, remove `-f` from count, add `-o` to notes
|
||||||
|
3. `src/app/handlers.rs` -- normalization of entity/source_type strings
|
||||||
|
4. `src/app/robot_docs.rs` -- update count flags documentation
|
||||||
|
|
||||||
|
**Test plan:**
|
||||||
|
```bash
|
||||||
|
cargo check --all-targets
|
||||||
|
cargo clippy --all-targets -- -D warnings
|
||||||
|
cargo fmt --check
|
||||||
|
cargo test
|
||||||
|
lore --help # Verify grouped output
|
||||||
|
lore count issue # Verify singular accepted
|
||||||
|
lore search --type issues 'x' # Verify plural accepted
|
||||||
|
lore drift issue 42 # Verify singular accepted
|
||||||
|
lore notes -o # Verify short flag works
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 2: Renames and Merges (2-3 commits)
|
||||||
|
|
||||||
|
These changes rename commands and merge overlapping ones. Hidden aliases preserve backward compatibility.
|
||||||
|
|
||||||
|
### P2: Rename `stats` -> `index`
|
||||||
|
|
||||||
|
**Goal:** Eliminate `status`/`stats`/`stat` confusion. `stats` becomes `index`.
|
||||||
|
|
||||||
|
**File:** `src/cli/mod.rs`
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
/// Show document and index statistics
|
||||||
|
#[command(visible_alias = "stat", help_heading = "System")]
|
||||||
|
Stats(StatsArgs),
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
/// Show document and index statistics
|
||||||
|
#[command(visible_alias = "idx", aliases = ["stats", "stat"], help_heading = "System")]
|
||||||
|
Index(StatsArgs),
|
||||||
|
```
|
||||||
|
|
||||||
|
Note: `aliases = ["stats", "stat"]` registers hidden aliases (unlike `visible_alias`) -- old invocations still work, but `--help` shows only `index`.
|
||||||
|
|
||||||
|
**File:** `src/main.rs` (line 257)
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
Some(Commands::Stats(args)) => handle_stats(cli.config.as_deref(), args, robot_mode).await,
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
Some(Commands::Index(args)) => handle_stats(cli.config.as_deref(), args, robot_mode).await,
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/app/robot_docs.rs` (line 181)
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
"stats": {
|
||||||
|
"description": "Show document and index statistics",
|
||||||
|
...
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
"index": {
|
||||||
|
"description": "Show document and index statistics (formerly 'stats')",
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
Also update references in:
|
||||||
|
- `robot_docs.rs` quick_start.lore_exclusive array (line 415): `"stats: Database statistics..."` -> `"index: Database statistics..."`
|
||||||
|
- `robot_docs.rs` aliases.deprecated_commands: add `"stats": "index"`, `"stat": "index"`
|
||||||
|
|
||||||
|
**File:** `src/cli/autocorrect.rs`
|
||||||
|
|
||||||
|
Update `CANONICAL_SUBCOMMANDS` (line 366-area):
|
||||||
|
```rust
|
||||||
|
// Replace "stats" with "index" in the canonical list
|
||||||
|
// Add ("stats", "index") and ("stat", "index") to SUBCOMMAND_ALIASES
|
||||||
|
```
|
||||||
|
|
||||||
|
Update `COMMAND_FLAGS` (line 166-area):
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
("stats", &["--check", ...]),
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
("index", &["--check", ...]),
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/cli/robot.rs` -- update `expand_fields_preset` if any preset key is `"stats"` (currently no stats preset, so no change needed).
|
||||||
|
|
||||||
|
**Verification:**
|
||||||
|
- `lore index` works (shows document/index stats)
|
||||||
|
- `lore stats` still works (hidden alias)
|
||||||
|
- `lore stat` still works (hidden alias)
|
||||||
|
- `lore index --check` works
|
||||||
|
- `lore --help` shows `index` in System group, not `stats`
|
||||||
|
- `lore robot-docs` shows `index` key in commands map
|
||||||
|
|
||||||
|
**Files touched:** `src/cli/mod.rs`, `src/main.rs`, `src/app/robot_docs.rs`, `src/cli/autocorrect.rs`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### P4: Merge `health` into `doctor`
|
||||||
|
|
||||||
|
**Goal:** One diagnostic command (`doctor`) with a `--quick` flag for the pre-flight check that `health` currently provides.
|
||||||
|
|
||||||
|
**File:** `src/cli/mod.rs`
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
/// Quick health check: config, database, schema version
|
||||||
|
#[command(after_help = "...")]
|
||||||
|
Health,
|
||||||
|
|
||||||
|
/// Check environment health
|
||||||
|
#[command(after_help = "...")]
|
||||||
|
Doctor,
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
// Remove Health variant entirely. Add hidden alias:
|
||||||
|
/// Check environment health (--quick for fast pre-flight)
|
||||||
|
#[command(
|
||||||
|
after_help = "...",
|
||||||
|
alias = "health", // hidden backward compat
|
||||||
|
help_heading = "System"
|
||||||
|
)]
|
||||||
|
Doctor {
|
||||||
|
/// Fast pre-flight check only (config, DB, schema). Exit 0 = healthy.
|
||||||
|
#[arg(long)]
|
||||||
|
quick: bool,
|
||||||
|
},
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/main.rs`
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// BEFORE:
|
||||||
|
Some(Commands::Doctor) => handle_doctor(cli.config.as_deref(), robot_mode).await,
|
||||||
|
...
|
||||||
|
Some(Commands::Health) => handle_health(cli.config.as_deref(), robot_mode).await,
|
||||||
|
|
||||||
|
// AFTER:
|
||||||
|
Some(Commands::Doctor { quick }) => {
|
||||||
|
if quick {
|
||||||
|
handle_health(cli.config.as_deref(), robot_mode).await
|
||||||
|
} else {
|
||||||
|
handle_doctor(cli.config.as_deref(), robot_mode).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Health variant removed from enum, so no separate match arm
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/app/robot_docs.rs`
|
||||||
|
|
||||||
|
Merge the `health` and `doctor` entries:
|
||||||
|
```rust
|
||||||
|
"doctor": {
|
||||||
|
"description": "Environment health check. Use --quick for fast pre-flight (exit 0 = healthy, 19 = unhealthy).",
|
||||||
|
"flags": ["--quick"],
|
||||||
|
"example": "lore --robot doctor",
|
||||||
|
"notes": {
|
||||||
|
"quick_mode": "lore --robot doctor --quick — fast pre-flight check (formerly 'lore health'). Only checks config, DB, schema version. Returns exit 19 on failure.",
|
||||||
|
"full_mode": "lore --robot doctor — full diagnostic: config, auth, database, Ollama"
|
||||||
|
},
|
||||||
|
"response_schema": {
|
||||||
|
"full": { ... }, // current doctor schema
|
||||||
|
"quick": { ... } // current health schema
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Remove the standalone `health` entry from the commands map.
|
||||||
|
|
||||||
|
**File:** `src/cli/autocorrect.rs`
|
||||||
|
|
||||||
|
- Remove `"health"` from `CANONICAL_SUBCOMMANDS` (clap's `alias` handles it)
|
||||||
|
- Or keep it -- since clap treats aliases as valid subcommands, the autocorrect system will still resolve typos like `"helth"` to `"health"` which clap then maps to `doctor`. Either way works.
|
||||||
|
|
||||||
|
**File:** `src/app/robot_docs.rs` -- update `workflows.pre_flight`:
|
||||||
|
```rust
|
||||||
|
"pre_flight": [
|
||||||
|
"lore --robot doctor --quick"
|
||||||
|
],
|
||||||
|
```
|
||||||
|
|
||||||
|
Add to aliases.deprecated_commands:
|
||||||
|
```rust
|
||||||
|
"health": "doctor --quick"
|
||||||
|
```
|
||||||
|
|
||||||
|
**Verification:**
|
||||||
|
- `lore doctor` runs full diagnostic (unchanged behavior)
|
||||||
|
- `lore doctor --quick` runs fast pre-flight (exit 0/19)
|
||||||
|
- `lore health` still works (hidden alias, runs `doctor --quick`)
|
||||||
|
- `lore --help` shows only `doctor` in System group
|
||||||
|
- `lore robot-docs` shows merged entry
|
||||||
|
|
||||||
|
**Files touched:** `src/cli/mod.rs`, `src/main.rs`, `src/app/robot_docs.rs`, `src/cli/autocorrect.rs`
|
||||||
|
|
||||||
|
**Important edge case:** `lore health` via the hidden alias will invoke `Doctor { quick: false }` unless we handle it specially. Two options:
|
||||||
|
|
||||||
|
**Option A (simpler):** Instead of making `health` an alias of `doctor`, keep both variants but hide `Health`:
|
||||||
|
```rust
|
||||||
|
#[command(hide = true, help_heading = "System")]
|
||||||
|
Health,
|
||||||
|
```
|
||||||
|
Then in `main.rs`, `Commands::Health` maps to `handle_health()` as before. This is less clean but zero-risk.
|
||||||
|
|
||||||
|
**Option B (cleaner):** In the autocorrect layer, rewrite `health` -> `doctor --quick` before clap parsing:
|
||||||
|
```rust
|
||||||
|
// In SUBCOMMAND_ALIASES or a new pre-clap rewrite:
|
||||||
|
("health", "doctor"), // plus inject "--quick" flag
|
||||||
|
```
|
||||||
|
This requires a small enhancement to autocorrect to support flag injection during alias resolution.
|
||||||
|
|
||||||
|
**Recommendation:** Use Option A for initial implementation. It's one line (`hide = true`) and achieves the goal of removing `health` from `--help` while preserving full backward compatibility. The `doctor --quick` flag is additive.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### P7: Hide Pipeline Sub-stages
|
||||||
|
|
||||||
|
**Goal:** Remove `ingest`, `generate-docs`, `embed` from `--help` while keeping them fully functional.
|
||||||
|
|
||||||
|
**File:** `src/cli/mod.rs`
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// Add hide = true to each:
|
||||||
|
|
||||||
|
/// Ingest data from GitLab
|
||||||
|
#[command(hide = true)]
|
||||||
|
Ingest(IngestArgs),
|
||||||
|
|
||||||
|
/// Generate searchable documents from ingested data
|
||||||
|
#[command(name = "generate-docs", hide = true)]
|
||||||
|
GenerateDocs(GenerateDocsArgs),
|
||||||
|
|
||||||
|
/// Generate vector embeddings for documents via Ollama
|
||||||
|
#[command(hide = true)]
|
||||||
|
Embed(EmbedArgs),
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/cli/mod.rs` -- Update `Sync` help text to mention the individual stage commands:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
/// Run full sync pipeline: ingest -> generate-docs -> embed
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore sync # Full pipeline: ingest + docs + embed
|
||||||
|
lore sync --no-embed # Skip embedding step
|
||||||
|
...
|
||||||
|
|
||||||
|
\x1b[1mIndividual stages:\x1b[0m
|
||||||
|
lore ingest # Fetch from GitLab only
|
||||||
|
lore generate-docs # Rebuild documents only
|
||||||
|
lore embed # Re-embed only",
|
||||||
|
help_heading = "Data Pipeline"
|
||||||
|
)]
|
||||||
|
Sync(SyncArgs),
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/app/robot_docs.rs` -- Add a `"hidden": true` field to the ingest/generate-docs/embed entries so agents know these are secondary:
|
||||||
|
```rust
|
||||||
|
"ingest": {
|
||||||
|
"hidden": true,
|
||||||
|
"description": "Sync data from GitLab (prefer 'sync' for full pipeline)",
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
**Verification:**
|
||||||
|
- `lore --help` no longer shows ingest, generate-docs, embed
|
||||||
|
- `lore ingest`, `lore generate-docs`, `lore embed` all still work
|
||||||
|
- `lore sync --help` mentions individual stage commands
|
||||||
|
- `lore robot-docs` still includes all three (with `hidden: true`)
|
||||||
|
|
||||||
|
**Files touched:** `src/cli/mod.rs`, `src/app/robot_docs.rs`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 2 Commit Summary
|
||||||
|
|
||||||
|
**Commit A: Rename `stats` -> `index`**
|
||||||
|
- `src/cli/mod.rs`, `src/main.rs`, `src/app/robot_docs.rs`, `src/cli/autocorrect.rs`
|
||||||
|
|
||||||
|
**Commit B: Merge `health` into `doctor`, hide pipeline stages**
|
||||||
|
- `src/cli/mod.rs`, `src/main.rs`, `src/app/robot_docs.rs`, `src/cli/autocorrect.rs`
|
||||||
|
|
||||||
|
**Test plan:**
|
||||||
|
```bash
|
||||||
|
cargo check --all-targets
|
||||||
|
cargo clippy --all-targets -- -D warnings
|
||||||
|
cargo fmt --check
|
||||||
|
cargo test
|
||||||
|
|
||||||
|
# Rename verification
|
||||||
|
lore index # Works (new name)
|
||||||
|
lore stats # Works (hidden alias)
|
||||||
|
lore index --check # Works
|
||||||
|
|
||||||
|
# Doctor merge verification
|
||||||
|
lore doctor # Full diagnostic
|
||||||
|
lore doctor --quick # Fast pre-flight
|
||||||
|
lore health # Still works (hidden)
|
||||||
|
|
||||||
|
# Hidden stages verification
|
||||||
|
lore --help # ingest/generate-docs/embed gone
|
||||||
|
lore ingest # Still works
|
||||||
|
lore sync --help # Mentions individual stages
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 3: Structural Consolidation (requires careful design)
|
||||||
|
|
||||||
|
These changes merge or absorb commands. More effort, more testing, but the biggest UX wins.
|
||||||
|
|
||||||
|
### P6: Consolidate `file-history` into `trace`
|
||||||
|
|
||||||
|
**Goal:** `trace` absorbs `file-history`. One command for file-centric intelligence.
|
||||||
|
|
||||||
|
**Approach:** Add `--mrs-only` flag to `trace`. When set, output matches `file-history` format (flat MR list, no issue/discussion linking). `file-history` remains available as a hidden command for backward compatibility.
|
||||||
|
|
||||||
|
**File:** `src/cli/args.rs` -- Add flag to `TraceArgs`:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
pub struct TraceArgs {
|
||||||
|
pub path: String,
|
||||||
|
|
||||||
|
#[arg(short = 'p', long, help_heading = "Filters")]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
#[arg(long, help_heading = "Output")]
|
||||||
|
pub discussions: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-follow-renames", help_heading = "Filters")]
|
||||||
|
pub no_follow_renames: bool,
|
||||||
|
|
||||||
|
#[arg(short = 'n', long = "limit", default_value = "20", help_heading = "Output")]
|
||||||
|
pub limit: usize,
|
||||||
|
|
||||||
|
// NEW: absorb file-history behavior
|
||||||
|
/// Show only MR list without issue/discussion linking (file-history mode)
|
||||||
|
#[arg(long = "mrs-only", help_heading = "Output")]
|
||||||
|
pub mrs_only: bool,
|
||||||
|
|
||||||
|
/// Only show merged MRs (file-history mode)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub merged: bool,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/cli/mod.rs` -- Hide `FileHistory`:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
/// Show MRs that touched a file, with linked discussions
|
||||||
|
#[command(name = "file-history", hide = true, help_heading = "File Analysis")]
|
||||||
|
FileHistory(FileHistoryArgs),
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/app/handlers.rs` -- Route `trace --mrs-only` to the file-history handler:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
async fn handle_trace(
    config_override: Option<&str>,
    args: TraceArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    if args.mrs_only {
        // Delegate to the file-history handler
        let fh_args = FileHistoryArgs {
            path: args.path,
            project: args.project,
            discussions: args.discussions,
            no_follow_renames: args.no_follow_renames,
            merged: args.merged,
            limit: args.limit,
        };
        return handle_file_history(config_override, fh_args, robot_mode).await;
    }
    // ... existing trace logic ...
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/app/robot_docs.rs` -- Update trace entry, mark file-history as deprecated:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
"trace": {
|
||||||
|
"description": "Trace why code was introduced: file -> MR -> issue -> discussion. Use --mrs-only for flat MR listing.",
|
||||||
|
"flags": ["<path>", "-p/--project", "--discussions", "--no-follow-renames", "-n/--limit", "--mrs-only", "--merged"],
|
||||||
|
...
|
||||||
|
},
|
||||||
|
"file-history": {
|
||||||
|
"hidden": true,
|
||||||
|
"deprecated": "Use 'trace --mrs-only' instead",
|
||||||
|
...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Verification:**
|
||||||
|
- `lore trace src/main.rs` works unchanged
|
||||||
|
- `lore trace src/main.rs --mrs-only` produces file-history output
|
||||||
|
- `lore trace src/main.rs --mrs-only --merged` filters to merged MRs
|
||||||
|
- `lore file-history src/main.rs` still works (hidden command)
|
||||||
|
- `lore --help` shows only `trace` in File Analysis group
|
||||||
|
|
||||||
|
**Files touched:** `src/cli/args.rs`, `src/cli/mod.rs`, `src/app/handlers.rs`, `src/app/robot_docs.rs`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### P8: Make `count` a Flag on Entity Commands
|
||||||
|
|
||||||
|
**Goal:** `lore issues --count` replaces `lore count issues`. Standalone `count` becomes hidden.
|
||||||
|
|
||||||
|
**File:** `src/cli/args.rs` -- Add `--count` to `IssuesArgs`, `MrsArgs`, `NotesArgs`:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// In IssuesArgs:
|
||||||
|
/// Show count only (no listing)
|
||||||
|
#[arg(long, help_heading = "Output", conflicts_with_all = ["iid", "open"])]
|
||||||
|
pub count: bool,
|
||||||
|
|
||||||
|
// In MrsArgs:
|
||||||
|
/// Show count only (no listing)
|
||||||
|
#[arg(long, help_heading = "Output", conflicts_with_all = ["iid", "open"])]
|
||||||
|
pub count: bool,
|
||||||
|
|
||||||
|
// In NotesArgs:
|
||||||
|
/// Show count only (no listing)
|
||||||
|
#[arg(long, help_heading = "Output", conflicts_with = "open")]
|
||||||
|
pub count: bool,
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/app/handlers.rs` -- In `handle_issues`, `handle_mrs`, `handle_notes`, check the count flag early:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// In handle_issues (pseudocode):
|
||||||
|
if args.count {
|
||||||
|
let count_args = CountArgs { entity: "issues".to_string(), for_entity: None }; // NOTE(review): plain delegation drops any active filters (e.g. -s/--state, --for-issue) -- either thread them through or reject the combination via conflicts_with
|
||||||
|
return handle_count(config_override, count_args, robot_mode).await;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/cli/mod.rs` -- Hide `Count`:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
/// Count entities in local database
|
||||||
|
#[command(hide = true, help_heading = "Query")]
|
||||||
|
Count(CountArgs),
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/app/robot_docs.rs` -- Mark count as hidden, add `--count` documentation to issues/mrs/notes entries.
|
||||||
|
|
||||||
|
**Verification:**
|
||||||
|
- `lore issues --count` returns issue count
|
||||||
|
- `lore mrs --count` returns MR count
|
||||||
|
- `lore notes --count` returns note count
|
||||||
|
- `lore count issues` still works (hidden)
|
||||||
|
- `lore count discussions --for mr` still works (no equivalent in the new pattern -- discussions/events/references still need the standalone `count` command)
|
||||||
|
|
||||||
|
**Important note:** `count` supports entity types that don't have their own command (discussions, events, references). The standalone `count` must remain functional (just hidden). The `--count` flag on `issues`/`mrs`/`notes` handles the common cases only.
|
||||||
|
|
||||||
|
**Files touched:** `src/cli/args.rs`, `src/cli/mod.rs`, `src/app/handlers.rs`, `src/app/robot_docs.rs`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### P10: Add `--sort` to `search`
|
||||||
|
|
||||||
|
**Goal:** Allow sorting search results by score, created date, or updated date.
|
||||||
|
|
||||||
|
**File:** `src/cli/args.rs` -- Add to `SearchArgs`:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
/// Sort results by field (score is default for ranked search)
|
||||||
|
#[arg(long, value_parser = ["score", "created", "updated"], default_value = "score", help_heading = "Sorting")]
|
||||||
|
pub sort: String,
|
||||||
|
|
||||||
|
/// Sort ascending (default: descending)
|
||||||
|
#[arg(long, help_heading = "Sorting", overrides_with = "no_asc")]
|
||||||
|
pub asc: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-asc", hide = true, overrides_with = "asc")]
|
||||||
|
pub no_asc: bool,
|
||||||
|
```
|
||||||
|
|
||||||
|
**File:** `src/cli/commands/search.rs` -- Thread the sort parameter through to the search query.
|
||||||
|
|
||||||
|
The search function currently returns results sorted by score. When `--sort created` or `--sort updated` is specified, apply an `ORDER BY` clause to the final result set.
|
||||||
|
|
||||||
|
**File:** `src/app/robot_docs.rs` -- Add `--sort` and `--asc` to the search command's flags list.
|
||||||
|
|
||||||
|
**Verification:**
|
||||||
|
- `lore search 'auth' --sort score` (default, unchanged)
|
||||||
|
- `lore search 'auth' --sort created --asc` (oldest first)
|
||||||
|
- `lore search 'auth' --sort updated` (most recently updated first)
|
||||||
|
|
||||||
|
**Files touched:** `src/cli/args.rs`, `src/cli/commands/search.rs`, `src/app/robot_docs.rs`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 3 Commit Summary
|
||||||
|
|
||||||
|
**Commit C: Consolidate file-history into trace**
|
||||||
|
- `src/cli/args.rs`, `src/cli/mod.rs`, `src/app/handlers.rs`, `src/app/robot_docs.rs`
|
||||||
|
|
||||||
|
**Commit D: Add `--count` flag to entity commands**
|
||||||
|
- `src/cli/args.rs`, `src/cli/mod.rs`, `src/app/handlers.rs`, `src/app/robot_docs.rs`
|
||||||
|
|
||||||
|
**Commit E: Add `--sort` to search**
|
||||||
|
- `src/cli/args.rs`, `src/cli/commands/search.rs`, `src/app/robot_docs.rs`
|
||||||
|
|
||||||
|
**Test plan:**
|
||||||
|
```bash
|
||||||
|
cargo check --all-targets
|
||||||
|
cargo clippy --all-targets -- -D warnings
|
||||||
|
cargo fmt --check
|
||||||
|
cargo test
|
||||||
|
|
||||||
|
# trace consolidation
|
||||||
|
lore trace src/main.rs --mrs-only
|
||||||
|
lore trace src/main.rs --mrs-only --merged --discussions
|
||||||
|
lore file-history src/main.rs # backward compat
|
||||||
|
|
||||||
|
# count flag
|
||||||
|
lore issues --count
|
||||||
|
lore mrs --count -s opened
|
||||||
|
lore notes --count --for-issue 42
|
||||||
|
lore count discussions --for mr # still works
|
||||||
|
|
||||||
|
# search sort
|
||||||
|
lore search 'auth' --sort created --asc
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Documentation Updates
|
||||||
|
|
||||||
|
After all implementation is complete:
|
||||||
|
|
||||||
|
### CLAUDE.md / AGENTS.md
|
||||||
|
|
||||||
|
Update the robot mode command reference to reflect:
|
||||||
|
- `stats` -> `index` (with note that `stats` is a hidden alias)
|
||||||
|
- `health` -> `doctor --quick` (with note that `health` is a hidden alias)
|
||||||
|
- Remove `ingest`, `generate-docs`, `embed` from the primary command table (mention as "hidden, use `sync`")
|
||||||
|
- Remove `file-history` from primary table (mention as "hidden, use `trace --mrs-only`")
|
||||||
|
- Add `--count` flag to issues/mrs/notes documentation
|
||||||
|
- Add `--sort` flag to search documentation
|
||||||
|
- Add `--mrs-only` and `--merged` flags to trace documentation
|
||||||
|
|
||||||
|
### robot-docs Self-Discovery
|
||||||
|
|
||||||
|
The `robot_docs.rs` changes above handle this. Key points:
|
||||||
|
- New `"hidden": true` field on deprecated/hidden commands
|
||||||
|
- Updated descriptions mentioning canonical alternatives
|
||||||
|
- Updated flags lists
|
||||||
|
- Updated workflows section
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## File Impact Summary
|
||||||
|
|
||||||
|
| File | Phase 1 | Phase 2 | Phase 3 | Total Changes |
|
||||||
|
|------|---------|---------|---------|---------------|
|
||||||
|
| `src/cli/mod.rs` | help_heading, drift value_parser | stats->index rename, hide health, hide pipeline stages | hide file-history, hide count | 4 passes |
|
||||||
|
| `src/cli/args.rs` | singular/plural, remove `-f`, add `-o` | — | `--mrs-only`/`--merged` on trace, `--count` on entities, `--sort` on search | 2 passes |
|
||||||
|
| `src/app/handlers.rs` | normalize entity strings | route doctor --quick | trace mrs-only delegation, count flag routing | 3 passes |
|
||||||
|
| `src/app/robot_docs.rs` | update count flags | rename stats->index, merge health+doctor, add hidden field | update trace, file-history, count, search entries | 3 passes |
|
||||||
|
| `src/cli/autocorrect.rs` | — | update CANONICAL_SUBCOMMANDS, SUBCOMMAND_ALIASES, COMMAND_FLAGS | — | 1 pass |
|
||||||
|
| `src/main.rs` | — | stats->index variant rename, doctor variant change | — | 1 pass |
|
||||||
|
| `src/cli/commands/search.rs` | — | — | sort parameter threading | 1 pass |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Before / After Summary
|
||||||
|
|
||||||
|
### Command Count
|
||||||
|
|
||||||
|
| Metric | Before | After | Delta |
|
||||||
|
|--------|--------|-------|-------|
|
||||||
|
| Visible top-level commands | 29 | 21 | -8 (-28%) |
|
||||||
|
| Hidden commands (functional) | 4 | 12 | +8 (absorbed) |
|
||||||
|
| Stub/unimplemented commands | 2 | 2 | 0 |
|
||||||
|
| Total functional commands | 33 | 33 | 0 (nothing lost) |
|
||||||
|
|
||||||
|
### `lore --help` Output
|
||||||
|
|
||||||
|
**Before (29 commands, flat list, ~50 lines of commands):**
|
||||||
|
```
|
||||||
|
Commands:
|
||||||
|
issues List or show issues [aliases: issue]
|
||||||
|
mrs List or show merge requests [aliases: mr]
|
||||||
|
notes List notes from discussions [aliases: note]
|
||||||
|
ingest Ingest data from GitLab
|
||||||
|
count Count entities in local database
|
||||||
|
status Show sync state [aliases: st]
|
||||||
|
auth Verify GitLab authentication
|
||||||
|
doctor Check environment health
|
||||||
|
version Show version information
|
||||||
|
init Initialize configuration and database
|
||||||
|
search Search indexed documents [aliases: find]
|
||||||
|
stats Show document and index statistics [aliases: stat]
|
||||||
|
generate-docs Generate searchable documents from ingested data
|
||||||
|
embed Generate vector embeddings for documents via Ollama
|
||||||
|
sync Run full sync pipeline: ingest -> generate-docs -> embed
|
||||||
|
migrate Run pending database migrations
|
||||||
|
health Quick health check: config, database, schema version
|
||||||
|
robot-docs Machine-readable command manifest for agent self-discovery
|
||||||
|
completions Generate shell completions
|
||||||
|
timeline Show a chronological timeline of events matching a query
|
||||||
|
who People intelligence: experts, workload, active discussions, overlap
|
||||||
|
me Personal work dashboard: open issues, authored/reviewing MRs, activity
|
||||||
|
file-history Show MRs that touched a file, with linked discussions
|
||||||
|
trace Trace why code was introduced: file -> MR -> issue -> discussion
|
||||||
|
drift Detect discussion divergence from original intent
|
||||||
|
related Find semantically related entities via vector search
|
||||||
|
cron Manage cron-based automatic syncing
|
||||||
|
token Manage stored GitLab token
|
||||||
|
help Print this message or the help of the given subcommand(s)
|
||||||
|
```
|
||||||
|
|
||||||
|
**After (21 commands, grouped, ~35 lines of commands):**
|
||||||
|
```
|
||||||
|
Query:
|
||||||
|
issues List or show issues [aliases: issue]
|
||||||
|
mrs List or show merge requests [aliases: mr]
|
||||||
|
notes List notes from discussions [aliases: note]
|
||||||
|
search Search indexed documents [aliases: find]
|
||||||
|
|
||||||
|
Intelligence:
|
||||||
|
timeline Chronological timeline of events
|
||||||
|
who People intelligence: experts, workload, overlap
|
||||||
|
me Personal work dashboard
|
||||||
|
|
||||||
|
File Analysis:
|
||||||
|
trace Trace code provenance / file history
|
||||||
|
related Find semantically related entities
|
||||||
|
drift Detect discussion divergence
|
||||||
|
|
||||||
|
Data Pipeline:
|
||||||
|
sync Run full sync pipeline
|
||||||
|
|
||||||
|
System:
|
||||||
|
init Initialize configuration and database
|
||||||
|
status Show sync state [aliases: st]
|
||||||
|
doctor Check environment health (--quick for pre-flight)
|
||||||
|
index Document and index statistics [aliases: idx]
|
||||||
|
auth Verify GitLab authentication
|
||||||
|
token Manage stored GitLab token
|
||||||
|
migrate Run pending database migrations
|
||||||
|
cron Manage automatic syncing
|
||||||
|
robot-docs Agent self-discovery manifest
|
||||||
|
completions Generate shell completions
|
||||||
|
version Show version information
|
||||||
|
```
|
||||||
|
|
||||||
|
### Flag Consistency
|
||||||
|
|
||||||
|
| Issue | Before | After |
|
||||||
|
|-------|--------|-------|
|
||||||
|
| `-f` collision (force vs for) | `ingest -f`=force, `count -f`=for | `-f` removed from count; `-f` = force everywhere |
|
||||||
|
| Singular/plural entity types | `count issues` but `search --type issue` | Both forms accepted everywhere |
|
||||||
|
| `notes --open` missing `-o` | `notes --open` (no shorthand) | `notes -o` works (matches issues/mrs) |
|
||||||
|
| `search` missing `--sort` | No sort override | `--sort score\|created\|updated` + `--asc` |
|
||||||
|
|
||||||
|
### Naming Confusion
|
||||||
|
|
||||||
|
| Before | After | Resolution |
|
||||||
|
|--------|-------|------------|
|
||||||
|
| `status` vs `stats` vs `stat` (3 names, 2 commands) | `status` + `index` (2 names, 2 commands) | Eliminated near-homonym collision |
|
||||||
|
| `health` vs `doctor` (2 commands, overlapping scope) | `doctor` + `doctor --quick` (1 command) | Progressive disclosure |
|
||||||
|
| `trace` vs `file-history` (2 commands, overlapping function) | `trace` + `trace --mrs-only` (1 command) | Superset absorbs subset |
|
||||||
|
|
||||||
|
### Robot Ergonomics
|
||||||
|
|
||||||
|
| Metric | Before | After |
|
||||||
|
|--------|--------|-------|
|
||||||
|
| Commands in robot-docs manifest | 29 | 21 visible + 12 hidden (flagged in a separate section) |
|
||||||
|
| Agent decision space for "system check" | 4 commands | 2 commands (status, doctor) |
|
||||||
|
| Agent decision space for "file query" | 3 commands + 2 who modes | 1 command (trace) + 2 who modes |
|
||||||
|
| Entity type parse errors from singular/plural | Common | Eliminated |
|
||||||
|
| Estimated token cost of robot-docs | Baseline | ~15% reduction (fewer entries, hidden flagged) |
|
||||||
|
|
||||||
|
### What Stays Exactly The Same
|
||||||
|
|
||||||
|
- All 33 functional commands remain callable (nothing is removed)
|
||||||
|
- All existing flags and their behavior are preserved
|
||||||
|
- All response schemas are unchanged
|
||||||
|
- All exit codes are unchanged
|
||||||
|
- The autocorrect system continues to work
|
||||||
|
- All hidden/deprecated commands emit their existing warnings
|
||||||
|
|
||||||
|
### What Breaks (Intentional)
|
||||||
|
|
||||||
|
- `lore count -f mr` (the `-f` shorthand) -- must use `--for` instead
|
||||||
|
- `lore --help` layout changes (commands are grouped, 8 commands hidden)
|
||||||
|
- `lore robot-docs` output changes (new `hidden` field, renamed keys)
|
||||||
|
- Any scripts parsing `--help` text (but `robot-docs` is the stable contract)
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
1. **Make `gitlab_note_id` explicit in all note-level payloads without breaking existing consumers**
|
1. **Make `gitlab_note_id` explicit in all note-level payloads without breaking existing consumers**
|
||||||
Rationale: Your Bridge Contract already requires `gitlab_note_id`, but current plan keeps `gitlab_id` only in `notes` list while adding `gitlab_note_id` only in `show`. That forces agents to special-case commands. Add `gitlab_note_id` as an alias field everywhere note-level data appears, while keeping `gitlab_id` for compatibility.
|
Rationale: Your Bridge Contract already requires `gitlab_note_id`, but current plan keeps `gitlab_id` only in `notes` list while adding `gitlab_note_id` only in detail views. That forces agents to special-case commands. Add `gitlab_note_id` as an alias field everywhere note-level data appears, while keeping `gitlab_id` for compatibility.
|
||||||
|
|
||||||
```diff
|
```diff
|
||||||
@@ Bridge Contract (Cross-Cutting)
|
@@ Bridge Contract (Cross-Cutting)
|
||||||
|
|||||||
@@ -43,7 +43,7 @@ construct API calls without a separate project-ID lookup, even after path change
|
|||||||
**Back-compat rule**: Note payloads in the `notes` list command continue exposing `gitlab_id`
|
**Back-compat rule**: Note payloads in the `notes` list command continue exposing `gitlab_id`
|
||||||
for existing consumers, but **MUST also** expose `gitlab_note_id` with the same value. This
|
for existing consumers, but **MUST also** expose `gitlab_note_id` with the same value. This
|
||||||
ensures agents can use a single field name (`gitlab_note_id`) across all commands — `notes`,
|
ensures agents can use a single field name (`gitlab_note_id`) across all commands — `notes`,
|
||||||
`show`, and `discussions --include-notes` — without special-casing by command.
|
`issues <IID>`/`mrs <IID>`, and `discussions --include-notes` — without special-casing by command.
|
||||||
|
|
||||||
This contract exists so agents can deterministically construct `glab api` write calls without
|
This contract exists so agents can deterministically construct `glab api` write calls without
|
||||||
cross-referencing multiple commands. Each workstream below must satisfy these fields in its
|
cross-referencing multiple commands. Each workstream below must satisfy these fields in its
|
||||||
|
|||||||
@@ -22,6 +22,15 @@ Asupersync is a cancel-correct async runtime with region-owned tasks, obligation
|
|||||||
- Pre-1.0 runtime dependency (mitigated by adapter layer + version pinning)
|
- Pre-1.0 runtime dependency (mitigated by adapter layer + version pinning)
|
||||||
- Deeper function signature changes for Cx threading
|
- Deeper function signature changes for Cx threading
|
||||||
|
|
||||||
|
### Why not tokio CancellationToken + JoinSet?
|
||||||
|
|
||||||
|
The core problems (Ctrl+C drops requests, no structured cancellation) *can* be fixed without replacing the runtime. Tokio's `CancellationToken` + `JoinSet` + explicit task tracking gives structured cancellation for fan-out patterns. This was considered and rejected for two reasons:
|
||||||
|
|
||||||
|
1. **Obligation tracking is the real win.** CancellationToken/JoinSet fix the "cancel cleanly" problem but don't give us obligation tracking (compile-time proof that all spawned work is awaited) or deterministic lab testing. These are the features that prevent *future* concurrency bugs, not just the current Ctrl+C issue.
|
||||||
|
2. **Separation of concerns.** Fixing Ctrl+C with tokio primitives first, then migrating the runtime second, doubles the migration effort (rewrite fan-out twice). Since we have no users and no backwards compatibility concerns, a single clean migration is lower total cost.
|
||||||
|
|
||||||
|
If asupersync proves unviable (nightly breakage, API instability), the fallback is exactly this: tokio + CancellationToken + JoinSet.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Current Tokio Usage Inventory
|
## Current Tokio Usage Inventory
|
||||||
@@ -103,6 +112,8 @@ let delay = self.rate_limiter.lock().expect("rate limiter poisoned").check_delay
|
|||||||
|
|
||||||
Note: `.expect()` over `.unwrap()` for clarity. Poisoning is near-impossible here (the critical section is a trivial `Instant::now()` check), but the explicit message aids debugging if it ever fires.
|
Note: `.expect()` over `.unwrap()` for clarity. Poisoning is near-impossible here (the critical section is a trivial `Instant::now()` check), but the explicit message aids debugging if it ever fires.
|
||||||
|
|
||||||
|
**Contention constraint:** `std::sync::Mutex` blocks the executor thread while held. This is safe *only* because the critical section is a single `Instant::now()` comparison with no I/O. If the rate limiter ever grows to include async work (HTTP calls, DB queries), it must move back to an async-aware lock. Document this constraint with a comment at the lock site.
|
||||||
|
|
||||||
### 0c. Replace tokio::join! with futures::join!
|
### 0c. Replace tokio::join! with futures::join!
|
||||||
|
|
||||||
In `gitlab/client.rs:729,736`. `futures::join!` is runtime-agnostic and already in deps.
|
In `gitlab/client.rs:729,736`. `futures::join!` is runtime-agnostic and already in deps.
|
||||||
@@ -116,7 +127,7 @@ In `gitlab/client.rs:729,736`. `futures::join!` is runtime-agnostic and already
|
|||||||
|
|
||||||
## Phase 0d: Error Type Migration (must precede adapter layer)
|
## Phase 0d: Error Type Migration (must precede adapter layer)
|
||||||
|
|
||||||
The adapter layer (Phase 1) uses `GitLabNetworkError { detail: Option<String> }`, which requires the error type change from Phase 4. Move this change up front so Phases 1-3 compile as a unit.
|
The adapter layer (Phase 1) uses `GitLabNetworkError { detail: Option<String> }`, which requires this error type change before the adapter compiles. Placed here so Phases 1-3 compile as a unit.
|
||||||
|
|
||||||
### `src/core/error.rs`
|
### `src/core/error.rs`
|
||||||
|
|
||||||
@@ -137,6 +148,33 @@ GitLabNetworkError {
|
|||||||
|
|
||||||
The adapter layer stringifies HTTP client errors at the boundary so `LoreError` doesn't depend on any HTTP client's error types. This also means the existing reqwest call sites that construct `GitLabNetworkError` must be updated to pass `detail: Some(format!("{e:?}"))` instead of `source: Some(e)` -- but those sites are rewritten in Phase 2 anyway, so no extra work.
|
The adapter layer stringifies HTTP client errors at the boundary so `LoreError` doesn't depend on any HTTP client's error types. This also means the existing reqwest call sites that construct `GitLabNetworkError` must be updated to pass `detail: Some(format!("{e:?}"))` instead of `source: Some(e)` -- but those sites are rewritten in Phase 2 anyway, so no extra work.
|
||||||
|
|
||||||
|
**Note on error granularity:** Flattening all HTTP errors to `detail: Option<String>` loses the distinction between timeouts, TLS failures, DNS resolution failures, and connection resets. To preserve actionable error categories without coupling `LoreError` to any HTTP client, add a lightweight `NetworkErrorKind` enum:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||||
|
pub enum NetworkErrorKind {
|
||||||
|
Timeout,
|
||||||
|
ConnectionRefused,
|
||||||
|
DnsResolution,
|
||||||
|
Tls,
|
||||||
|
Other,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[error("Cannot connect to GitLab at {base_url}")]
|
||||||
|
GitLabNetworkError {
|
||||||
|
base_url: String,
|
||||||
|
kind: NetworkErrorKind,
|
||||||
|
detail: Option<String>,
|
||||||
|
},
|
||||||
|
```
|
||||||
|
|
||||||
|
The adapter's `execute()` method classifies errors at the boundary:
|
||||||
|
- Timeout from `asupersync::time::timeout` → `NetworkErrorKind::Timeout`
|
||||||
|
- Transport errors from the HTTP client → classified by error type into the appropriate kind
|
||||||
|
- Unknown errors → `NetworkErrorKind::Other`
|
||||||
|
|
||||||
|
This keeps `LoreError` client-agnostic while preserving the ability to make retry decisions based on error *type* (e.g., retry on timeout but not on TLS). The adapter's `execute()` method is the single place where this mapping happens, so adding new kinds is localized.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Phase 1: Build the HTTP Adapter Layer
|
## Phase 1: Build the HTTP Adapter Layer
|
||||||
@@ -232,9 +270,14 @@ impl Client {
|
|||||||
|
|
||||||
let raw = timeout(self.timeout, self.inner.request(method, url, header_tuples, body))
|
let raw = timeout(self.timeout, self.inner.request(method, url, header_tuples, body))
|
||||||
.await
|
.await
|
||||||
.map_err(|_| LoreError::Other(format!("Request timed out after {:?}", self.timeout)))?
|
.map_err(|_| LoreError::GitLabNetworkError {
|
||||||
|
base_url: url.to_string(),
|
||||||
|
kind: NetworkErrorKind::Timeout,
|
||||||
|
detail: Some(format!("Request timed out after {:?}", self.timeout)),
|
||||||
|
})?
|
||||||
.map_err(|e| LoreError::GitLabNetworkError {
|
.map_err(|e| LoreError::GitLabNetworkError {
|
||||||
base_url: url.to_string(),
|
base_url: url.to_string(),
|
||||||
|
kind: classify_transport_error(&e),
|
||||||
detail: Some(format!("{e:?}")),
|
detail: Some(format!("{e:?}")),
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
@@ -280,6 +323,13 @@ impl Response {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Appends query parameters to a URL.
|
||||||
|
///
|
||||||
|
/// Edge cases handled:
|
||||||
|
/// - URLs with existing `?query` → appends with `&`
|
||||||
|
/// - URLs with `#fragment` → inserts query before fragment
|
||||||
|
/// - Empty params → returns URL unchanged
|
||||||
|
/// - Repeated keys → preserved as-is (GitLab API uses repeated `labels[]`)
|
||||||
fn append_query_params(url: &str, params: &[(&str, String)]) -> String {
|
fn append_query_params(url: &str, params: &[(&str, String)]) -> String {
|
||||||
if params.is_empty() {
|
if params.is_empty() {
|
||||||
return url.to_string();
|
return url.to_string();
|
||||||
@@ -289,14 +339,43 @@ fn append_query_params(url: &str, params: &[(&str, String)]) -> String {
|
|||||||
.map(|(k, v)| format!("{}={}", urlencoding::encode(k), urlencoding::encode(v)))
|
.map(|(k, v)| format!("{}={}", urlencoding::encode(k), urlencoding::encode(v)))
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join("&");
|
.join("&");
|
||||||
if url.contains('?') {
|
|
||||||
format!("{url}&{query}")
|
// Preserve URL fragments: split on '#', insert query, rejoin
|
||||||
|
let (base, fragment) = match url.split_once('#') {
|
||||||
|
Some((b, f)) => (b, Some(f)),
|
||||||
|
None => (url, None),
|
||||||
|
};
|
||||||
|
let with_query = if base.contains('?') {
|
||||||
|
format!("{base}&{query}")
|
||||||
} else {
|
} else {
|
||||||
format!("{url}?{query}")
|
format!("{base}?{query}")
|
||||||
|
};
|
||||||
|
match fragment {
|
||||||
|
Some(f) => format!("{with_query}#{f}"),
|
||||||
|
None => with_query,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Response body size guard
|
||||||
|
|
||||||
|
The adapter buffers entire response bodies in memory (`Vec<u8>`). A misconfigured endpoint or unexpected redirect to a large file could cause unbounded memory growth. Add a max response body size check in `execute()`:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
const MAX_RESPONSE_BODY_BYTES: usize = 64 * 1024 * 1024; // 64 MiB — generous for JSON, catches runaways
|
||||||
|
|
||||||
|
// In execute(), after receiving raw response:
|
||||||
|
if raw.body.len() > MAX_RESPONSE_BODY_BYTES {
|
||||||
|
return Err(LoreError::Other(format!(
|
||||||
|
"Response body too large: {} bytes (max {})",
|
||||||
|
raw.body.len(),
|
||||||
|
MAX_RESPONSE_BODY_BYTES,
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This is a safety net, not a tight constraint. GitLab JSON responses are typically < 1 MiB. Ollama embedding responses are < 100 KiB per batch. The 64 MiB limit catches runaways without interfering with normal operation.
|
||||||
|
|
||||||
### Timeout behavior
|
### Timeout behavior
|
||||||
|
|
||||||
Every request is wrapped with `asupersync::time::timeout(self.timeout, ...)`. Default timeouts:
|
Every request is wrapped with `asupersync::time::timeout(self.timeout, ...)`. Default timeouts:
|
||||||
@@ -498,6 +577,8 @@ pub async fn install_ctrl_c_handler(cx: &Cx, signal: ShutdownSignal) {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**Cleanup concern:** `std::process::exit(130)` on second Ctrl+C bypasses all drop guards, flush operations, and asupersync region cleanup. This is intentional (user demanded hard exit) but means any in-progress DB transaction will be abandoned mid-write. SQLite's journaling makes this safe (uncommitted transactions are rolled back on next open), but verify this holds for WAL mode if enabled. Consider logging a warning before exit so users understand incomplete operations may need re-sync.
|
||||||
|
|
||||||
### 3e. Rate limiter sleep
|
### 3e. Rate limiter sleep
|
||||||
|
|
||||||
```rust
|
```rust
|
||||||
@@ -545,26 +626,56 @@ cx.region(|scope| async {
|
|||||||
let prefetched_batch: Vec<_> = rx.into_iter().collect();
|
let prefetched_batch: Vec<_> = rx.into_iter().collect();
|
||||||
```
|
```
|
||||||
|
|
||||||
Note: The exact result-collection pattern depends on asupersync's region API. If `scope.spawn()` returns a `JoinHandle<T>`, prefer collecting handles and awaiting them. The channel pattern above works regardless of API shape.
|
**IMPORTANT: Semantic differences beyond ordering.** Replacing `join_all` with region-spawned tasks changes four behaviors:
|
||||||
|
|
||||||
|
1. **Ordering:** `join_all` preserves input order — results\[i\] corresponds to futures\[i\]. The `std::sync::mpsc` channel pattern does NOT (results arrive in completion order). If downstream logic assumes positional alignment (e.g., zipping results with input items by index), this is a silent correctness bug. Options:
|
||||||
|
- Send `(index, result)` tuples through the channel and sort by index after collection.
|
||||||
|
- If `scope.spawn()` returns a `JoinHandle<T>`, collect handles in order and await them sequentially.
|
||||||
|
|
||||||
|
2. **Error aggregation:** `join_all` runs all futures to completion even if some fail, collecting all results. Region-spawned tasks with a channel will also run all tasks, but if the region is cancelled mid-flight (e.g., Ctrl+C), some results are lost. Decide per call site: should partial results be processed, or should the entire batch be retried?
|
||||||
|
|
||||||
|
3. **Backpressure:** `join_all` with N futures creates N concurrent tasks. Region-spawned tasks behave similarly, but if the region has concurrency limits, backpressure semantics change. Verify asupersync's region API does not impose implicit concurrency caps.
|
||||||
|
|
||||||
|
4. **Late result loss on cancellation:** When a region is cancelled, tasks that have completed but whose results haven't been received yet may have already sent to the channel. However, tasks that are mid-flight will be dropped, and their results never sent. The channel receiver must drain whatever was sent, but the caller must treat a cancelled region's results as incomplete — never assume all N results arrived. Document per call site whether partial results are safe to process or whether the entire batch should be discarded on cancellation.
|
||||||
|
|
||||||
|
Audit every `join_all` call site for all four assumptions before choosing the pattern.
|
||||||
|
|
||||||
|
Note: The exact result-collection pattern depends on asupersync's region API. If `scope.spawn()` returns a `JoinHandle<T>`, prefer collecting handles and awaiting them (preserves ordering and simplifies error handling).
|
||||||
|
|
||||||
This is the biggest payoff: if Ctrl+C fires during a prefetch batch, the region cancels all in-flight HTTP requests with bounded cleanup instead of silently dropping them.
|
This is the biggest payoff: if Ctrl+C fires during a prefetch batch, the region cancels all in-flight HTTP requests with bounded cleanup instead of silently dropping them.
|
||||||
|
|
||||||
**Estimated signature changes:** ~15 functions gain a `cx: &Cx` parameter.
|
**Estimated signature changes:** ~15 functions gain a `cx: &Cx` parameter.
|
||||||
|
|
||||||
---
|
**Phasing the Cx threading (risk reduction):** Rather than threading `cx` through all ~15 functions at once, split into two steps:
|
||||||
|
|
||||||
## Phase 4: (Moved to Phase 0d)
|
- **Step 1:** Thread `cx` through the orchestration path only (`main.rs` dispatch → `run_sync`/`run_ingest` → orchestrator functions). This is where region-wrapping `join_all` batches happens — the actual cancellation payoff. Verify invariants pass.
|
||||||
|
- **Step 2:** Widen to the command layer and embedding pipeline (`run_embed`, `embed_documents`, `embed_batch_group`, `sync_surgical`). These are lower-risk since they don't have the same fan-out patterns.
|
||||||
|
|
||||||
Error type migration was moved to Phase 0d to resolve a compile-order dependency: the adapter layer (Phase 1) uses the new `GitLabNetworkError { detail }` shape.
|
This reduces the blast radius of Step 1 and provides an earlier validation checkpoint. If Step 1 surfaces problems, Step 2 hasn't been started yet.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Phase 5: Test Migration
|
## Phase 4: Test Migration
|
||||||
|
|
||||||
### Keep on `#[tokio::test]` (wiremock tests -- 42 tests)
|
### Keep on `#[tokio::test]` (wiremock tests -- 42 tests)
|
||||||
|
|
||||||
No changes. `tokio` is in `[dev-dependencies]` with `features = ["rt", "macros"]`.
|
No changes. `tokio` is in `[dev-dependencies]` with `features = ["rt", "macros"]`.
|
||||||
|
|
||||||
|
**Coverage gap:** These tests validate protocol correctness (request format, response parsing, status code handling, pagination) through the adapter layer, but they do NOT exercise asupersync's runtime behavior (timeouts, connection pooling, cancellation). This is acceptable because:
|
||||||
|
1. Protocol correctness is the higher-value test target — it catches most regressions
|
||||||
|
2. Runtime-specific behavior is covered by the new cancellation integration tests (below)
|
||||||
|
3. The adapter layer is thin enough that runtime differences are unlikely to affect request/response semantics
|
||||||
|
|
||||||
|
**Adapter-layer test gap:** The 42 wiremock tests validate protocol correctness (request format, response parsing) but run on tokio, not asupersync. This means the adapter's actual behavior under the production runtime is untested by mocked-response tests. To close this gap, add 3-5 asupersync-native integration tests that exercise the adapter against a simple HTTP server (e.g., `hyper` or a raw TCP listener) rather than wiremock:
|
||||||
|
|
||||||
|
1. **GET with headers + JSON response** — verify header passing and JSON deserialization through the adapter.
|
||||||
|
2. **POST with JSON body** — verify Content-Type injection and body serialization.
|
||||||
|
3. **429 + Retry-After** — verify the adapter surfaces rate-limit responses correctly.
|
||||||
|
4. **Timeout** — verify the adapter's `asupersync::time::timeout` wrapper fires.
|
||||||
|
5. **Large response rejection** — verify the body size guard triggers.
|
||||||
|
|
||||||
|
These tests are cheap to write (~50 LOC each) and close the "works on tokio but does it work on asupersync?" gap that GPT 5.3 flagged.
|
||||||
|
|
||||||
| File | Tests |
|
| File | Tests |
|
||||||
|------|-------|
|
|------|-------|
|
||||||
| `gitlab/graphql_tests.rs` | 30 |
|
| `gitlab/graphql_tests.rs` | 30 |
|
||||||
@@ -582,9 +693,19 @@ No changes. `tokio` is in `[dev-dependencies]` with `features = ["rt", "macros"]
|
|||||||
|
|
||||||
No changes needed.
|
No changes needed.
|
||||||
|
|
||||||
|
### New: Cancellation integration tests (asupersync-native)
|
||||||
|
|
||||||
|
Wiremock tests on tokio validate protocol/serialization correctness but cannot test asupersync's cancellation and region semantics. Add asupersync-native integration tests for:
|
||||||
|
|
||||||
|
1. **Ctrl+C during fan-out:** Simulate cancellation mid-batch in orchestrator. Verify all in-flight tasks are drained, no task leaks, no obligation leaks.
|
||||||
|
2. **Region quiescence:** Verify that after a region completes (normal or cancelled), no background tasks remain running.
|
||||||
|
3. **Transaction integrity under cancellation:** Cancel during an ingestion batch that has fetched data but not yet written to DB. Verify no partial data is committed.
|
||||||
|
|
||||||
|
These tests use asupersync's deterministic lab runtime, which is one of the primary motivations for this migration.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Phase 6: Verify and Harden
|
## Phase 5: Verify and Harden
|
||||||
|
|
||||||
### Verification checklist
|
### Verification checklist
|
||||||
|
|
||||||
@@ -603,25 +724,39 @@ cargo test
|
|||||||
4. **Pagination streams** -- Do `async_stream::stream!` pagination generators work unchanged?
|
4. **Pagination streams** -- Do `async_stream::stream!` pagination generators work unchanged?
|
||||||
5. **Wiremock test isolation** -- Do wiremock tests pass with tokio only in dev-deps?
|
5. **Wiremock test isolation** -- Do wiremock tests pass with tokio only in dev-deps?
|
||||||
|
|
||||||
### Reqwest behavioral differences to audit
|
### HTTP behavior parity acceptance criteria
|
||||||
|
|
||||||
reqwest provides several implicit behaviors that asupersync's h1 client may not. Verify each:
|
reqwest provides several implicit behaviors that asupersync's h1 client may not. Each must pass a concrete acceptance test before the migration is considered complete:
|
||||||
|
|
||||||
| reqwest default | gitlore relies on it? | asupersync equivalent |
|
| reqwest default | Acceptance criterion | Pass/Fail test |
|
||||||
|-----------------|----------------------|----------------------|
|
|-----------------|---------------------|----------------|
|
||||||
| Automatic redirect following (up to 10) | Unlikely (GitLab API doesn't redirect) | Verify: if 3xx is returned, does gitlore handle it? |
|
| Automatic redirect following (up to 10) | If GitLab returns 3xx, gitlore must not silently lose the response. Either follow the redirect or surface a clear error. | Send a request to wiremock returning 301 → verify adapter returns the redirect status (not an opaque failure) |
|
||||||
| Automatic gzip/deflate decompression | No (JSON responses are small) | Not needed |
|
| Automatic gzip/deflate decompression | Not required — JSON responses are small. | N/A (no test needed) |
|
||||||
| Proxy from `HTTP_PROXY`/`HTTPS_PROXY` env | Possibly (corporate environments) | Must verify asupersync proxy support |
|
| Proxy from `HTTP_PROXY`/`HTTPS_PROXY` env | If `HTTP_PROXY` is set, requests must route through it. If asupersync lacks proxy support, document this as a known limitation. | Set `HTTP_PROXY=http://127.0.0.1:9999` → verify connection attempt targets the proxy, or document that proxy is unsupported |
|
||||||
| Connection keep-alive | Yes (pagination batches) | Covered by PoolConfig |
|
| Connection keep-alive | Pagination batches (4-8 sequential requests to same host) must reuse connections. | Measure with `ss`/`netstat`: 8 paginated requests should use ≤2 TCP connections |
|
||||||
| System DNS resolution | Yes | Should be same (OS-level) |
|
| System DNS resolution | Hostnames must resolve via OS resolver. | Verify `lore sync` works against a hostname (not just IP) |
|
||||||
|
| Request body Content-Length | POST requests must include Content-Length header (some proxies/WAFs require it). | Inspect outgoing request headers in wiremock test |
|
||||||
|
| TLS certificate validation | HTTPS requests must validate server certificates using system CA store. | Verify `lore sync` succeeds against production GitLab (valid cert) and fails against self-signed cert |
|
||||||
|
|
||||||
### Cancellation + DB transaction alignment
|
### Cancellation + DB transaction invariants
|
||||||
|
|
||||||
Region-based cancellation stops HTTP tasks cleanly, but partial ingestion can leave the database in an inconsistent state if cancellation fires between "fetched data" and "wrote to DB". Verify:
|
Region-based cancellation stops HTTP tasks cleanly, but partial ingestion can leave the database in an inconsistent state if cancellation fires between "fetched data" and "wrote to DB". The following invariants must hold and be tested:
|
||||||
|
|
||||||
- All DB writes in ingestion batches use `unchecked_transaction()` (already the case for most ingestion paths)
|
**INV-1: Atomic batch writes.** Each ingestion batch (issues, MRs, discussions) writes to the DB inside a single `unchecked_transaction()`. If the transaction is not committed, no partial data from that batch is visible. This is already the case for most ingestion paths — audit all paths and fix any that write outside a transaction.
|
||||||
- Transaction boundaries align with region scope: a cancelled region should not leave partial batch data committed
|
|
||||||
- The existing `ShutdownSignal` check-before-write pattern in orchestrator loops remains functional alongside region cancellation
|
**INV-2: Region cancellation cannot corrupt committed data.** A cancelled region may abandon in-flight HTTP requests, but it must not interrupt a DB transaction mid-write. This holds naturally because SQLite transactions are synchronous (not async) — once `tx.execute()` starts, it runs to completion on the current thread regardless of task cancellation. Verify this assumption holds for WAL mode.
|
||||||
|
|
||||||
|
**Hard rule: no `.await` between transaction open and commit/rollback.** Cancellation can fire at any `.await` point. If an `.await` exists between `unchecked_transaction()` and `tx.commit()`, a cancelled region could drop the transaction guard mid-batch, rolling back partial writes silently. Audit all ingestion paths to confirm this invariant holds. If any path must do async work mid-transaction (e.g., fetching related data), restructure to fetch-then-write: complete all async work first, then open the transaction, write synchronously, and commit.
|
||||||
|
|
||||||
|
**INV-3: No partial batch visibility.** If cancellation fires after fetching N items but before the batch transaction commits, zero items from that batch are persisted. The next sync picks up where it left off using cursor-based pagination.
|
||||||
|
|
||||||
|
**INV-4: ShutdownSignal + region cancellation are complementary.** The existing `ShutdownSignal` check-before-write pattern in orchestrator loops (`if signal.is_cancelled() { break; }`) remains the first line of defense. Region cancellation is the second — it ensures in-flight HTTP tasks are drained even if the orchestrator loop has already moved past the signal check. Both mechanisms must remain active.
|
||||||
|
|
||||||
|
**Test plan for invariants:**
|
||||||
|
- INV-1: Cancellation integration test — cancel mid-batch, verify DB has zero partial rows from that batch
|
||||||
|
- INV-2: Verify `unchecked_transaction()` commit is not interruptible by task cancellation (lab runtime test)
|
||||||
|
- INV-3: Cancel after fetch, re-run sync, verify no duplicates and no gaps
|
||||||
|
- INV-4: Verify both ShutdownSignal and region cancellation are triggered on Ctrl+C
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -660,6 +795,9 @@ Phase 0a-0c (prep, safe, independent)
|
|||||||
Phase 0d (error type migration -- required before adapter compiles)
|
Phase 0d (error type migration -- required before adapter compiles)
|
||||||
|
|
|
|
||||||
v
|
v
|
||||||
|
DECISION GATE: verify nightly + asupersync + tls-native-roots compile AND behavioral smoke tests pass
|
||||||
|
|
|
||||||
|
v
|
||||||
Phase 1 (adapter layer, compiles but unused) ----+
|
Phase 1 (adapter layer, compiles but unused) ----+
|
||||||
| |
|
| |
|
||||||
v | These 3 are one
|
v | These 3 are one
|
||||||
@@ -669,16 +807,47 @@ Phase 2 (migrate 3 HTTP modules to adapter) ------+ atomic commit
|
|||||||
Phase 3 (swap runtime, Cx threading) ------------+
|
Phase 3 (swap runtime, Cx threading) ------------+
|
||||||
|
|
|
|
||||||
v
|
v
|
||||||
Phase 5 (test migration)
|
Phase 4 (test migration)
|
||||||
|
|
|
|
||||||
v
|
v
|
||||||
Phase 6 (verify + harden)
|
Phase 5 (verify + harden)
|
||||||
```
|
```
|
||||||
|
|
||||||
Phase 0a-0c can be committed independently (good cleanup regardless).
|
Phase 0a-0c can be committed independently (good cleanup regardless).
|
||||||
Phase 0d (error types) can also land independently, but MUST precede the adapter layer.
|
Phase 0d (error types) can also land independently, but MUST precede the adapter layer.
|
||||||
|
**Decision gate:** After Phase 0d, create `rust-toolchain.toml` with nightly pin and verify `asupersync = "0.2"` compiles with `tls-native-roots` on macOS. Then run behavioral smoke tests in a throwaway binary or integration test:
|
||||||
|
|
||||||
|
1. **TLS validation:** HTTPS GET to a public endpoint (e.g., `https://gitlab.com/api/v4/version`) succeeds with valid cert.
|
||||||
|
2. **DNS resolution:** Request using hostname (not IP) resolves correctly.
|
||||||
|
3. **Redirect handling:** GET to a 301/302 endpoint — verify the adapter returns the redirect status (not an opaque error) so call sites can decide whether to follow.
|
||||||
|
4. **Timeout behavior:** Request to a slow/non-responsive endpoint times out within the configured duration.
|
||||||
|
5. **Connection pooling:** 4 sequential requests to the same host reuse connections (verify via debug logging or `ss`/`netstat`).
|
||||||
|
|
||||||
|
If compilation fails or any behavioral test reveals a showstopper (e.g., TLS doesn't work on macOS, timeouts don't fire), stop and evaluate the tokio CancellationToken fallback before investing in Phases 1-3.
|
||||||
|
|
||||||
|
Compile-only gating is insufficient — this migration's failure modes are semantic (HTTP behavior parity), not just syntactic.
|
||||||
|
|
||||||
Phases 1-3 must land together (removing reqwest requires both the adapter AND the new runtime).
|
Phases 1-3 must land together (removing reqwest requires both the adapter AND the new runtime).
|
||||||
Phases 5-6 are cleanup that can be incremental.
|
Phases 4-5 are cleanup that can be incremental.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Rollback Strategy
|
||||||
|
|
||||||
|
If the migration stalls or asupersync proves unviable after partial completion:
|
||||||
|
|
||||||
|
- **Phase 0a-0c completed:** No rollback needed. These are independently valuable cleanup regardless of runtime choice.
|
||||||
|
- **Phase 0d completed:** `GitLabNetworkError { detail }` is runtime-agnostic. Keep it.
|
||||||
|
- **Phases 1-3 partially completed:** These must land atomically. If any phase in 1-3 fails, revert the entire atomic commit. The adapter layer (Phase 1) imports asupersync types, so it cannot exist without the runtime.
|
||||||
|
- **Full rollback to tokio:** If asupersync is abandoned entirely, the fallback path is tokio + `CancellationToken` + `JoinSet` (see "Why not tokio CancellationToken + JoinSet?" above). The adapter layer design is still valid — swap `asupersync::http` for `reqwest` behind the same `crate::http::Client` API.
|
||||||
|
|
||||||
|
**Decision point:** After Phase 0 is complete, verify asupersync compiles on the pinned nightly with `tls-native-roots` before committing to Phases 1-3. If TLS or nightly issues surface, stop and evaluate the tokio fallback.
|
||||||
|
|
||||||
|
**Concrete escape hatch triggers (abandon asupersync, fall back to tokio + CancellationToken + JoinSet):**
|
||||||
|
1. **Nightly breakage > 7 days:** If the pinned nightly breaks and no newer nightly restores compilation within 7 days, abort.
|
||||||
|
2. **TLS incompatibility:** If `tls-native-roots` cannot validate certificates on macOS (system CA store) and `tls-webpki-roots` also fails, abort.
|
||||||
|
3. **API instability:** If asupersync releases a breaking change to `HttpClient`, `region()`, or `Cx` APIs before our migration is complete, evaluate migration cost. If > 2 days of rework, abort.
|
||||||
|
4. **Wiremock incompatibility:** If keeping wiremock tests on tokio while production runs asupersync causes test failures or flaky behavior that cannot be resolved in 1 day, abort.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -687,10 +856,12 @@ Phases 5-6 are cleanup that can be incremental.
|
|||||||
| Risk | Severity | Mitigation |
|
| Risk | Severity | Mitigation |
|
||||||
|------|----------|------------|
|
|------|----------|------------|
|
||||||
| asupersync pre-1.0 API changes | High | Adapter layer isolates call sites. Pin exact version. |
|
| asupersync pre-1.0 API changes | High | Adapter layer isolates call sites. Pin exact version. |
|
||||||
| Nightly Rust breakage | Medium | Pin nightly date in rust-toolchain.toml. CI tests on nightly. |
|
| Nightly Rust breakage | Medium-High | Pin nightly date in rust-toolchain.toml. CI tests on nightly. Coupling runtime + toolchain migration amplifies risk — escape hatch triggers defined in Rollback Strategy. |
|
||||||
| TLS cert issues on macOS | Medium | Test early in Phase 6. Fallback: `tls-webpki-roots` (Mozilla bundle). |
|
| TLS cert issues on macOS | Medium | Test early in Phase 5. Fallback: `tls-webpki-roots` (Mozilla bundle). |
|
||||||
| Connection pool behavior under load | Medium | Stress test with `join_all` of 8+ concurrent requests in Phase 6. |
|
| Connection pool behavior under load | Medium | Stress test with `join_all` of 8+ concurrent requests in Phase 5. |
|
||||||
| async-stream nightly compat | Low | Widely used crate, likely fine. Fallback: manual Stream impl. |
|
| async-stream nightly compat | Low | Widely used crate, likely fine. Fallback: manual Stream impl. |
|
||||||
| Build time increase | Low | Measure before/after. asupersync may be heavier than tokio. |
|
| Build time increase | Low | Measure before/after. asupersync may be heavier than tokio. |
|
||||||
| Reqwest behavioral drift | Medium | reqwest has implicit redirect/proxy/compression handling. Audit each (see Phase 6 table). GitLab API doesn't redirect, so low actual risk. |
|
| Reqwest behavioral drift | Medium | reqwest has implicit redirect/proxy/compression handling. Audit each (see Phase 5 table). GitLab API doesn't redirect, so low actual risk. |
|
||||||
| Partial ingestion on cancel | Medium | Region cancellation can fire between HTTP fetch and DB write. Verify transaction boundaries align with region scope (see Phase 6). |
|
| Partial ingestion on cancel | Medium | Region cancellation can fire between HTTP fetch and DB write. Verify transaction boundaries align with region scope (see Phase 5). |
|
||||||
|
| Unbounded response body buffering | Low | Adapter buffers full response bodies. Mitigated by 64 MiB size guard in adapter `execute()`. |
|
||||||
|
| Manual URL/header handling correctness | Low-Medium | `append_query_params` and case-insensitive header scans replicate reqwest behavior manually. Mitigated by unit tests for edge cases (existing query params, fragments, repeated keys, case folding). |
|
||||||
|
|||||||
@@ -107,12 +107,12 @@ Each criterion is independently testable. Implementation is complete when ALL pa
|
|||||||
|
|
||||||
### AC-7: Show Issue Display (E2E)
|
### AC-7: Show Issue Display (E2E)
|
||||||
|
|
||||||
**Human (`lore show issue 123`):**
|
**Human (`lore issues 123`):**
|
||||||
- [ ] New line after "State": `Status: In progress` (colored by `status_color` hex → nearest terminal color)
|
- [ ] New line after "State": `Status: In progress` (colored by `status_color` hex → nearest terminal color)
|
||||||
- [ ] Status line only shown when `status_name IS NOT NULL`
|
- [ ] Status line only shown when `status_name IS NOT NULL`
|
||||||
- [ ] Category shown in parens when available, lowercased: `Status: In progress (in_progress)`
|
- [ ] Category shown in parens when available, lowercased: `Status: In progress (in_progress)`
|
||||||
|
|
||||||
**Robot (`lore --robot show issue 123`):**
|
**Robot (`lore --robot issues 123`):**
|
||||||
- [ ] JSON includes `status_name`, `status_category`, `status_color`, `status_icon_name`, `status_synced_at` fields
|
- [ ] JSON includes `status_name`, `status_category`, `status_color`, `status_icon_name`, `status_synced_at` fields
|
||||||
- [ ] Fields are `null` (not absent) when status not available
|
- [ ] Fields are `null` (not absent) when status not available
|
||||||
- [ ] `status_synced_at` is integer (ms epoch UTC) or `null` — enables freshness checks by consumers
|
- [ ] `status_synced_at` is integer (ms epoch UTC) or `null` — enables freshness checks by consumers
|
||||||
|
|||||||
2
rust-toolchain.toml
Normal file
2
rust-toolchain.toml
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
[toolchain]
|
||||||
|
channel = "nightly-2026-03-01"
|
||||||
729
specs/SPEC_discussion_analysis.md
Normal file
729
specs/SPEC_discussion_analysis.md
Normal file
@@ -0,0 +1,729 @@
|
|||||||
|
# Spec: Discussion Analysis — LLM-Powered Discourse Enrichment
|
||||||
|
|
||||||
|
**Parent:** SPEC_explain.md (replaces key_decisions heuristic, line 270)
|
||||||
|
**Created:** 2026-03-11
|
||||||
|
**Status:** DRAFT — iterating with user
|
||||||
|
|
||||||
|
## Spec Status
|
||||||
|
| Section | Status | Notes |
|
||||||
|
|---------|--------|-------|
|
||||||
|
| Objective | draft | Core vision defined, success metrics TBD |
|
||||||
|
| Tech Stack | draft | Bedrock + Anthropic API dual-backend |
|
||||||
|
| Architecture | draft | Pre-computed enrichment pipeline |
|
||||||
|
| Schema | draft | `discussion_analysis` table with staleness detection |
|
||||||
|
| CLI Command | draft | `lore enrich discussions` |
|
||||||
|
| LLM Provider | draft | Configurable backend abstraction |
|
||||||
|
| Explain Integration | draft | Replaces heuristic with DB lookup |
|
||||||
|
| Prompt Design | draft | Thread-level discourse classification |
|
||||||
|
| Testing Strategy | draft | Includes mock LLM for deterministic tests |
|
||||||
|
| Boundaries | draft | |
|
||||||
|
| Tasks | not started | Blocked on spec approval |
|
||||||
|
|
||||||
|
**Definition of Complete:** All sections `complete`, Open Questions empty,
|
||||||
|
every user journey has tasks, every task has TDD workflow and acceptance criteria.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Open Questions (Resolve Before Implementation)
|
||||||
|
|
||||||
|
1. **Bedrock model ID**: Which exact Bedrock model will be used? (Assuming `anthropic.claude-3-haiku-*` — need the org-approved ARN or model ID.)
|
||||||
|
2. **Auth mechanism**: Does the Bedrock setup use IAM role assumption, SSO profile, or explicit access keys? This affects the SDK configuration.
|
||||||
|
3. **Rate limiting**: What's the org's Bedrock rate limit? This determines batch concurrency.
|
||||||
|
4. **Cost ceiling**: Should there be a per-run token budget or discussion count cap? (e.g., `--max-threads 200`)
|
||||||
|
5. **Confidence thresholds**: Below what confidence should we discard an analysis vs. store it with low confidence?
|
||||||
|
6. **explain integration field name**: Replace `key_decisions` entirely, or add a new `discourse_analysis` section alongside it? (Recommendation: replace `key_decisions` — the heuristic is acknowledged as inadequate.)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Objective
|
||||||
|
|
||||||
|
**Goal:** Pre-compute structured discourse analysis for discussion threads using an LLM (Claude Haiku via Bedrock or Anthropic API), storing results locally so that `lore explain` and future commands can surface meaningful decisions, answered questions, and consensus without runtime LLM calls.
|
||||||
|
|
||||||
|
**Problem:** The current `key_decisions` heuristic in `explain` correlates state-change events with notes by the same actor within 60 minutes. This produces mostly empty results because real decisions happen in discussion threads, not at the moment of state changes. The heuristic cannot understand conversational semantics — whether a comment confirms a proposal, answers a question, or represents consensus.
|
||||||
|
|
||||||
|
**What this enables:**
|
||||||
|
- `lore explain issues 42` shows *actual* decisions extracted from discussion threads, not event-note temporal coincidences
|
||||||
|
- Reusable across commands — any command can query `discussion_analysis` for pre-computed insights
|
||||||
|
- Fully offline at query time — LLM enrichment is a batch pre-computation step
|
||||||
|
- Incremental — only re-analyzes threads whose notes have changed (staleness via `notes_hash`)
|
||||||
|
|
||||||
|
**Success metrics:**
|
||||||
|
- `lore enrich discussions` processes 100 threads in <60s with Haiku
|
||||||
|
- `lore explain` key_decisions section populated from enrichment data in <500ms (no LLM calls)
|
||||||
|
- Staleness detection: re-running enrichment skips unchanged threads
|
||||||
|
- Zero impact on users without LLM configuration — graceful degradation to empty key_decisions
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Tech Stack & Constraints
|
||||||
|
|
||||||
|
| Layer | Technology | Notes |
|
||||||
|
|-------|-----------|-------|
|
||||||
|
| Language | Rust | nightly-2026-03-01 |
|
||||||
|
| LLM (primary) | Claude Haiku via AWS Bedrock | Org-approved, security-compliant |
|
||||||
|
| LLM (fallback) | Claude Haiku via Anthropic API | For personal/non-org use |
|
||||||
|
| HTTP | asupersync `HttpClient` | Existing wrapper in `src/http.rs` |
|
||||||
|
| Database | SQLite via rusqlite | New migration for `discussion_analysis` table |
|
||||||
|
| Config | `~/.config/lore/config.json` | New `enrichment` section |
|
||||||
|
|
||||||
|
**Constraints:**
|
||||||
|
- Bedrock is the primary backend (org security requirement for Taylor's work context)
|
||||||
|
- Anthropic API is an alternative for non-org users
|
||||||
|
- `lore explain` must NEVER make runtime LLM calls — all enrichment is pre-computed
|
||||||
|
- `lore explain` performance budget unchanged: <500ms
|
||||||
|
- Enrichment is an explicit opt-in step (`lore enrich`), never runs during `sync`
|
||||||
|
- Must work when no LLM is configured — `key_decisions` degrades to empty array (or falls back to heuristic as transitional behavior)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
### System Overview
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────┐
|
||||||
|
│ lore enrich │
|
||||||
|
│ (explicit user/agent command, batch operation) │
|
||||||
|
└──────────────────────┬──────────────────────────┘
|
||||||
|
│
|
||||||
|
┌─────────────▼─────────────┐
|
||||||
|
│ Enrichment Pipeline │
|
||||||
|
│ 1. Select stale threads │
|
||||||
|
│ 2. Build LLM prompts │
|
||||||
|
│ 3. Call LLM (batched) │
|
||||||
|
│ 4. Parse responses │
|
||||||
|
│ 5. Store in DB │
|
||||||
|
└─────────────┬─────────────┘
|
||||||
|
│
|
||||||
|
┌─────────────▼─────────────┐
|
||||||
|
│ discussion_analysis │
|
||||||
|
│ (SQLite table) │
|
||||||
|
└─────────────┬─────────────┘
|
||||||
|
│
|
||||||
|
┌─────────────▼─────────────┐
|
||||||
|
│ lore explain / other │
|
||||||
|
│ (simple SELECT query) │
|
||||||
|
└───────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### Data Flow
|
||||||
|
|
||||||
|
1. **Staleness detection**: For each discussion, compute `SHA-256(sorted note IDs + note bodies)`. Compare against stored `notes_hash`. Skip if unchanged.
|
||||||
|
2. **Prompt construction**: Extract the last N notes (configurable, default 5) from the thread. Build a structured prompt asking for discourse classification.
|
||||||
|
3. **LLM call**: Send to configured backend (Bedrock or Anthropic API). Parse structured JSON response.
|
||||||
|
4. **Storage**: Upsert into `discussion_analysis` with analysis results, model ID, timestamp, and notes_hash.
|
||||||
|
|
||||||
|
### Pre-computation vs Runtime Trade-offs
|
||||||
|
|
||||||
|
| Concern | Pre-computed (chosen) | Runtime |
|
||||||
|
|---------|----------------------|---------|
|
||||||
|
| explain latency | <500ms (DB query) | 2-5s per thread (LLM call) |
|
||||||
|
| Offline capability | Full | None |
|
||||||
|
| Bedrock compliance | Clean separation | Leaks into explain path |
|
||||||
|
| Reusability | Any command can query | Tied to explain |
|
||||||
|
| Freshness | Stale until re-enriched | Always current |
|
||||||
|
| Cost | Batch (predictable) | Per-query (unbounded) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### New Migration (next available version)
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE discussion_analysis (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
discussion_id INTEGER NOT NULL REFERENCES discussions(id),
|
||||||
|
analysis_type TEXT NOT NULL, -- 'decision', 'question_answered', 'consensus', 'open_debate', 'informational'
|
||||||
|
confidence REAL NOT NULL, -- 0.0 to 1.0
|
||||||
|
summary TEXT NOT NULL, -- LLM-generated 1-2 sentence summary
|
||||||
|
evidence_note_ids TEXT, -- JSON array of note IDs that support this analysis
|
||||||
|
model_id TEXT NOT NULL, -- e.g. 'anthropic.claude-3-haiku-20240307-v1:0'
|
||||||
|
analyzed_at INTEGER NOT NULL, -- ms epoch
|
||||||
|
notes_hash TEXT NOT NULL, -- SHA-256 of thread content for staleness detection
|
||||||
|
|
||||||
|
UNIQUE(discussion_id, analysis_type)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_discussion_analysis_discussion
|
||||||
|
ON discussion_analysis(discussion_id);
|
||||||
|
|
||||||
|
CREATE INDEX idx_discussion_analysis_type
|
||||||
|
ON discussion_analysis(analysis_type);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Design decisions:**
|
||||||
|
- `UNIQUE(discussion_id, analysis_type)`: A thread can have at most one analysis per type. Re-enrichment upserts.
|
||||||
|
- `evidence_note_ids` is a JSON array (not a junction table) because it's read-only metadata, never queried by note ID.
|
||||||
|
- `notes_hash` enables O(1) staleness checks without re-reading all notes.
|
||||||
|
- `confidence` allows filtering in queries (e.g., only show decisions with confidence > 0.7).
|
||||||
|
- `analysis_type` uses lowercase snake_case strings, not an enum constraint, for forward compatibility.
|
||||||
|
|
||||||
|
### Analysis Types
|
||||||
|
|
||||||
|
| Type | Description | Example |
|
||||||
|
|------|-------------|---------|
|
||||||
|
| `decision` | A concrete decision was made or confirmed | "Team agreed to use Redis for caching" |
|
||||||
|
| `question_answered` | A question was asked and definitively answered | "Confirmed: the API supports pagination via cursor" |
|
||||||
|
| `consensus` | Multiple participants converged on an approach | "All reviewers approved the retry-with-backoff strategy" |
|
||||||
|
| `open_debate` | Active disagreement or unresolved discussion | "Disagreement on whether to use gRPC vs REST" |
|
||||||
|
| `informational` | Thread is purely informational, no actionable discourse | "Status update on deployment progress" |
|
||||||
|
|
||||||
|
### Notes Hash Computation
|
||||||
|
|
||||||
|
```
|
||||||
|
notes_hash = SHA-256(
|
||||||
|
note_1_id + ":" + note_1_body + "\n" +
|
||||||
|
note_2_id + ":" + note_2_body + "\n" +
|
||||||
|
...
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
Notes sorted by `id` (insertion order) before hashing. This means:
|
||||||
|
- New note added → hash changes → re-enrich
|
||||||
|
- Note edited (body changes) → hash changes → re-enrich
|
||||||
|
- No changes → hash matches → skip
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## CLI Command
|
||||||
|
|
||||||
|
### `lore enrich discussions`
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Enrich all stale discussions across all projects
|
||||||
|
lore enrich discussions
|
||||||
|
|
||||||
|
# Scope to a project
|
||||||
|
lore enrich discussions -p group/repo
|
||||||
|
|
||||||
|
# Scope to a single entity's discussions
|
||||||
|
lore enrich discussions --issue 42 -p group/repo
|
||||||
|
lore enrich discussions --mr 99 -p group/repo
|
||||||
|
|
||||||
|
# Force re-enrichment (ignore staleness)
|
||||||
|
lore enrich discussions --force
|
||||||
|
|
||||||
|
# Dry run (show what would be enriched, don't call LLM)
|
||||||
|
lore enrich discussions --dry-run
|
||||||
|
|
||||||
|
# Limit batch size
|
||||||
|
lore enrich discussions --max-threads 50
|
||||||
|
|
||||||
|
# Robot mode
|
||||||
|
lore -J enrich discussions
|
||||||
|
```
|
||||||
|
|
||||||
|
### Robot Mode Output
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"ok": true,
|
||||||
|
"data": {
|
||||||
|
"total_discussions": 1200,
|
||||||
|
"stale": 45,
|
||||||
|
"enriched": 45,
|
||||||
|
"skipped_unchanged": 1155,
|
||||||
|
"errors": 0,
|
||||||
|
"tokens_used": {
|
||||||
|
"input": 23400,
|
||||||
|
"output": 4500
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"meta": { "elapsed_ms": 32000 }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Human Mode Output
|
||||||
|
|
||||||
|
```
|
||||||
|
Enriching discussions...
|
||||||
|
|
||||||
|
Project: vs/typescript-code
|
||||||
|
Discussions: 1,200 total, 45 stale
|
||||||
|
Enriching: ████████████████████ 45/45
|
||||||
|
Results: 12 decisions, 8 questions answered, 5 consensus, 3 debates, 17 informational
|
||||||
|
Tokens: 23.4K input, 4.5K output
|
||||||
|
|
||||||
|
Done in 32s
|
||||||
|
```
|
||||||
|
|
||||||
|
### Command Registration
|
||||||
|
|
||||||
|
```rust
|
||||||
|
/// Pre-compute discourse analysis for discussion threads using LLM
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore enrich discussions # Enrich all stale discussions
|
||||||
|
lore enrich discussions -p group/repo # Scope to project
|
||||||
|
lore enrich discussions --issue 42 # Single issue's discussions
|
||||||
|
lore -J enrich discussions --dry-run # Preview what would be enriched")]
|
||||||
|
Enrich {
|
||||||
|
/// What to enrich: "discussions"
|
||||||
|
#[arg(value_parser = ["discussions"])]
|
||||||
|
target: String,
|
||||||
|
|
||||||
|
/// Scope to project (fuzzy match)
|
||||||
|
#[arg(short, long)]
|
||||||
|
project: Option<String>,
|
||||||
|
|
||||||
|
/// Scope to a specific issue's discussions
|
||||||
|
#[arg(long, conflicts_with = "mr")]
|
||||||
|
issue: Option<i64>,
|
||||||
|
|
||||||
|
/// Scope to a specific MR's discussions
|
||||||
|
#[arg(long, conflicts_with = "issue")]
|
||||||
|
mr: Option<i64>,
|
||||||
|
|
||||||
|
/// Re-enrich all threads regardless of staleness
|
||||||
|
#[arg(long)]
|
||||||
|
force: bool,
|
||||||
|
|
||||||
|
/// Show what would be enriched without calling LLM
|
||||||
|
#[arg(long)]
|
||||||
|
dry_run: bool,
|
||||||
|
|
||||||
|
/// Maximum threads to enrich in one run
|
||||||
|
#[arg(long, default_value = "500")]
|
||||||
|
max_threads: usize,
|
||||||
|
},
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LLM Provider Abstraction
|
||||||
|
|
||||||
|
### Config Schema
|
||||||
|
|
||||||
|
New `enrichment` section in `~/.config/lore/config.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"enrichment": {
|
||||||
|
"provider": "bedrock",
|
||||||
|
"bedrock": {
|
||||||
|
"region": "us-east-1",
|
||||||
|
"modelId": "anthropic.claude-3-haiku-20240307-v1:0",
|
||||||
|
"profile": "default"
|
||||||
|
},
|
||||||
|
"anthropicApi": {
|
||||||
|
"modelId": "claude-3-haiku-20240307"
|
||||||
|
},
|
||||||
|
"concurrency": 4,
|
||||||
|
"maxNotesPerThread": 5,
|
||||||
|
"minConfidence": 0.6
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Provider selection:**
|
||||||
|
- `"bedrock"` — AWS Bedrock (uses AWS SDK credential chain: env vars → profile → IAM role)
|
||||||
|
- `"anthropic"` — Anthropic API (uses `ANTHROPIC_API_KEY` env var)
|
||||||
|
- `null` or absent — enrichment disabled, `lore enrich` exits with informative message
|
||||||
|
|
||||||
|
### Rust Abstraction
|
||||||
|
|
||||||
|
```rust
|
||||||
|
/// Trait for LLM backends. Implementations handle auth, serialization, and API specifics.
|
||||||
|
#[async_trait]
|
||||||
|
pub trait LlmProvider: Send + Sync {
|
||||||
|
/// Send a prompt and get a structured response.
|
||||||
|
async fn complete(&self, prompt: &str, max_tokens: u32) -> Result<LlmResponse>;
|
||||||
|
|
||||||
|
/// Provider name for logging/storage (e.g., "bedrock", "anthropic")
|
||||||
|
fn provider_name(&self) -> &str;
|
||||||
|
|
||||||
|
/// Model identifier for storage (e.g., "anthropic.claude-3-haiku-20240307-v1:0")
|
||||||
|
fn model_id(&self) -> &str;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct LlmResponse {
|
||||||
|
pub content: String,
|
||||||
|
pub input_tokens: u32,
|
||||||
|
pub output_tokens: u32,
|
||||||
|
pub stop_reason: String,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Bedrock Implementation Notes
|
||||||
|
|
||||||
|
- Uses AWS SDK `InvokeModel` API (not Converse) for Anthropic models on Bedrock
|
||||||
|
- Request body follows Anthropic Messages API format, wrapped in Bedrock's envelope
|
||||||
|
- Auth: AWS credential chain (env → profile → IMDS)
|
||||||
|
- Region from config or `AWS_REGION` env var
|
||||||
|
- Content type: `application/json`, accept: `application/json`
|
||||||
|
|
||||||
|
### Anthropic API Implementation Notes
|
||||||
|
|
||||||
|
- Standard Messages API (`POST /v1/messages`)
|
||||||
|
- Auth: `x-api-key` header from `ANTHROPIC_API_KEY` env var
|
||||||
|
- Model ID from config `enrichment.anthropicApi.modelId`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Prompt Design
|
||||||
|
|
||||||
|
### Thread-Level Analysis Prompt
|
||||||
|
|
||||||
|
The prompt receives the last N notes from a discussion thread and classifies the discourse.
|
||||||
|
|
||||||
|
```
|
||||||
|
You are analyzing a discussion thread from a software project's issue tracker.
|
||||||
|
|
||||||
|
Thread context:
|
||||||
|
- Entity: {entity_type} #{iid} "{title}"
|
||||||
|
- Thread started: {first_note_at}
|
||||||
|
- Total notes in thread: {note_count}
|
||||||
|
|
||||||
|
Notes (most recent {N} shown):
|
||||||
|
|
||||||
|
[Note by @{author} at {timestamp}]
|
||||||
|
{body}
|
||||||
|
|
||||||
|
[Note by @{author} at {timestamp}]
|
||||||
|
{body}
|
||||||
|
|
||||||
|
...
|
||||||
|
|
||||||
|
Classify this thread's discourse. Respond with JSON only:
|
||||||
|
|
||||||
|
{
|
||||||
|
"analysis_type": "decision" | "question_answered" | "consensus" | "open_debate" | "informational",
|
||||||
|
"confidence": 0.0-1.0,
|
||||||
|
"summary": "1-2 sentence summary of what was decided/answered/debated",
|
||||||
|
"evidence_note_indices": [0, 2] // indices of notes that most support this classification
|
||||||
|
}
|
||||||
|
|
||||||
|
Classification guide:
|
||||||
|
- "decision": A concrete choice was made. Look for: "let's go with", "agreed", "approved", explicit confirmation of an approach.
|
||||||
|
- "question_answered": A question was asked and definitively answered. Look for: question mark followed by a clear factual response.
|
||||||
|
- "consensus": Multiple people converged. Look for: multiple approvals, "+1", "LGTM", agreement from different authors.
|
||||||
|
- "open_debate": Active disagreement or unresolved alternatives. Look for: "but", "alternatively", "I disagree", competing proposals without resolution.
|
||||||
|
- "informational": Status updates, FYI notes, no actionable discourse.
|
||||||
|
|
||||||
|
If the thread is ambiguous, prefer "informational" with lower confidence over guessing.
|
||||||
|
```
|
||||||
|
|
||||||
|
### Prompt Design Principles
|
||||||
|
|
||||||
|
1. **Structured JSON output** — Haiku is reliable at JSON generation with clear schema
|
||||||
|
2. **Evidence-backed** — `evidence_note_indices` ties the classification to specific notes, enabling the UI to show "why"
|
||||||
|
3. **Conservative default** — "informational" is the fallback, preventing false-positive decisions
|
||||||
|
4. **Limited context window** — Last 5 notes (configurable) keeps token usage low per thread
|
||||||
|
5. **No system prompt tricks** — Straightforward classification task within Haiku's strengths
|
||||||
|
|
||||||
|
### Token Budget Estimation
|
||||||
|
|
||||||
|
| Component | Tokens (approx) |
|
||||||
|
|-----------|-----------------|
|
||||||
|
| System/instruction prompt | ~300 |
|
||||||
|
| Thread metadata | ~50 |
|
||||||
|
| 5 notes (avg 100 words each) | ~750 |
|
||||||
|
| Response | ~100 |
|
||||||
|
| **Total per thread** | **~1,200** |
|
||||||
|
|
||||||
|
At Haiku pricing (~$0.25/1M input, ~$1.25/1M output):
|
||||||
|
- 100 threads ≈ $0.03 input + $0.01 output = **~$0.04**
|
||||||
|
- 1,000 threads ≈ **~$0.40**
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Explain Integration
|
||||||
|
|
||||||
|
### Current Behavior (to be replaced)
|
||||||
|
|
||||||
|
`explain.rs:650` — `extract_key_decisions()` uses the 60-minute same-actor heuristic.
|
||||||
|
|
||||||
|
### New Behavior
|
||||||
|
|
||||||
|
When `discussion_analysis` table has data for the entity's discussions:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
fn fetch_key_decisions_from_enrichment(
|
||||||
|
conn: &Connection,
|
||||||
|
entity_type: &str,
|
||||||
|
entity_id: i64,
|
||||||
|
max_decisions: usize,
|
||||||
|
) -> Result<Vec<KeyDecision>> {
|
||||||
|
let id_col = id_column_for(entity_type);
|
||||||
|
let sql = format!(
|
||||||
|
"SELECT da.analysis_type, da.confidence, da.summary, da.evidence_note_ids,
|
||||||
|
da.analyzed_at, d.gitlab_discussion_id
|
||||||
|
FROM discussion_analysis da
|
||||||
|
JOIN discussions d ON da.discussion_id = d.id
|
||||||
|
WHERE d.{id_col} = ?1
|
||||||
|
AND da.analysis_type IN ('decision', 'question_answered', 'consensus')
|
||||||
|
AND da.confidence >= ?2
|
||||||
|
ORDER BY da.confidence DESC, da.analyzed_at DESC
|
||||||
|
LIMIT ?3"
|
||||||
|
);
|
||||||
|
// ... map to KeyDecision structs
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fallback Strategy
|
||||||
|
|
||||||
|
```
|
||||||
|
if discussion_analysis table has rows for this entity:
|
||||||
|
use enrichment data → key_decisions
|
||||||
|
else if enrichment is not configured:
|
||||||
|
fall back to heuristic (existing behavior)
|
||||||
|
else:
|
||||||
|
return empty key_decisions with a hint: "Run 'lore enrich discussions' to populate"
|
||||||
|
```
|
||||||
|
|
||||||
|
This preserves backwards compatibility during rollout. The heuristic can be removed entirely once enrichment is the established workflow.
|
||||||
|
|
||||||
|
### KeyDecision Struct Changes
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct KeyDecision {
|
||||||
|
pub timestamp: String, // ISO 8601 (analyzed_at or note timestamp)
|
||||||
|
pub actor: Option<String>, // May not be single-actor for consensus
|
||||||
|
pub action: String, // analysis_type: "decision", "question_answered", "consensus"
|
||||||
|
pub summary: String, // LLM-generated summary (replaces context_note)
|
||||||
|
pub confidence: f64, // 0.0-1.0
|
||||||
|
pub discussion_id: Option<String>, // gitlab_discussion_id for linking
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub source: Option<String>, // "enrichment" or "heuristic" (transitional)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Testing Strategy
|
||||||
|
|
||||||
|
### Unit Tests (Mock LLM)
|
||||||
|
|
||||||
|
The LLM provider trait enables deterministic testing with a mock:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
struct MockLlmProvider {
|
||||||
|
responses: Vec<String>, // pre-canned JSON responses
|
||||||
|
call_count: AtomicUsize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LlmProvider for MockLlmProvider {
|
||||||
|
async fn complete(&self, _prompt: &str, _max_tokens: u32) -> Result<LlmResponse> {
|
||||||
|
let idx = self.call_count.fetch_add(1, Ordering::SeqCst);
|
||||||
|
Ok(LlmResponse {
|
||||||
|
content: self.responses[idx].clone(),
|
||||||
|
input_tokens: 100,
|
||||||
|
output_tokens: 50,
|
||||||
|
stop_reason: "end_turn".to_string(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Test Cases
|
||||||
|
|
||||||
|
| Test | What it validates |
|
||||||
|
|------|-------------------|
|
||||||
|
| `test_staleness_hash_changes_on_new_note` | notes_hash differs when note added |
|
||||||
|
| `test_staleness_hash_stable_no_changes` | notes_hash identical on re-computation |
|
||||||
|
| `test_enrichment_skips_unchanged_threads` | Threads with matching hash are not re-enriched |
|
||||||
|
| `test_enrichment_force_ignores_hash` | `--force` re-enriches all threads |
|
||||||
|
| `test_enrichment_stores_analysis` | Results persisted to `discussion_analysis` table |
|
||||||
|
| `test_enrichment_upserts_on_rerun` | Re-enrichment updates existing rows |
|
||||||
|
| `test_enrichment_dry_run_no_writes` | `--dry-run` produces count but writes nothing |
|
||||||
|
| `test_enrichment_respects_max_threads` | Caps at `--max-threads` value |
|
||||||
|
| `test_enrichment_scopes_to_project` | `-p` limits to project's discussions |
|
||||||
|
| `test_enrichment_scopes_to_entity` | `--issue 42` limits to that issue's discussions |
|
||||||
|
| `test_explain_uses_enrichment_data` | explain returns enrichment-sourced key_decisions |
|
||||||
|
| `test_explain_falls_back_to_heuristic` | No enrichment data → heuristic results |
|
||||||
|
| `test_explain_empty_when_no_data` | No enrichment, no heuristic matches → empty array |
|
||||||
|
| `test_prompt_construction` | Prompt includes correct notes, metadata, and instruction |
|
||||||
|
| `test_response_parsing_valid_json` | Well-formed LLM response parsed correctly |
|
||||||
|
| `test_response_parsing_malformed` | Malformed response logged, thread skipped (not crash) |
|
||||||
|
| `test_confidence_filter` | Only analysis above `minConfidence` shown in explain |
|
||||||
|
| `test_provider_config_bedrock` | Bedrock config parsed and provider instantiated |
|
||||||
|
| `test_provider_config_anthropic` | Anthropic API config parsed correctly |
|
||||||
|
| `test_no_enrichment_config_graceful` | Missing enrichment config → informative message, exit 0 |
|
||||||
|
|
||||||
|
### Integration Tests
|
||||||
|
|
||||||
|
- **Real Bedrock call** (gated behind `#[ignore]` + env var `LORE_TEST_BEDROCK=1`): Sends one real prompt to Bedrock, asserts valid JSON response with expected schema.
|
||||||
|
- **Full pipeline**: In-memory DB → insert discussions + notes → enrich with mock → verify `discussion_analysis` populated → run explain → verify key_decisions sourced from enrichment.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Boundaries
|
||||||
|
|
||||||
|
### Always (autonomous)
|
||||||
|
- Run `cargo test` and `cargo clippy` after every code change
|
||||||
|
- Use `MockLlmProvider` in all non-integration tests
|
||||||
|
- Respect `--dry-run` flag — never call LLM in dry-run mode
|
||||||
|
- Log token usage for every enrichment run
|
||||||
|
- Graceful degradation when no enrichment config exists
|
||||||
|
|
||||||
|
### Ask First (needs approval)
|
||||||
|
- Adding AWS SDK or HTTP dependencies to Cargo.toml
|
||||||
|
- Choosing between `aws-sdk-bedrockruntime` crate vs raw HTTP to Bedrock
|
||||||
|
- Modifying the `Config` struct (new `enrichment` field)
|
||||||
|
- Changing `KeyDecision` struct shape (affects robot mode API contract)
|
||||||
|
|
||||||
|
### Never (hard stops)
|
||||||
|
- No LLM calls in `lore explain` path — enrichment is pre-computed only
|
||||||
|
- No storing API keys in config file — use env vars / credential chain
|
||||||
|
- No automatic enrichment during `lore sync` — enrichment is always explicit
|
||||||
|
- No sending discussion content to any service other than the configured LLM provider
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Non-Goals
|
||||||
|
|
||||||
|
- **No real-time streaming** — Enrichment is batch, not streaming
|
||||||
|
- **No multi-model ensemble** — Single model per run, configurable per config
|
||||||
|
- **No custom fine-tuning** — Uses Haiku as-is with prompt engineering
|
||||||
|
- **No enrichment of individual notes** — Thread-level only (the unit of discourse)
|
||||||
|
- **No automatic re-enrichment on sync** — User/agent must explicitly run `lore enrich`
|
||||||
|
- **No modification of discussion/notes tables** — Enrichment data lives in its own table
|
||||||
|
- **No embedding-based approach** — This is classification, not similarity search
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## User Journeys
|
||||||
|
|
||||||
|
### P1 — Critical
|
||||||
|
- **UJ-1: Agent enriches discussions before explain**
|
||||||
|
- Actor: AI agent (via robot mode)
|
||||||
|
- Flow: `lore -J enrich discussions -p group/repo` → JSON summary of enrichment run → `lore -J explain issues 42` → key_decisions populated from enrichment
|
||||||
|
- Error paths: No enrichment config (exit with suggestion), Bedrock auth failure (exit 5), rate limited (exit 7)
|
||||||
|
- Implemented by: Tasks 1-5
|
||||||
|
|
||||||
|
### P2 — Important
|
||||||
|
- **UJ-2: Human runs enrichment and checks results**
|
||||||
|
- Actor: Developer at terminal
|
||||||
|
- Flow: `lore enrich discussions` → progress bar → summary → `lore explain issues 42` → sees decisions in narrative
|
||||||
|
- Error paths: Same as UJ-1 but with human-readable messages
|
||||||
|
- Implemented by: Tasks 1-5
|
||||||
|
|
||||||
|
- **UJ-3: Incremental enrichment after sync**
|
||||||
|
- Actor: AI agent or human
|
||||||
|
- Flow: `lore sync` → new notes ingested → `lore enrich discussions` → only stale threads re-enriched → fast completion
|
||||||
|
- Implemented by: Task 2 (staleness detection)
|
||||||
|
|
||||||
|
### P3 — Nice to Have
|
||||||
|
- **UJ-4: Dry-run to estimate cost**
|
||||||
|
- Actor: Cost-conscious user
|
||||||
|
- Flow: `lore enrich discussions --dry-run` → see thread count and estimated tokens → decide whether to proceed
|
||||||
|
- Implemented by: Task 4
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Tasks
|
||||||
|
|
||||||
|
### Phase 1: Schema & Provider Abstraction
|
||||||
|
|
||||||
|
- [ ] **Task 1:** Database migration + LLM provider trait
|
||||||
|
- **Implements:** Infrastructure (all UJs)
|
||||||
|
- **Files:** `src/core/db.rs` (migration), NEW `src/enrichment/mod.rs`, NEW `src/enrichment/provider.rs`
|
||||||
|
- **Depends on:** Nothing
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_migration_creates_discussion_analysis_table`: run migrations, verify table exists with correct columns
|
||||||
|
2. Write `test_provider_config_bedrock`: parse config JSON with bedrock enrichment section
|
||||||
|
3. Write `test_provider_config_anthropic`: parse config JSON with anthropic enrichment section
|
||||||
|
4. Write `test_no_enrichment_config_graceful`: parse config without enrichment section, verify `None`
|
||||||
|
5. Run tests — all FAIL (red)
|
||||||
|
6. Implement migration + `LlmProvider` trait + `EnrichmentConfig` struct + config parsing
|
||||||
|
7. Run tests — all PASS (green)
|
||||||
|
- **Acceptance:** Migration creates table. Config parses both provider variants. Missing config returns `None`.
|
||||||
|
|
||||||
|
### Phase 2: Staleness & Prompt Pipeline
|
||||||
|
|
||||||
|
- [ ] **Task 2:** Notes hash computation + staleness detection
|
||||||
|
- **Implements:** UJ-3 (incremental enrichment)
|
||||||
|
- **Files:** `src/enrichment/staleness.rs`
|
||||||
|
- **Depends on:** Task 1
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_staleness_hash_changes_on_new_note`
|
||||||
|
2. Write `test_staleness_hash_stable_no_changes`
|
||||||
|
3. Write `test_enrichment_skips_unchanged_threads`
|
||||||
|
4. Run tests — all FAIL (red)
|
||||||
|
5. Implement `compute_notes_hash()` + `find_stale_discussions()` query
|
||||||
|
6. Run tests — all PASS (green)
|
||||||
|
- **Acceptance:** Hash deterministic. Stale detection correct. Unchanged threads skipped.
|
||||||
|
|
||||||
|
- [ ] **Task 3:** Prompt construction + response parsing
|
||||||
|
- **Implements:** Core enrichment logic
|
||||||
|
- **Files:** `src/enrichment/prompt.rs`, `src/enrichment/parser.rs`
|
||||||
|
- **Depends on:** Task 1
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_prompt_construction`: verify prompt includes notes, metadata, instruction
|
||||||
|
2. Write `test_response_parsing_valid_json`: well-formed response parsed
|
||||||
|
3. Write `test_response_parsing_malformed`: malformed response returns error (not panic)
|
||||||
|
4. Run tests — all FAIL (red)
|
||||||
|
5. Implement `build_prompt()` + `parse_analysis_response()`
|
||||||
|
6. Run tests — all PASS (green)
|
||||||
|
- **Acceptance:** Prompt is well-formed. Parser handles valid and invalid responses gracefully.
|
||||||
|
|
||||||
|
### Phase 3: CLI Command & Pipeline
|
||||||
|
|
||||||
|
- [ ] **Task 4:** `lore enrich discussions` command + enrichment pipeline
|
||||||
|
- **Implements:** UJ-1, UJ-2, UJ-4
|
||||||
|
- **Files:** NEW `src/cli/commands/enrich.rs`, `src/cli/mod.rs`, `src/main.rs`
|
||||||
|
- **Depends on:** Tasks 1, 2, 3
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_enrichment_stores_analysis`: mock LLM → verify rows in `discussion_analysis`
|
||||||
|
2. Write `test_enrichment_upserts_on_rerun`: enrich → re-enrich → verify single row updated
|
||||||
|
3. Write `test_enrichment_dry_run_no_writes`: dry-run → verify zero rows written
|
||||||
|
4. Write `test_enrichment_respects_max_threads`: 10 stale, max=3 → only 3 enriched
|
||||||
|
5. Write `test_enrichment_scopes_to_project`: verify project filter
|
||||||
|
6. Write `test_enrichment_scopes_to_entity`: verify --issue/--mr filter
|
||||||
|
7. Run tests — all FAIL (red)
|
||||||
|
8. Implement: command registration, pipeline orchestration, mock-based tests
|
||||||
|
9. Run tests — all PASS (green)
|
||||||
|
- **Acceptance:** Full pipeline works with mock. Dry-run safe. Scoping correct. Robot JSON matches schema.
|
||||||
|
|
||||||
|
### Phase 4: LLM Backend Implementations
|
||||||
|
|
||||||
|
- [ ] **Task 5:** Bedrock + Anthropic API provider implementations
|
||||||
|
- **Implements:** UJ-1, UJ-2 (actual LLM connectivity)
|
||||||
|
- **Files:** `src/enrichment/bedrock.rs`, `src/enrichment/anthropic.rs`
|
||||||
|
- **Depends on:** Task 4
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_bedrock_request_format`: verify request body matches Bedrock InvokeModel schema
|
||||||
|
2. Write `test_anthropic_request_format`: verify request body matches Messages API schema
|
||||||
|
3. Write integration test (gated `#[ignore]`): real Bedrock call, assert valid response
|
||||||
|
4. Run tests — unit FAIL (red), integration skipped
|
||||||
|
5. Implement both providers
|
||||||
|
6. Run tests — all PASS (green)
|
||||||
|
- **Acceptance:** Both providers construct valid requests. Auth works via standard credential chains. Integration test passes when enabled.
|
||||||
|
|
||||||
|
### Phase 5: Explain Integration
|
||||||
|
|
||||||
|
- [ ] **Task 6:** Replace heuristic with enrichment data in explain
|
||||||
|
- **Implements:** UJ-1, UJ-2 (the payoff)
|
||||||
|
- **Files:** `src/cli/commands/explain.rs`
|
||||||
|
- **Depends on:** Task 4
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_uses_enrichment_data`: insert mock enrichment rows → explain returns them as key_decisions
|
||||||
|
2. Write `test_explain_falls_back_to_heuristic`: no enrichment rows → returns heuristic results
|
||||||
|
3. Write `test_confidence_filter`: insert rows with varying confidence → only high-confidence shown
|
||||||
|
4. Run tests — all FAIL (red)
|
||||||
|
5. Implement `fetch_key_decisions_from_enrichment()` + fallback logic
|
||||||
|
6. Run tests — all PASS (green)
|
||||||
|
- **Acceptance:** Explain uses enrichment when available. Falls back gracefully. Confidence threshold respected.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Dependencies (New Crates — Needs Discussion)
|
||||||
|
|
||||||
|
| Crate | Purpose | Alternative |
|
||||||
|
|-------|---------|-------------|
|
||||||
|
| `aws-sdk-bedrockruntime` | Bedrock InvokeModel API | Raw HTTP via existing `HttpClient` |
|
||||||
|
| `sha2` | SHA-256 for notes_hash | Already in dependency tree? Check. |
|
||||||
|
|
||||||
|
**Decision needed:** Use AWS SDK crate (heavier but handles auth/signing) vs. raw HTTP with SigV4 signing (lighter but more implementation work)?
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Session Log
|
||||||
|
|
||||||
|
### Session 1 — 2026-03-11
|
||||||
|
- Identified key_decisions heuristic as fundamentally inadequate (60-min same-actor window)
|
||||||
|
- User vision: LLM-powered discourse analysis, pre-computed for offline explain
|
||||||
|
- Key constraint: Bedrock required for org security compliance
|
||||||
|
- Designed pre-computed enrichment architecture
|
||||||
|
- Wrote initial spec draft for iteration
|
||||||
701
specs/SPEC_explain.md
Normal file
701
specs/SPEC_explain.md
Normal file
@@ -0,0 +1,701 @@
|
|||||||
|
# Spec: lore explain — Auto-Generated Issue/MR Narratives
|
||||||
|
|
||||||
|
**Bead:** bd-9lbr
|
||||||
|
**Created:** 2026-03-10
|
||||||
|
|
||||||
|
## Spec Status
|
||||||
|
| Section | Status | Notes |
|
||||||
|
|---------|--------|-------|
|
||||||
|
| Objective | complete | |
|
||||||
|
| Tech Stack | complete | |
|
||||||
|
| Project Structure | complete | |
|
||||||
|
| Commands | complete | |
|
||||||
|
| Code Style | complete | UX-audited: after_help, --sections, --since, --no-timeline, --max-decisions, singular types |
|
||||||
|
| Boundaries | complete | |
|
||||||
|
| Testing Strategy | complete | 13 test cases (7 original + 5 UX flags + 1 singular type) |
|
||||||
|
| Git Workflow | complete | jj-first |
|
||||||
|
| User Journeys | complete | 3 journeys covering agent, human, pipeline use |
|
||||||
|
| Architecture | complete | ExplainParams + section filtering + time scoping |
|
||||||
|
| Success Criteria | complete | 15 criteria (10 original + 5 UX flags) |
|
||||||
|
| Non-Goals | complete | |
|
||||||
|
| Tasks | complete | 5 tasks across 3 phases, all updated for UX flags |
|
||||||
|
|
||||||
|
**Definition of Complete:** All sections `complete`, Open Questions empty,
|
||||||
|
every user journey has tasks, every task has TDD workflow and acceptance criteria.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Quick Reference
|
||||||
|
- [Entity Detail] (Architecture): reuse show/ query patterns (private — copy, don't import)
|
||||||
|
- [Timeline] (Architecture): import `crate::timeline::seed::seed_timeline_direct` + `collect_events`
|
||||||
|
- [Events] (Architecture): new inline queries against resource_state_events/resource_label_events
|
||||||
|
- [References] (Architecture): new query against entity_references table
|
||||||
|
- [Discussions] (Architecture): adapted from show/ patterns, add resolved/resolvable filter
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Open Questions (Resolve Before Implementation)
|
||||||
|
<!-- All resolved -->
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Objective
|
||||||
|
|
||||||
|
**Goal:** Add `lore explain issues N` / `lore explain mrs N` to auto-generate structured narratives of what happened on an issue or MR.
|
||||||
|
|
||||||
|
**Problem:** Understanding the full story of an issue/MR requires reading dozens of notes, cross-referencing state changes, checking related entities, and piecing together a timeline. This is time-consuming for humans and nearly impossible for AI agents without custom orchestration.
|
||||||
|
|
||||||
|
**Success metrics:**
|
||||||
|
- Produces a complete narrative in <500ms for an issue with 50 notes
|
||||||
|
- All 7 sections populated (entity, description_excerpt, key_decisions, activity, open_threads, related, timeline_excerpt)
|
||||||
|
- Works fully offline (no API calls, no LLM)
|
||||||
|
- Deterministic and reproducible (same input = same output)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Tech Stack & Constraints
|
||||||
|
|
||||||
|
| Layer | Technology | Version |
|
||||||
|
|-------|-----------|---------|
|
||||||
|
| Language | Rust | nightly-2026-03-01 (rust-toolchain.toml) |
|
||||||
|
| Framework | clap (derive) | As in Cargo.toml |
|
||||||
|
| Database | SQLite via rusqlite | Bundled |
|
||||||
|
| Testing | cargo test | Inline #[cfg(test)] |
|
||||||
|
| Async | asupersync | 0.2 |
|
||||||
|
|
||||||
|
**Constraints:**
|
||||||
|
- No LLM dependency — template-based, deterministic
|
||||||
|
- No network calls — all data from local SQLite
|
||||||
|
- Performance: <500ms for 50-note entity
|
||||||
|
- Unsafe code forbidden (`#![forbid(unsafe_code)]`)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
src/cli/commands/
|
||||||
|
explain.rs # NEW: command module (queries, heuristic, result types)
|
||||||
|
src/cli/
|
||||||
|
mod.rs # EDIT: add Explain variant to Commands enum
|
||||||
|
src/app/
|
||||||
|
handlers.rs # EDIT: add handle_explain dispatch
|
||||||
|
robot_docs.rs # EDIT: register explain in robot-docs manifest
|
||||||
|
src/main.rs # EDIT: add Explain match arm
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Build
|
||||||
|
cargo check --all-targets
|
||||||
|
|
||||||
|
# Test
|
||||||
|
cargo test explain
|
||||||
|
|
||||||
|
# Lint
|
||||||
|
cargo clippy --all-targets -- -D warnings
|
||||||
|
|
||||||
|
# Format
|
||||||
|
cargo fmt --check
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Code Style
|
||||||
|
|
||||||
|
**Command registration (from cli/mod.rs):**
|
||||||
|
```rust
|
||||||
|
/// Auto-generate a structured narrative of an issue or MR
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore explain issues 42 # Narrative for issue #42
|
||||||
|
lore explain mrs 99 -p group/repo # Narrative for MR !99 in specific project
|
||||||
|
lore -J explain issues 42 # JSON output for automation
|
||||||
|
lore explain issues 42 --sections key_decisions,open_threads # Specific sections only
|
||||||
|
lore explain issues 42 --since 30d # Narrative scoped to last 30 days
|
||||||
|
lore explain issues 42 --no-timeline # Skip timeline (faster)")]
|
||||||
|
Explain {
|
||||||
|
/// Entity type: "issues" or "mrs" (singular forms also accepted)
|
||||||
|
#[arg(value_parser = ["issues", "mrs", "issue", "mr"])]
|
||||||
|
entity_type: String,
|
||||||
|
|
||||||
|
/// Entity IID
|
||||||
|
iid: i64,
|
||||||
|
|
||||||
|
/// Scope to project (fuzzy match)
|
||||||
|
#[arg(short, long)]
|
||||||
|
project: Option<String>,
|
||||||
|
|
||||||
|
/// Select specific sections (comma-separated)
|
||||||
|
/// Valid: entity, description, key_decisions, activity, open_threads, related, timeline
|
||||||
|
#[arg(long, value_delimiter = ',', help_heading = "Output")]
|
||||||
|
sections: Option<Vec<String>>,
|
||||||
|
|
||||||
|
/// Skip timeline excerpt (faster execution)
|
||||||
|
#[arg(long, help_heading = "Output")]
|
||||||
|
no_timeline: bool,
|
||||||
|
|
||||||
|
/// Maximum key decisions to include
|
||||||
|
#[arg(long, default_value = "10", help_heading = "Output")]
|
||||||
|
max_decisions: usize,
|
||||||
|
|
||||||
|
/// Time scope for events/notes (e.g. 7d, 2w, 1m, or YYYY-MM-DD)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
since: Option<String>,
|
||||||
|
},
|
||||||
|
```
|
||||||
|
|
||||||
|
**Entity type normalization:** The handler must normalize singular forms: `"issue"` -> `"issues"`, `"mr"` -> `"mrs"`. This prevents common typos from causing errors.
|
||||||
|
|
||||||
|
**Query pattern (from show/issue.rs):**
|
||||||
|
```rust
|
||||||
|
fn find_issue(conn: &Connection, iid: i64, project_filter: Option<&str>) -> Result<IssueRow> {
|
||||||
|
let project_id = resolve_project(conn, project_filter)?;
|
||||||
|
let mut stmt = conn.prepare_cached("SELECT ... FROM issues WHERE iid = ?1 AND project_id = ?2")?;
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Robot mode output (from cli/robot.rs):**
|
||||||
|
```rust
|
||||||
|
let response = serde_json::json!({
|
||||||
|
"ok": true,
|
||||||
|
"data": result,
|
||||||
|
"meta": { "elapsed_ms": elapsed.as_millis() }
|
||||||
|
});
|
||||||
|
println!("{}", serde_json::to_string(&response)?);
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Boundaries
|
||||||
|
|
||||||
|
### Always (autonomous)
|
||||||
|
- Run `cargo test explain` and `cargo clippy` after every code change
|
||||||
|
- Follow existing query patterns from show/issue.rs and show/mr.rs
|
||||||
|
- Use `resolve_project()` for project resolution (fuzzy match)
|
||||||
|
- Cap key_decisions at `--max-decisions` (default 10), timeline_excerpt at 20 events
|
||||||
|
- Normalize singular entity types (`issue` -> `issues`, `mr` -> `mrs`)
|
||||||
|
- Respect `--sections` filter: omit unselected sections from output (both robot and human)
|
||||||
|
- Respect `--since` filter: scope events/notes queries with `created_at >= ?` threshold
|
||||||
|
|
||||||
|
### Ask First (needs approval)
|
||||||
|
- Adding new dependencies to Cargo.toml
|
||||||
|
- Modifying existing query functions in show/ or timeline/
|
||||||
|
- Changing the entity_references table schema
|
||||||
|
|
||||||
|
### Never (hard stops)
|
||||||
|
- No LLM calls — explain must be deterministic
|
||||||
|
- No API/network calls — fully offline
|
||||||
|
- No new database migrations — use existing schema only
|
||||||
|
- Do not modify show/ or timeline/ modules (copy patterns instead)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Testing Strategy (TDD — Red-Green)
|
||||||
|
|
||||||
|
**Methodology:** Test-Driven Development. Write tests first, confirm red, implement, confirm green.
|
||||||
|
|
||||||
|
**Framework:** cargo test, inline `#[cfg(test)]`
|
||||||
|
**Location:** `src/cli/commands/explain.rs` (inline test module)
|
||||||
|
|
||||||
|
**Test categories:**
|
||||||
|
- Unit tests: key-decisions heuristic, activity counting, description truncation
|
||||||
|
- Integration tests: full explain pipeline with in-memory DB
|
||||||
|
|
||||||
|
**User journey test mapping:**
|
||||||
|
| Journey | Test | Scenarios |
|
||||||
|
|---------|------|-----------|
|
||||||
|
| UJ-1: Agent explains issue | test_explain_issue_basic | All 7 sections present, robot JSON valid |
|
||||||
|
| UJ-1: Agent explains MR | test_explain_mr | entity.type = "merge_request", merged_at included |
|
||||||
|
| UJ-1: Singular entity type | test_explain_singular_entity_type | `"issue"` normalizes to `"issues"` |
|
||||||
|
| UJ-1: Section filtering | test_explain_sections_filter_robot | Only selected sections in output |
|
||||||
|
| UJ-1: No-timeline flag | test_explain_no_timeline_flag | timeline_excerpt is None |
|
||||||
|
| UJ-2: Human reads narrative | (human render tested manually) | Headers, indentation, color |
|
||||||
|
| UJ-3: Key decisions | test_explain_key_decision_heuristic | Note within 60min of state change by same actor |
|
||||||
|
| UJ-3: No false decisions | test_explain_key_decision_ignores_unrelated_notes | Different author's note excluded |
|
||||||
|
| UJ-3: Max decisions cap | test_explain_max_decisions | Respects `--max-decisions` parameter |
|
||||||
|
| UJ-3: Since scopes events | test_explain_since_scopes_events | Only recent events included |
|
||||||
|
| UJ-3: Open threads | test_explain_open_threads | Only unresolved discussions in output |
|
||||||
|
| UJ-3: Edge case | test_explain_no_notes | Empty sections, no panic |
|
||||||
|
| UJ-3: Activity counts | test_explain_activity_counts | Correct state/label/note counts |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Git Workflow
|
||||||
|
|
||||||
|
- **jj-first** — all VCS via jj, not git
|
||||||
|
- **Commit format:** `feat(explain): <description>`
|
||||||
|
- **No branches** — commit in place, use jj bookmarks to push
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## User Journeys (Prioritized)
|
||||||
|
|
||||||
|
### P1 — Critical
|
||||||
|
- **UJ-1: Agent queries issue/MR narrative**
|
||||||
|
- Actor: AI agent (via robot mode)
|
||||||
|
- Flow: `lore -J explain issues 42` → JSON with 7 sections → agent parses and acts
|
||||||
|
- Error paths: Issue not found (exit 17), ambiguous project (exit 18)
|
||||||
|
- Implemented by: Task 1, 2, 3, 4
|
||||||
|
|
||||||
|
### P2 — Important
|
||||||
|
- **UJ-2: Human reads explain output**
|
||||||
|
- Actor: Developer at terminal
|
||||||
|
- Flow: `lore explain issues 42` → formatted narrative with headers, colors, indentation
|
||||||
|
- Error paths: Same as UJ-1 but with human-readable error messages
|
||||||
|
- Implemented by: Task 5
|
||||||
|
|
||||||
|
### P3 — Nice to Have
|
||||||
|
- **UJ-3: Agent uses key-decisions to understand context**
|
||||||
|
- Actor: AI agent making decisions
|
||||||
|
- Flow: Parse `key_decisions` array → understand who decided what and when → inform action
|
||||||
|
- Error paths: No key decisions found (empty array, not error)
|
||||||
|
- Implemented by: Task 3
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Architecture / Data Model
|
||||||
|
|
||||||
|
### Data Assembly Pipeline (sync, no async needed)
|
||||||
|
|
||||||
|
```
|
||||||
|
1. RESOLVE → resolve_project() + find entity by IID
|
||||||
|
2. PARSE → normalize entity_type, parse --since, validate --sections
|
||||||
|
3. DETAIL → entity metadata (title, state, author, labels, assignees, status)
|
||||||
|
4. EVENTS → resource_state_events + resource_label_events (optionally --since scoped)
|
||||||
|
5. NOTES → non-system notes via discussions join (optionally --since scoped)
|
||||||
|
6. HEURISTIC → key_decisions = events correlated with notes by same actor within 60min
|
||||||
|
7. THREADS → discussions WHERE resolvable=1 AND resolved=0
|
||||||
|
8. REFERENCES → entity_references (both directions: source and target)
|
||||||
|
9. TIMELINE → seed_timeline_direct + collect_events (capped at 20, skip if --no-timeline)
|
||||||
|
10. FILTER → apply --sections filter: drop unselected sections before serialization
|
||||||
|
11. ASSEMBLE → combine into ExplainResult
|
||||||
|
```
|
||||||
|
|
||||||
|
**Section filtering:** When `--sections` is provided, only the listed sections are populated.
|
||||||
|
Unselected sections are set to their zero-value (`None`, empty vec, etc.) and omitted
|
||||||
|
from robot JSON via `#[serde(skip_serializing_if = "...")]`. The `entity` section is always
|
||||||
|
included (needed for identification). Human mode skips rendering unselected sections.
|
||||||
|
|
||||||
|
**Time scoping:** When `--since` is provided, parse it using `crate::core::time::parse_since()`
|
||||||
|
(same function used by timeline, me, file-history). Add `AND created_at >= ?` to events
|
||||||
|
and notes queries. The entity header, references, and open threads are NOT time-scoped
|
||||||
|
(they represent current state, not historical events).
|
||||||
|
|
||||||
|
### Key Types
|
||||||
|
|
||||||
|
```rust
|
||||||
|
/// Parameters controlling explain behavior.
|
||||||
|
pub struct ExplainParams {
|
||||||
|
pub entity_type: String, // "issues" or "mrs" (already normalized)
|
||||||
|
pub iid: i64,
|
||||||
|
pub project: Option<String>,
|
||||||
|
pub sections: Option<Vec<String>>, // None = all sections
|
||||||
|
pub no_timeline: bool,
|
||||||
|
pub max_decisions: usize, // default 10
|
||||||
|
pub since: Option<i64>, // ms epoch threshold from --since parsing
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct ExplainResult {
|
||||||
|
pub entity: EntitySummary,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub description_excerpt: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub key_decisions: Option<Vec<KeyDecision>>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub activity: Option<ActivitySummary>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub open_threads: Option<Vec<OpenThread>>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub related: Option<RelatedEntities>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub timeline_excerpt: Option<Vec<TimelineEventSummary>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct EntitySummary {
|
||||||
|
#[serde(rename = "type")]
|
||||||
|
pub entity_type: String, // "issue" or "merge_request"
|
||||||
|
pub iid: i64,
|
||||||
|
pub title: String,
|
||||||
|
pub state: String,
|
||||||
|
pub author: String,
|
||||||
|
pub assignees: Vec<String>,
|
||||||
|
pub labels: Vec<String>,
|
||||||
|
pub created_at: String, // ISO 8601
|
||||||
|
pub updated_at: String, // ISO 8601
|
||||||
|
pub url: Option<String>,
|
||||||
|
pub status_name: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct KeyDecision {
|
||||||
|
pub timestamp: String, // ISO 8601
|
||||||
|
pub actor: String,
|
||||||
|
pub action: String, // "state: opened -> closed" or "label: +bug"
|
||||||
|
pub context_note: String, // truncated to 500 chars
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct ActivitySummary {
|
||||||
|
pub state_changes: usize,
|
||||||
|
pub label_changes: usize,
|
||||||
|
pub notes: usize, // non-system only
|
||||||
|
pub first_event: Option<String>, // ISO 8601
|
||||||
|
pub last_event: Option<String>, // ISO 8601
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct OpenThread {
|
||||||
|
pub discussion_id: String,
|
||||||
|
pub started_by: String,
|
||||||
|
pub started_at: String, // ISO 8601
|
||||||
|
pub note_count: usize,
|
||||||
|
pub last_note_at: String, // ISO 8601
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct RelatedEntities {
|
||||||
|
pub closing_mrs: Vec<ClosingMrInfo>,
|
||||||
|
pub related_issues: Vec<RelatedEntityInfo>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct TimelineEventSummary {
|
||||||
|
pub timestamp: String, // ISO 8601
|
||||||
|
pub event_type: String,
|
||||||
|
pub actor: Option<String>,
|
||||||
|
pub summary: String,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Key Decisions Heuristic
|
||||||
|
|
||||||
|
The heuristic identifies notes that explain WHY state/label changes were made:
|
||||||
|
|
||||||
|
1. Collect all `resource_state_events` and `resource_label_events` for the entity
|
||||||
|
2. Merge into unified chronological list with (timestamp, actor, description)
|
||||||
|
3. For each event, find the FIRST non-system note by the SAME actor within 60 minutes AFTER the event
|
||||||
|
4. Pair them as a `KeyDecision`
|
||||||
|
5. Cap at `params.max_decisions` (default 10)
|
||||||
|
|
||||||
|
**SQL for state events:**
|
||||||
|
```sql
|
||||||
|
SELECT state, actor_username, created_at
|
||||||
|
FROM resource_state_events
|
||||||
|
WHERE issue_id = ?1 -- or merge_request_id = ?1
|
||||||
|
AND (?2 IS NULL OR created_at >= ?2) -- --since filter
|
||||||
|
ORDER BY created_at ASC
|
||||||
|
```
|
||||||
|
|
||||||
|
**SQL for label events:**
|
||||||
|
```sql
|
||||||
|
SELECT action, label_name, actor_username, created_at
|
||||||
|
FROM resource_label_events
|
||||||
|
WHERE issue_id = ?1 -- or merge_request_id = ?1
|
||||||
|
AND (?2 IS NULL OR created_at >= ?2) -- --since filter
|
||||||
|
ORDER BY created_at ASC
|
||||||
|
```
|
||||||
|
|
||||||
|
**SQL for non-system notes (for correlation):**
|
||||||
|
```sql
|
||||||
|
SELECT n.body, n.author_username, n.created_at
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
WHERE d.noteable_type = ?1 AND d.issue_id = ?2 -- or d.merge_request_id
|
||||||
|
AND n.is_system = 0
|
||||||
|
AND (?3 IS NULL OR n.created_at >= ?3) -- --since filter
|
||||||
|
ORDER BY n.created_at ASC
|
||||||
|
```
|
||||||
|
|
||||||
|
**Entity ID resolution:** The `discussions` table uses `issue_id` / `merge_request_id` columns (CHECK constraint: exactly one non-NULL). The `resource_state_events` and `resource_label_events` tables use the same pattern.
|
||||||
|
|
||||||
|
### Cross-References Query
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Outgoing references (this entity references others)
|
||||||
|
SELECT target_entity_type, target_entity_id, target_project_path,
|
||||||
|
target_entity_iid, reference_type, source_method
|
||||||
|
FROM entity_references
|
||||||
|
WHERE source_entity_type = ?1 AND source_entity_id = ?2
|
||||||
|
|
||||||
|
-- Incoming references (others reference this entity)
|
||||||
|
SELECT source_entity_type, source_entity_id,
|
||||||
|
reference_type, source_method
|
||||||
|
FROM entity_references
|
||||||
|
WHERE target_entity_type = ?1 AND target_entity_id = ?2
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note:** For closing MRs, reuse the pattern from show/issue.rs `get_closing_mrs()` which queries entity_references with `reference_type = 'closes'`.
|
||||||
|
|
||||||
|
### Open Threads Query
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT d.gitlab_discussion_id, d.first_note_at, d.last_note_at
|
||||||
|
FROM discussions d
|
||||||
|
WHERE d.issue_id = ?1 -- or d.merge_request_id
|
||||||
|
AND d.resolvable = 1
|
||||||
|
AND d.resolved = 0
|
||||||
|
ORDER BY d.last_note_at DESC
|
||||||
|
```
|
||||||
|
|
||||||
|
Then for each discussion, fetch the first note's author:
|
||||||
|
```sql
|
||||||
|
SELECT author_username, created_at
|
||||||
|
FROM notes
|
||||||
|
WHERE discussion_id = ?1
|
||||||
|
ORDER BY created_at ASC
|
||||||
|
LIMIT 1
|
||||||
|
```
|
||||||
|
|
||||||
|
And count notes per discussion:
|
||||||
|
```sql
|
||||||
|
SELECT COUNT(*) FROM notes WHERE discussion_id = ?1 AND is_system = 0
|
||||||
|
```
|
||||||
|
|
||||||
|
### Robot Mode Output Schema
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"ok": true,
|
||||||
|
"data": {
|
||||||
|
"entity": {
|
||||||
|
"type": "issue", "iid": 3864, "title": "...", "state": "opened",
|
||||||
|
"author": "teernisse", "assignees": ["teernisse"],
|
||||||
|
"labels": ["customer:BNSF"], "created_at": "2026-01-10T...",
|
||||||
|
"updated_at": "2026-02-12T...", "url": "...", "status_name": "In progress"
|
||||||
|
},
|
||||||
|
"description_excerpt": "First 500 chars...",
|
||||||
|
"key_decisions": [{
|
||||||
|
"timestamp": "2026-01-15T...",
|
||||||
|
"actor": "teernisse",
|
||||||
|
"action": "state: opened -> closed",
|
||||||
|
"context_note": "Starting work on the integration..."
|
||||||
|
}],
|
||||||
|
"activity": {
|
||||||
|
"state_changes": 3, "label_changes": 5, "notes": 42,
|
||||||
|
"first_event": "2026-01-10T...", "last_event": "2026-02-12T..."
|
||||||
|
},
|
||||||
|
"open_threads": [{
|
||||||
|
"discussion_id": "abc123",
|
||||||
|
"started_by": "cseiber",
|
||||||
|
"started_at": "2026-02-01T...",
|
||||||
|
"note_count": 5,
|
||||||
|
"last_note_at": "2026-02-10T..."
|
||||||
|
}],
|
||||||
|
"related": {
|
||||||
|
"closing_mrs": [{ "iid": 200, "title": "...", "state": "merged" }],
|
||||||
|
"related_issues": [{ "iid": 3800, "title": "Rail Break Card", "type": "related" }]
|
||||||
|
},
|
||||||
|
"timeline_excerpt": [
|
||||||
|
{ "timestamp": "...", "event_type": "state_changed", "actor": "teernisse", "summary": "State changed to closed" }
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"meta": { "elapsed_ms": 350 }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Success Criteria
|
||||||
|
|
||||||
|
| # | Criterion | Input | Expected Output |
|
||||||
|
|---|-----------|-------|----------------|
|
||||||
|
| 1 | Issue explain produces all 7 sections | `lore -J explain issues N` | JSON with entity, description_excerpt, key_decisions, activity, open_threads, related, timeline_excerpt |
|
||||||
|
| 2 | MR explain produces all 7 sections | `lore -J explain mrs N` | Same shape, entity.type = "merge_request" |
|
||||||
|
| 3 | Key decisions captures correlated notes | State change + note by same actor within 60min | KeyDecision with action + context_note |
|
||||||
|
| 4 | Key decisions ignores unrelated notes | Note by different author near state change | Not in key_decisions array |
|
||||||
|
| 5 | Open threads filters correctly | 2 discussions: 1 resolved, 1 unresolved | Only unresolved in open_threads |
|
||||||
|
| 6 | Activity counts are accurate | 3 state events, 2 label events, 10 notes | Matching counts in activity section |
|
||||||
|
| 7 | Performance | Issue with 50 notes | <500ms |
|
||||||
|
| 8 | Entity not found | Non-existent IID | Exit code 17, suggestion to sync |
|
||||||
|
| 9 | Ambiguous project | IID exists in multiple projects, no -p | Exit code 18, suggestion to use -p |
|
||||||
|
| 10 | Human render | `lore explain issues N` (no -J) | Formatted narrative with headers |
|
||||||
|
| 11 | Singular entity type accepted | `lore explain issue 42` | Same as `lore explain issues 42` |
|
||||||
|
| 12 | Section filtering works | `--sections key_decisions,activity` | Only those 2 sections + entity in JSON |
|
||||||
|
| 13 | No-timeline skips timeline | `--no-timeline` | timeline_excerpt absent, faster execution |
|
||||||
|
| 14 | Max-decisions caps output | `--max-decisions 3` | At most 3 key_decisions |
|
||||||
|
| 15 | Since scopes events/notes | `--since 30d` | Only events/notes from last 30 days in activity, key_decisions |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Non-Goals
|
||||||
|
|
||||||
|
- **No LLM summarization** — This is template-based v1. LLM enhancement is a separate future feature.
|
||||||
|
- **No new database migrations** — Uses existing schema (resource_state_events, resource_label_events, discussions, notes, entity_references tables all exist).
|
||||||
|
- **No modification of show/ or timeline/ modules** — Copy patterns, don't refactor existing code. If we later want to share code, that's a separate refactoring bead.
|
||||||
|
- **No interactive mode** — Output only, no prompts or follow-up questions.
|
||||||
|
- **No MR diff analysis** — No file-level change summaries. Use `file-history` or `trace` for that.
|
||||||
|
- **No assignee/reviewer history** — Activity summary counts events but doesn't track assignment changes over time.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Tasks
|
||||||
|
|
||||||
|
### Phase 1: Setup & Registration
|
||||||
|
|
||||||
|
- [ ] **Task 1:** Register explain command in CLI and wire dispatch
|
||||||
|
- **Implements:** Infrastructure (UJ-1, UJ-2 prerequisite)
|
||||||
|
- **Files:** `src/cli/mod.rs`, `src/cli/commands/mod.rs`, `src/main.rs`, `src/app/handlers.rs`, NEW `src/cli/commands/explain.rs`
|
||||||
|
- **Depends on:** Nothing
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_issue_basic` in explain.rs: insert a minimal issue + project + 1 discussion + 1 note + 1 state event into in-memory DB, call `run_explain()` with default ExplainParams, assert all 7 top-level sections present in result
|
||||||
|
2. Write `test_explain_mr` in explain.rs: insert MR with merged_at, call `run_explain()`, assert `entity.type == "merge_request"` and merged_at is populated
|
||||||
|
3. Write `test_explain_singular_entity_type`: call with `entity_type: "issue"`, assert it resolves same as `"issues"`
|
||||||
|
4. Run tests — all must FAIL (red)
|
||||||
|
5. Implement: Explain variant in Commands enum (with all flags: `--sections`, `--no-timeline`, `--max-decisions`, `--since`, singular entity type acceptance), handle_explain in handlers.rs (normalize entity_type, parse --since, build ExplainParams), skeleton `run_explain()` in explain.rs
|
||||||
|
6. Run tests — all must PASS (green)
|
||||||
|
- **Acceptance:** `cargo test explain::tests::test_explain_issue_basic`, `test_explain_mr`, and `test_explain_singular_entity_type` pass. Command registered in CLI help with after_help examples block.
|
||||||
|
- **Implementation notes:**
|
||||||
|
- Use inline args pattern (like Drift) with all flags from Code Style section
|
||||||
|
- `entity_type` validated by `#[arg(value_parser = ["issues", "mrs", "issue", "mr"])]`
|
||||||
|
- Normalize in handler: `"issue"` -> `"issues"`, `"mr"` -> `"mrs"`
|
||||||
|
- Parse `--since` using `crate::core::time::parse_since()` — returns ms epoch threshold
|
||||||
|
- Validate `--sections` values against allowed set: `["entity", "description", "key_decisions", "activity", "open_threads", "related", "timeline"]`
|
||||||
|
- Copy the `find_issue`/`find_mr` and `get_*` query patterns from show/issue.rs and show/mr.rs — they're private functions so can't be imported
|
||||||
|
- Use `resolve_project()` from `crate::core::project` for project resolution
|
||||||
|
- Use `ms_to_iso()` from `crate::core::time` for timestamp conversion
|
||||||
|
|
||||||
|
### Phase 2: Core Logic
|
||||||
|
|
||||||
|
- [ ] **Task 2:** Implement key-decisions heuristic
|
||||||
|
- **Implements:** UJ-3
|
||||||
|
- **Files:** `src/cli/commands/explain.rs`
|
||||||
|
- **Depends on:** Task 1
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_key_decision_heuristic`: insert state change event at T, insert note by SAME author at T+30min, call `extract_key_decisions()`, assert 1 decision with correct action + context_note
|
||||||
|
2. Write `test_explain_key_decision_ignores_unrelated_notes`: insert state change by alice, insert note by bob at T+30min, assert 0 decisions
|
||||||
|
3. Write `test_explain_key_decision_label_event`: insert label add event + correlated note, assert decision.action starts with "label: +"
|
||||||
|
4. Write `test_explain_max_decisions`: insert 5 correlated event+note pairs, call with `max_decisions: 3`, assert exactly 3 decisions returned
|
||||||
|
5. Write `test_explain_since_scopes_events`: insert event at T-60d and event at T-10d, call with `since: Some(T-30d)`, assert only recent event appears
|
||||||
|
6. Run tests — all must FAIL (red)
|
||||||
|
7. Implement `extract_key_decisions()` function:
|
||||||
|
- Query resource_state_events and resource_label_events for entity (with optional `--since` filter)
|
||||||
|
- Merge into unified chronological list
|
||||||
|
- For each event, find first non-system note by same actor within 60min (notes also `--since` filtered)
|
||||||
|
- Cap at `params.max_decisions`
|
||||||
|
8. Run tests — all must PASS (green)
|
||||||
|
- **Acceptance:** All 5 tests pass. Heuristic correctly correlates events with explanatory notes. `--max-decisions` and `--since` respected.
|
||||||
|
- **Implementation notes:**
|
||||||
|
- State events query: `SELECT state, actor_username, created_at FROM resource_state_events WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2) ORDER BY created_at`
|
||||||
|
- Label events query: `SELECT action, label_name, actor_username, created_at FROM resource_label_events WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2) ORDER BY created_at`
|
||||||
|
- Notes query: `SELECT n.body, n.author_username, n.created_at FROM notes n JOIN discussions d ON n.discussion_id = d.id WHERE d.{id_col} = ?1 AND n.is_system = 0 AND (?2 IS NULL OR n.created_at >= ?2) ORDER BY n.created_at`
|
||||||
|
- The `{id_col}` is either `issue_id` or `merge_request_id` based on entity_type
|
||||||
|
- Pass `params.since` (Option<i64>) as the `?2` parameter — NULL means no filter
|
||||||
|
- Use `crate::core::time::ms_to_iso()` for timestamp conversion in output
|
||||||
|
- Truncate context_note to 500 chars using `crate::cli::render::truncate()` or a local helper
|
||||||
|
|
||||||
|
- [ ] **Task 3:** Implement open threads, activity summary, and cross-references
|
||||||
|
- **Implements:** UJ-1
|
||||||
|
- **Files:** `src/cli/commands/explain.rs`
|
||||||
|
- **Depends on:** Task 1
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_open_threads`: insert 2 discussions (1 with resolved=0 resolvable=1, 1 with resolved=1 resolvable=1), assert only unresolved appears in open_threads
|
||||||
|
2. Write `test_explain_activity_counts`: insert 3 state events + 2 label events + 10 non-system notes, assert activity.state_changes=3, label_changes=2, notes=10
|
||||||
|
3. Write `test_explain_no_notes`: insert issue with zero notes and zero events, assert empty key_decisions, empty open_threads, activity all zeros, description_excerpt = "(no description)" if description is NULL
|
||||||
|
4. Run tests — all must FAIL (red)
|
||||||
|
5. Implement:
|
||||||
|
- `fetch_open_threads()`: query discussions WHERE resolvable=1 AND resolved=0, fetch first note author + note count per thread
|
||||||
|
- `build_activity_summary()`: count state events, label events, non-system notes, find min/max timestamps
|
||||||
|
- `fetch_related_entities()`: query entity_references in both directions (source and target)
|
||||||
|
- Description excerpt: first 500 chars of description, or "(no description)" if NULL
|
||||||
|
6. Run tests — all must PASS (green)
|
||||||
|
- **Acceptance:** All 3 tests pass. Open threads correctly filtered. Activity counts accurate. Empty entity handled gracefully.
|
||||||
|
- **Implementation notes:**
|
||||||
|
- Open threads query: `SELECT d.gitlab_discussion_id, d.first_note_at, d.last_note_at FROM discussions d WHERE d.{id_col} = ?1 AND d.resolvable = 1 AND d.resolved = 0 ORDER BY d.last_note_at DESC`
|
||||||
|
- For first note author: `SELECT author_username FROM notes WHERE discussion_id = ?1 ORDER BY created_at ASC LIMIT 1`
|
||||||
|
- For note count: `SELECT COUNT(*) FROM notes WHERE discussion_id = ?1 AND is_system = 0`
|
||||||
|
- Cross-references: both outgoing and incoming from entity_references table
|
||||||
|
- For closing MRs, reuse the query pattern from show/issue.rs `get_closing_mrs()`
|
||||||
|
|
||||||
|
- [ ] **Task 4:** Wire timeline excerpt using existing pipeline
|
||||||
|
- **Implements:** UJ-1
|
||||||
|
- **Files:** `src/cli/commands/explain.rs`
|
||||||
|
- **Depends on:** Task 1
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_timeline_excerpt`: insert issue + state events + notes, call run_explain() with `no_timeline: false`, assert timeline_excerpt is Some and non-empty and capped at 20 events
|
||||||
|
2. Write `test_explain_no_timeline_flag`: call run_explain() with `no_timeline: true`, assert timeline_excerpt is None
|
||||||
|
3. Run tests — both must FAIL (red)
|
||||||
|
4. Implement: when `!params.no_timeline` and `--sections` includes "timeline" (or is None), call `seed_timeline_direct()` with entity type + IID, then `collect_events()`, convert first 20 TimelineEvents into TimelineEventSummary structs. Otherwise set timeline_excerpt to None.
|
||||||
|
5. Run tests — both must PASS (green)
|
||||||
|
- **Acceptance:** Timeline excerpt present with max 20 events when enabled. Skipped entirely when `--no-timeline`. Uses existing timeline pipeline (no reimplementation).
|
||||||
|
- **Implementation notes:**
|
||||||
|
- Import: `use crate::timeline::seed::seed_timeline_direct;` and `use crate::timeline::collect::collect_events;`
|
||||||
|
- `seed_timeline_direct()` takes `(conn, entity_type, iid, project_id)` — verify exact signature before implementing
|
||||||
|
- `collect_events()` returns `Vec<TimelineEvent>` — map to simplified `TimelineEventSummary` (timestamp, event_type string, actor, summary)
|
||||||
|
- Timeline pipeline uses `EntityRef` struct from `crate::timeline::types` — needs entity's local DB id and project_path
|
||||||
|
- Cap at 20 events: `events.truncate(20)` after collection
|
||||||
|
- `--no-timeline` takes precedence over `--sections timeline` (if both specified, skip timeline)
|
||||||
|
|
||||||
|
### Phase 3: Output Rendering
|
||||||
|
|
||||||
|
- [ ] **Task 5:** Robot mode JSON output and human-readable rendering
|
||||||
|
- **Implements:** UJ-1, UJ-2
|
||||||
|
- **Files:** `src/cli/commands/explain.rs`, `src/app/robot_docs.rs`
|
||||||
|
- **Depends on:** Task 1, 2, 3, 4
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_robot_output_shape`: call run_explain() with all sections, serialize to JSON, assert all 7 top-level keys present
|
||||||
|
2. Write `test_explain_sections_filter_robot`: call run_explain() with `sections: Some(vec!["key_decisions", "activity"])`, serialize, assert only `entity` + `key_decisions` + `activity` keys present (entity always included), assert `description_excerpt`, `open_threads`, `related`, `timeline_excerpt` are absent
|
||||||
|
3. Run tests — both must FAIL (red)
|
||||||
|
4. Implement:
|
||||||
|
- Robot mode: `print_explain_json()` wrapping ExplainResult in `{"ok": true, "data": ..., "meta": {...}}` envelope. `#[serde(skip_serializing_if = "Option::is_none")]` on optional sections handles filtering automatically.
|
||||||
|
- Human mode: `print_explain()` with section headers, colored output, indented key decisions, truncated descriptions. Check `params.sections` before rendering each section.
|
||||||
|
- Register in robot-docs manifest (include `--sections`, `--no-timeline`, `--max-decisions`, `--since` flags)
|
||||||
|
5. Run tests — both must PASS (green)
|
||||||
|
- **Acceptance:** Robot JSON matches schema. Section filtering works in both robot and human mode. Command appears in `lore robot-docs`.
|
||||||
|
- **Implementation notes:**
|
||||||
|
- Robot envelope: use `serde_json::json!()` with `RobotMeta` from `crate::cli::robot`
|
||||||
|
- Human rendering: use `Theme::bold()`, `Icons`, `render::truncate()` from `crate::cli::render`
|
||||||
|
- Follow timeline.rs rendering pattern: header with entity info -> separator line -> sections
|
||||||
|
- Register in robot_docs.rs following the existing pattern for other commands
|
||||||
|
- Section filtering: the `run_explain()` function should already return None for unselected sections. The serializer skips them. Human renderer checks `is_some()` before rendering.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Corrections from Original Bead
|
||||||
|
|
||||||
|
The bead (bd-9lbr) was created before a codebase rearchitecture. Key corrections:
|
||||||
|
|
||||||
|
1. **`src/core/events_db.rs` does not exist** — Event storage is in `src/ingestion/storage/events.rs` (insert only). Event queries are inline in `timeline/collect.rs`. Explain needs its own inline queries.
|
||||||
|
|
||||||
|
2. **`ResourceStateEvent` / `ResourceLabelEvent` structs don't exist** — The timeline queries raw rows directly. Explain should define lightweight local structs or use tuples.
|
||||||
|
|
||||||
|
3. **`run_show_issue()` / `run_show_mr()` are private** — They live in `include!()` files inside show/mod.rs. Cannot be imported. Copy the query patterns instead.
|
||||||
|
|
||||||
|
4. **bd-2g50 blocker is CLOSED** — `IssueDetail` already has `closed_at`, `references_full`, `user_notes_count`, `confidential`. No blocker.
|
||||||
|
|
||||||
|
5. **Clap registration pattern** — The bead shows args directly on the enum variant, which is correct for explain's simple args (matches Drift, Related pattern). No need for a separate ExplainArgs struct.
|
||||||
|
|
||||||
|
6. **entity_references has no fetch query** — Only `insert_entity_reference()` and `count_references_for_source()` exist. Explain needs a new SELECT query (inline in explain.rs).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Session Log
|
||||||
|
|
||||||
|
### Session 1 — 2026-03-10
|
||||||
|
- Read bead bd-9lbr thoroughly — exceptionally detailed but written before rearchitecture
|
||||||
|
- Verified infrastructure: show/ (private functions, copy patterns), timeline/ (importable pipeline), events (inline SQL, no typed structs), xref (no fetch query), discussions (resolvable/resolved confirmed in migration 028)
|
||||||
|
- Discovered bd-2g50 blocker is CLOSED — no dependency
|
||||||
|
- Decided: two positional args (`lore explain issues N`) over single query syntax
|
||||||
|
- Decided: formalize + gap-fill approach (bead is thorough, just needs updating)
|
||||||
|
- Documented 6 corrections from original bead to current codebase state
|
||||||
|
- Drafted complete spec with 5 tasks across 3 phases
|
||||||
|
|
||||||
|
### Session 1b — 2026-03-10 (CLI UX Audit)
|
||||||
|
- Audited full CLI surface (30+ commands) against explain's proposed UX
|
||||||
|
- Identified 8 improvements, user selected 6 to incorporate:
|
||||||
|
1. **after_help examples block** — every other lore command has this, explain was missing it
|
||||||
|
2. **--sections flag** — robot token efficiency, skip unselected sections entirely
|
||||||
|
4. **Singular entity type tolerance** — accept `issue`/`mr` alongside `issues`/`mrs`
|
||||||
|
5. **--no-timeline flag** — skip heaviest section for faster execution
|
||||||
|
7. **--max-decisions N flag** — user control over key_decisions cap (default 10)
|
||||||
|
8. **--since flag** — time-scope events/notes for long-lived entities
|
||||||
|
- Skipped: #3 (command aliases ex/narrative), #6 (#42/!99 shorthand)
|
||||||
|
- Updated: Code Style, Boundaries, Architecture (ExplainParams + ExplainResult types, section filtering, time scoping, SQL queries), Success Criteria (+5 new), Testing Strategy (+5 new tests), all 5 Tasks
|
||||||
|
- ExplainResult sections now `Option<T>` with `skip_serializing_if` for section filtering
|
||||||
|
- All sections remain complete — spec is ready for implementation
|
||||||
3
src/app/dispatch.rs
Normal file
3
src/app/dispatch.rs
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
// Compose the app layer by textually splicing the sibling source files
// into this module. `include!` (unlike `mod`) puts their items in this
// file's scope, so private helpers are shared across the three files.
include!("errors.rs");
include!("handlers.rs");
include!("robot_docs.rs");
|
||||||
486
src/app/errors.rs
Normal file
486
src/app/errors.rs
Normal file
@@ -0,0 +1,486 @@
|
|||||||
|
/// Minimal robot-mode error envelope, serialized as `{"error": {...}}`.
/// Used by `handle_error` both for non-`LoreError` failures and as the
/// fallback when the primary `RobotErrorOutput` fails to serialize.
#[derive(Serialize)]
struct FallbackErrorOutput {
    error: FallbackError,
}

/// Body of [`FallbackErrorOutput`]: a machine-readable error code, a
/// human-readable message, and optional remediation hints.
#[derive(Serialize)]
struct FallbackError {
    code: String,
    message: String,
    // One-line hint for the user; omitted from the JSON when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    suggestion: Option<String>,
    // Suggested follow-up actions; omitted from the JSON when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    actions: Vec<String>,
}
|
||||||
|
|
||||||
|
/// Report a fatal error on stderr and terminate the process (never returns).
///
/// Errors that downcast to `LoreError` exit with that error's own
/// `exit_code()` and get full treatment: a structured JSON payload in
/// robot mode, or themed human output (message, optional suggestion,
/// follow-up actions) otherwise. Any other error is reported as a
/// generic `INTERNAL_ERROR` and exits with code 1.
fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
    if let Some(gi_error) = e.downcast_ref::<LoreError>() {
        if robot_mode {
            let output = RobotErrorOutput::from(gi_error);
            eprintln!(
                "{}",
                serde_json::to_string(&output).unwrap_or_else(|_| {
                    // Rich payload failed to serialize: fall back to the
                    // minimal envelope, and to a hard-coded JSON literal
                    // if even that fails.
                    let fallback = FallbackErrorOutput {
                        error: FallbackError {
                            code: "INTERNAL_ERROR".to_string(),
                            message: gi_error.to_string(),
                            suggestion: None,
                            actions: Vec::new(),
                        },
                    };
                    serde_json::to_string(&fallback)
                        .unwrap_or_else(|_| r#"{"error":{"code":"INTERNAL_ERROR","message":"Serialization failed"}}"#.to_string())
                })
            );
            std::process::exit(gi_error.exit_code());
        } else {
            // Human mode: blank-line padded, icon + bold message first.
            eprintln!();
            eprintln!(
                " {} {}",
                Theme::error().render(Icons::error()),
                Theme::error().bold().render(&gi_error.to_string())
            );
            if let Some(suggestion) = gi_error.suggestion() {
                eprintln!();
                eprintln!(" {suggestion}");
            }
            let actions = gi_error.actions();
            if !actions.is_empty() {
                eprintln!();
                for action in &actions {
                    eprintln!(
                        " {} {}",
                        // "\u{2192}" is a right arrow bullet for each action.
                        Theme::dim().render("\u{2192}"),
                        Theme::bold().render(action)
                    );
                }
            }
            eprintln!();
            std::process::exit(gi_error.exit_code());
        }
    }

    // Not a LoreError: generic INTERNAL_ERROR reporting, exit code 1.
    if robot_mode {
        let output = FallbackErrorOutput {
            error: FallbackError {
                code: "INTERNAL_ERROR".to_string(),
                message: e.to_string(),
                suggestion: None,
                actions: Vec::new(),
            },
        };
        eprintln!(
            "{}",
            serde_json::to_string(&output).unwrap_or_else(|_| {
                r#"{"error":{"code":"INTERNAL_ERROR","message":"Serialization failed"}}"#
                    .to_string()
            })
        );
    } else {
        eprintln!();
        eprintln!(
            " {} {}",
            Theme::error().render(Icons::error()),
            Theme::error().bold().render(&e.to_string())
        );
        eprintln!();
    }
    std::process::exit(1);
}
|
||||||
|
|
||||||
|
/// Emit stderr warnings for any corrections applied during Phase 1.5.
///
/// Robot mode prints a single JSON object shaped
/// `{"warning": {"type": "ARG_CORRECTED", "corrections": [...], "teaching": [...]}}`;
/// human mode prints one themed "Auto-corrected:" line per correction.
/// All output goes to stderr.
fn emit_correction_warnings(result: &CorrectionResult, robot_mode: bool) {
    if robot_mode {
        // Serialization-only wrappers, local to this branch; they borrow
        // the corrections slice rather than cloning it.
        #[derive(Serialize)]
        struct CorrectionWarning<'a> {
            warning: CorrectionWarningInner<'a>,
        }
        #[derive(Serialize)]
        struct CorrectionWarningInner<'a> {
            // `r#type` emits a JSON key named "type" (raw identifier).
            r#type: &'static str,
            corrections: &'a [autocorrect::Correction],
            teaching: Vec<String>,
        }

        // Human-readable explanation string for each correction.
        let teaching: Vec<String> = result
            .corrections
            .iter()
            .map(autocorrect::format_teaching_note)
            .collect();

        let warning = CorrectionWarning {
            warning: CorrectionWarningInner {
                r#type: "ARG_CORRECTED",
                corrections: &result.corrections,
                teaching,
            },
        };
        // Best-effort: if serialization fails, the warning is dropped.
        if let Ok(json) = serde_json::to_string(&warning) {
            eprintln!("{json}");
        }
    } else {
        for c in &result.corrections {
            eprintln!(
                "{} {}",
                Theme::warning().render("Auto-corrected:"),
                autocorrect::format_teaching_note(c)
            );
        }
    }
}
|
||||||
|
|
||||||
|
/// Phase 1 & 4: Handle clap parsing errors with structured JSON output in robot mode.
/// Also includes fuzzy command matching and flag-level suggestions.
///
/// Never returns: either delegates to `clap::Error::exit` (help/version or
/// human mode) or prints a JSON error envelope to stderr and exits with
/// status 2 (robot mode).
///
/// * `e` - the parse error produced by clap.
/// * `robot_mode` - when true, emit machine-readable JSON instead of clap's text.
/// * `corrections` - pre-clap auto-correction state, used for flag fuzzy matching.
fn handle_clap_error(e: clap::Error, robot_mode: bool, corrections: &CorrectionResult) -> ! {
    use clap::error::ErrorKind;

    // Always let clap handle --help and --version normally (print and exit 0).
    // These are intentional user actions, not errors, even when stdout is redirected.
    if matches!(e.kind(), ErrorKind::DisplayHelp | ErrorKind::DisplayVersion) {
        e.exit()
    }

    if robot_mode {
        let error_code = map_clap_error_kind(e.kind());
        let full_msg = e.to_string();
        // Clap messages are multi-line (message, usage, hint); keep the first
        // three lines joined so the JSON "message" stays a single string.
        let message = full_msg
            .lines()
            .take(3)
            .collect::<Vec<_>>()
            .join("; ")
            .trim()
            .to_string();

        // Per-kind recovery hints: (suggestion text, corrected flag if any,
        // list of valid values if known).
        let (suggestion, correction, valid_values) = match e.kind() {
            // Phase 4: Suggest similar command for unknown subcommands
            ErrorKind::InvalidSubcommand => {
                let suggestion = if let Some(invalid_cmd) = extract_invalid_subcommand(&e) {
                    suggest_similar_command(&invalid_cmd)
                } else {
                    "Run 'lore robot-docs' for valid commands".to_string()
                };
                (suggestion, None, None)
            }
            // Flag-level fuzzy matching for unknown flags
            ErrorKind::UnknownArgument => {
                let invalid_flag = extract_invalid_flag(&e);
                let similar = invalid_flag
                    .as_deref()
                    .and_then(|flag| autocorrect::suggest_similar_flag(flag, &corrections.args));
                let suggestion = if let Some(ref s) = similar {
                    format!("Did you mean '{s}'? Run 'lore robot-docs' for all flags")
                } else {
                    "Run 'lore robot-docs' for valid flags".to_string()
                };
                (suggestion, similar, None)
            }
            // Value-level suggestions for invalid enum values
            ErrorKind::InvalidValue => {
                let (flag, valid_vals) = extract_invalid_value_context(&e);
                // Prefer the values clap itself listed; fall back to the
                // static registry in `autocorrect` when the message had none.
                let suggestion = if let Some(vals) = &valid_vals {
                    format!(
                        "Valid values: {}. Run 'lore robot-docs' for details",
                        vals.join(", ")
                    )
                } else if let Some(ref f) = flag {
                    if let Some(vals) = autocorrect::valid_values_for_flag(f) {
                        format!("Valid values for {f}: {}", vals.join(", "))
                    } else {
                        "Run 'lore robot-docs' for valid values".to_string()
                    }
                } else {
                    "Run 'lore robot-docs' for valid values".to_string()
                };
                let vals_vec = valid_vals.or_else(|| {
                    flag.as_deref()
                        .and_then(autocorrect::valid_values_for_flag)
                        .map(|v| v.iter().map(|s| (*s).to_string()).collect())
                });
                (suggestion, None, vals_vec)
            }
            ErrorKind::MissingRequiredArgument => {
                // Include a concrete usage example when we can tell which
                // subcommand was being invoked.
                let suggestion = format!(
                    "A required argument is missing. {}",
                    if let Some(subcmd) = extract_subcommand_from_context(&e) {
                        format!(
                            "Example: {}. Run 'lore {subcmd} --help' for required arguments",
                            command_example(&subcmd)
                        )
                    } else {
                        "Run 'lore robot-docs' for command reference".to_string()
                    }
                );
                (suggestion, None, None)
            }
            ErrorKind::MissingSubcommand => {
                let suggestion =
                    "No command specified. Common commands: issues, mrs, search, sync, \
                     timeline, who, me. Run 'lore robot-docs' for the full list"
                        .to_string();
                (suggestion, None, None)
            }
            ErrorKind::TooFewValues | ErrorKind::TooManyValues => {
                let suggestion = if let Some(subcmd) = extract_subcommand_from_context(&e) {
                    format!(
                        "Example: {}. Run 'lore {subcmd} --help' for usage",
                        command_example(&subcmd)
                    )
                } else {
                    "Run 'lore robot-docs' for command reference".to_string()
                };
                (suggestion, None, None)
            }
            _ => (
                "Run 'lore robot-docs' for valid commands".to_string(),
                None,
                None,
            ),
        };

        let output = RobotErrorWithSuggestion {
            error: RobotErrorSuggestionData {
                code: error_code.to_string(),
                message,
                suggestion,
                correction,
                valid_values,
            },
        };
        // Serialization failure is practically impossible for this shape, but
        // fall back to a hand-written minimal envelope rather than panic.
        eprintln!(
            "{}",
            serde_json::to_string(&output).unwrap_or_else(|_| {
                r#"{"error":{"code":"PARSE_ERROR","message":"Parse error"}}"#.to_string()
            })
        );
        std::process::exit(2);
    } else {
        e.exit()
    }
}
||||||
|
/// Map clap ErrorKind to semantic error codes
|
||||||
|
fn map_clap_error_kind(kind: clap::error::ErrorKind) -> &'static str {
|
||||||
|
use clap::error::ErrorKind;
|
||||||
|
match kind {
|
||||||
|
ErrorKind::InvalidSubcommand => "UNKNOWN_COMMAND",
|
||||||
|
ErrorKind::UnknownArgument => "UNKNOWN_FLAG",
|
||||||
|
ErrorKind::MissingRequiredArgument => "MISSING_REQUIRED",
|
||||||
|
ErrorKind::InvalidValue => "INVALID_VALUE",
|
||||||
|
ErrorKind::ValueValidation => "INVALID_VALUE",
|
||||||
|
ErrorKind::TooManyValues => "TOO_MANY_VALUES",
|
||||||
|
ErrorKind::TooFewValues => "TOO_FEW_VALUES",
|
||||||
|
ErrorKind::ArgumentConflict => "ARGUMENT_CONFLICT",
|
||||||
|
ErrorKind::MissingSubcommand => "MISSING_COMMAND",
|
||||||
|
ErrorKind::DisplayHelp | ErrorKind::DisplayVersion => "HELP_REQUESTED",
|
||||||
|
_ => "PARSE_ERROR",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract the invalid subcommand from a clap error (Phase 4)
|
||||||
|
fn extract_invalid_subcommand(e: &clap::Error) -> Option<String> {
|
||||||
|
// Parse the error message to find the invalid subcommand
|
||||||
|
// Format is typically: "error: unrecognized subcommand 'foo'"
|
||||||
|
let msg = e.to_string();
|
||||||
|
if let Some(start) = msg.find('\'')
|
||||||
|
&& let Some(end) = msg[start + 1..].find('\'')
|
||||||
|
{
|
||||||
|
return Some(msg[start + 1..start + 1 + end].to_string());
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract the invalid flag from a clap UnknownArgument error.
|
||||||
|
/// Format is typically: "error: unexpected argument '--xyzzy' found"
|
||||||
|
fn extract_invalid_flag(e: &clap::Error) -> Option<String> {
|
||||||
|
let msg = e.to_string();
|
||||||
|
if let Some(start) = msg.find('\'')
|
||||||
|
&& let Some(end) = msg[start + 1..].find('\'')
|
||||||
|
{
|
||||||
|
let value = &msg[start + 1..start + 1 + end];
|
||||||
|
if value.starts_with('-') {
|
||||||
|
return Some(value.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract flag name and valid values from a clap InvalidValue error.
|
||||||
|
/// Returns (flag_name, valid_values_if_listed_in_error).
|
||||||
|
fn extract_invalid_value_context(e: &clap::Error) -> (Option<String>, Option<Vec<String>>) {
|
||||||
|
let msg = e.to_string();
|
||||||
|
|
||||||
|
// Try to find the flag name from "[possible values: ...]" pattern or from the arg info
|
||||||
|
// Clap format: "error: invalid value 'opend' for '--state <STATE>'"
|
||||||
|
let flag = if let Some(for_pos) = msg.find("for '") {
|
||||||
|
let after_for = &msg[for_pos + 5..];
|
||||||
|
if let Some(end) = after_for.find('\'') {
|
||||||
|
let raw = &after_for[..end];
|
||||||
|
// Strip angle-bracket value placeholder: "--state <STATE>" -> "--state"
|
||||||
|
Some(raw.split_whitespace().next().unwrap_or(raw).to_string())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
// Try to extract possible values from the error message
|
||||||
|
// Clap format: "[possible values: opened, closed, merged, locked, all]"
|
||||||
|
let valid_values = if let Some(pv_pos) = msg.find("[possible values: ") {
|
||||||
|
let after_pv = &msg[pv_pos + 18..];
|
||||||
|
after_pv.find(']').map(|end| {
|
||||||
|
after_pv[..end]
|
||||||
|
.split(", ")
|
||||||
|
.map(|s| s.trim().to_string())
|
||||||
|
.collect()
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
// Fall back to our static registry
|
||||||
|
flag.as_deref()
|
||||||
|
.and_then(autocorrect::valid_values_for_flag)
|
||||||
|
.map(|v| v.iter().map(|s| (*s).to_string()).collect())
|
||||||
|
};
|
||||||
|
|
||||||
|
(flag, valid_values)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract the subcommand context from a clap error for better suggestions.
|
||||||
|
/// Looks at the error message to find which command was being invoked.
|
||||||
|
fn extract_subcommand_from_context(e: &clap::Error) -> Option<String> {
|
||||||
|
let msg = e.to_string();
|
||||||
|
|
||||||
|
let known = [
|
||||||
|
"issues",
|
||||||
|
"mrs",
|
||||||
|
"notes",
|
||||||
|
"search",
|
||||||
|
"sync",
|
||||||
|
"ingest",
|
||||||
|
"count",
|
||||||
|
"status",
|
||||||
|
"auth",
|
||||||
|
"doctor",
|
||||||
|
"stats",
|
||||||
|
"timeline",
|
||||||
|
"who",
|
||||||
|
"me",
|
||||||
|
"drift",
|
||||||
|
"related",
|
||||||
|
"trace",
|
||||||
|
"file-history",
|
||||||
|
"generate-docs",
|
||||||
|
"embed",
|
||||||
|
"token",
|
||||||
|
"cron",
|
||||||
|
"init",
|
||||||
|
"migrate",
|
||||||
|
];
|
||||||
|
for cmd in known {
|
||||||
|
if msg.contains(&format!("lore {cmd}")) || msg.contains(&format!("'{cmd}'")) {
|
||||||
|
return Some(cmd.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Phase 4: Suggest similar command using fuzzy matching
|
||||||
|
fn suggest_similar_command(invalid: &str) -> String {
|
||||||
|
// Primary commands + common aliases for fuzzy matching
|
||||||
|
const VALID_COMMANDS: &[(&str, &str)] = &[
|
||||||
|
("issues", "issues"),
|
||||||
|
("issue", "issues"),
|
||||||
|
("mrs", "mrs"),
|
||||||
|
("mr", "mrs"),
|
||||||
|
("merge-requests", "mrs"),
|
||||||
|
("search", "search"),
|
||||||
|
("find", "search"),
|
||||||
|
("query", "search"),
|
||||||
|
("sync", "sync"),
|
||||||
|
("ingest", "ingest"),
|
||||||
|
("count", "count"),
|
||||||
|
("status", "status"),
|
||||||
|
("auth", "auth"),
|
||||||
|
("doctor", "doctor"),
|
||||||
|
("version", "version"),
|
||||||
|
("init", "init"),
|
||||||
|
("stats", "stats"),
|
||||||
|
("stat", "stats"),
|
||||||
|
("generate-docs", "generate-docs"),
|
||||||
|
("embed", "embed"),
|
||||||
|
("migrate", "migrate"),
|
||||||
|
("health", "health"),
|
||||||
|
("robot-docs", "robot-docs"),
|
||||||
|
("completions", "completions"),
|
||||||
|
("timeline", "timeline"),
|
||||||
|
("who", "who"),
|
||||||
|
("notes", "notes"),
|
||||||
|
("note", "notes"),
|
||||||
|
("drift", "drift"),
|
||||||
|
("file-history", "file-history"),
|
||||||
|
("trace", "trace"),
|
||||||
|
("related", "related"),
|
||||||
|
("me", "me"),
|
||||||
|
("token", "token"),
|
||||||
|
("cron", "cron"),
|
||||||
|
// Hidden but may be known to agents
|
||||||
|
("list", "list"),
|
||||||
|
("show", "show"),
|
||||||
|
("reset", "reset"),
|
||||||
|
("backup", "backup"),
|
||||||
|
];
|
||||||
|
|
||||||
|
let invalid_lower = invalid.to_lowercase();
|
||||||
|
|
||||||
|
// Find the best match using Jaro-Winkler similarity
|
||||||
|
let best_match = VALID_COMMANDS
|
||||||
|
.iter()
|
||||||
|
.map(|(alias, canonical)| (*canonical, jaro_winkler(&invalid_lower, alias)))
|
||||||
|
.max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal));
|
||||||
|
|
||||||
|
if let Some((cmd, score)) = best_match
|
||||||
|
&& score > 0.7
|
||||||
|
{
|
||||||
|
let example = command_example(cmd);
|
||||||
|
return format!(
|
||||||
|
"Did you mean 'lore {cmd}'? Example: {example}. Run 'lore robot-docs' for all commands"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
"Run 'lore robot-docs' for valid commands. Common: issues, mrs, search, sync, timeline, who"
|
||||||
|
.to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return a contextual usage example for a command, or a generic placeholder
/// for commands without a dedicated example.
fn command_example(cmd: &str) -> &'static str {
    // Kept as a flat table so adding a command is a one-line change.
    const EXAMPLES: &[(&str, &str)] = &[
        ("issues", "lore --robot issues -n 10"),
        ("mrs", "lore --robot mrs -n 10"),
        ("search", "lore --robot search \"auth bug\""),
        ("sync", "lore --robot sync"),
        ("ingest", "lore --robot ingest issues"),
        ("notes", "lore --robot notes --for-issue 123"),
        ("count", "lore --robot count issues"),
        ("status", "lore --robot status"),
        ("stats", "lore --robot stats"),
        ("timeline", "lore --robot timeline \"auth flow\""),
        ("who", "lore --robot who --path src/"),
        ("health", "lore --robot health"),
        ("generate-docs", "lore --robot generate-docs"),
        ("embed", "lore --robot embed"),
        ("robot-docs", "lore robot-docs"),
        ("trace", "lore --robot trace src/main.rs"),
        ("init", "lore init"),
        ("related", "lore --robot related issues 42 -n 5"),
        ("me", "lore --robot me"),
        ("drift", "lore --robot drift issues 42"),
        ("file-history", "lore --robot file-history src/main.rs"),
        ("token", "lore --robot token show"),
        ("cron", "lore --robot cron status"),
        ("auth", "lore --robot auth"),
        ("doctor", "lore --robot doctor"),
        ("migrate", "lore --robot migrate"),
        ("completions", "lore completions bash"),
    ];

    EXAMPLES
        .iter()
        .find(|(name, _)| *name == cmd)
        .map(|(_, example)| *example)
        .unwrap_or("lore --robot <command>")
}
|
|
||||||
2013
src/app/handlers.rs
Normal file
2013
src/app/handlers.rs
Normal file
File diff suppressed because it is too large
Load Diff
795
src/app/robot_docs.rs
Normal file
795
src/app/robot_docs.rs
Normal file
@@ -0,0 +1,795 @@
|
|||||||
|
/// Top-level envelope for the `robot-docs` JSON response.
#[derive(Serialize)]
struct RobotDocsOutput {
    // Mirrors the ok/data envelope used by other robot-mode responses
    // (see the response_schema entries built in handle_robot_docs).
    ok: bool,
    data: RobotDocsData,
}
|
||||||
|
/// Body of the `robot-docs` response: a machine-readable manual of the CLI
/// (commands, flags, schemas, error codes) for agent consumers.
#[derive(Serialize)]
struct RobotDocsData {
    // Tool name and package version (version comes from CARGO_PKG_VERSION).
    name: String,
    version: String,
    description: String,
    /// How robot mode is activated (flags / env / auto-detection).
    activation: RobotDocsActivation,
    quick_start: serde_json::Value,
    /// Per-command reference: description, flags, examples, response schemas.
    commands: serde_json::Value,
    /// Deprecated command aliases (old -> new)
    aliases: serde_json::Value,
    /// Pre-clap error tolerance: what the CLI auto-corrects
    error_tolerance: serde_json::Value,
    exit_codes: serde_json::Value,
    /// Error codes emitted by clap parse failures
    clap_error_codes: serde_json::Value,
    error_format: String,
    workflows: serde_json::Value,
    config_notes: serde_json::Value,
}
|
||||||
|
/// How robot (machine-readable) mode is turned on, as advertised to agents.
#[derive(Serialize)]
struct RobotDocsActivation {
    // CLI flags that enable robot output (e.g. "--robot").
    flags: Vec<String>,
    // NOTE(review): presumably the name/usage of an activating environment
    // variable — confirm against where this struct is populated.
    env: String,
    // NOTE(review): presumably describes automatic activation (e.g. when
    // stdout is not a TTY) — confirm against the populating code.
    auto: String,
}
|
|
||||||
|
fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let version = env!("CARGO_PKG_VERSION").to_string();
|
||||||
|
|
||||||
|
let commands = serde_json::json!({
|
||||||
|
"init": {
|
||||||
|
"description": "Initialize configuration and database",
|
||||||
|
"flags": ["--force", "--non-interactive", "--gitlab-url <URL>", "--token-env-var <VAR>", "--projects <paths>", "--default-project <path>"],
|
||||||
|
"robot_flags": ["--gitlab-url", "--token-env-var", "--projects", "--default-project"],
|
||||||
|
"example": "lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project,other/repo --default-project group/project",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"config_path": "string", "data_dir": "string", "user": {"username": "string", "name": "string"}, "projects": "[{path:string, name:string}]", "default_project": "string?"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"health": {
|
||||||
|
"description": "Quick pre-flight check: config, database, schema version",
|
||||||
|
"flags": [],
|
||||||
|
"example": "lore --robot health",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"healthy": "bool", "config_found": "bool", "db_found": "bool", "schema_current": "bool", "schema_version": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"auth": {
|
||||||
|
"description": "Verify GitLab authentication",
|
||||||
|
"flags": [],
|
||||||
|
"example": "lore --robot auth",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"authenticated": "bool", "username": "string", "name": "string", "gitlab_url": "string"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"doctor": {
|
||||||
|
"description": "Full environment health check (config, auth, DB, Ollama)",
|
||||||
|
"flags": [],
|
||||||
|
"example": "lore --robot doctor",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"success": "bool", "checks": "{config:object, auth:object, database:object, ollama:object}"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ingest": {
|
||||||
|
"description": "Sync data from GitLab",
|
||||||
|
"flags": ["--project <path>", "--force", "--no-force", "--full", "--no-full", "--dry-run", "--no-dry-run", "<entity: issues|mrs>"],
|
||||||
|
"example": "lore --robot ingest issues --project group/repo",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"resource_type": "string", "projects_synced": "int", "issues_fetched?": "int", "mrs_fetched?": "int", "upserted": "int", "labels_created": "int", "discussions_fetched": "int", "notes_upserted": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"sync": {
|
||||||
|
"description": "Full sync pipeline: ingest -> generate-docs -> embed. Supports surgical per-IID mode.",
|
||||||
|
"flags": ["--full", "--no-full", "--force", "--no-force", "--no-embed", "--no-docs", "--no-events", "--no-file-changes", "--no-status", "--dry-run", "--no-dry-run", "-t/--timings", "--lock", "--issue <IID>", "--mr <IID>", "-p/--project <path>", "--preflight-only"],
|
||||||
|
"example": "lore --robot sync",
|
||||||
|
"surgical_mode": {
|
||||||
|
"description": "Sync specific issues or MRs by IID. Runs a scoped pipeline: preflight -> TOCTOU check -> ingest -> dependents -> docs -> embed.",
|
||||||
|
"flags": ["--issue <IID> (repeatable)", "--mr <IID> (repeatable)", "-p/--project <path> (required)", "--preflight-only"],
|
||||||
|
"examples": [
|
||||||
|
"lore --robot sync --issue 7 -p group/project",
|
||||||
|
"lore --robot sync --issue 7 --issue 42 --mr 10 -p group/project",
|
||||||
|
"lore --robot sync --issue 7 -p group/project --preflight-only"
|
||||||
|
],
|
||||||
|
"constraints": ["--issue/--mr requires -p/--project (or defaultProject in config)", "--full and --issue/--mr are incompatible", "--preflight-only requires --issue or --mr", "Max 100 total targets"],
|
||||||
|
"entity_result_outcomes": ["synced", "skipped_stale", "not_found", "preflight_failed", "error"]
|
||||||
|
},
|
||||||
|
"response_schema": {
|
||||||
|
"normal": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"issues_updated": "int", "mrs_updated": "int", "documents_regenerated": "int", "documents_embedded": "int", "resource_events_synced": "int", "resource_events_failed": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int", "stages?": "[{name:string, elapsed_ms:int, items_processed:int}]"}
|
||||||
|
},
|
||||||
|
"surgical": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"surgical_mode": "true", "surgical_iids": "{issues:[int], merge_requests:[int]}", "entity_results": "[{entity_type:string, iid:int, outcome:string, error?:string, toctou_reason?:string}]", "preflight_only?": "bool", "issues_updated": "int", "mrs_updated": "int", "documents_regenerated": "int", "documents_embedded": "int", "discussions_fetched": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"issues": {
|
||||||
|
"description": "List issues, or view detail with <IID>",
|
||||||
|
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "--status <name>", "-p/--project", "-a/--author", "-A/--assignee", "-l/--label", "-m/--milestone", "--since", "--due-before", "--has-due", "--no-has-due", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
|
||||||
|
"example": "lore --robot issues --state opened --limit 10",
|
||||||
|
"notes": {
|
||||||
|
"status_filter": "--status filters by work item status NAME (case-insensitive). Valid values are in meta.available_statuses of any issues list response.",
|
||||||
|
"status_name": "status_name is the board column label (e.g. 'In review', 'Blocked'). This is the canonical status identifier for filtering."
|
||||||
|
},
|
||||||
|
"response_schema": {
|
||||||
|
"list": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"issues": "[{iid:int, title:string, state:string, author_username:string, labels:[string], assignees:[string], discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, status_name:string?}]", "total_count": "int", "showing": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int", "available_statuses": "[string] — all distinct status names in the database, for use with --status filter"}
|
||||||
|
},
|
||||||
|
"detail": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": "IssueDetail (full entity with description, discussions, notes, events)",
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"example_output": {"list": {"ok":true,"data":{"issues":[{"iid":3864,"title":"Switch Health Card","state":"opened","status_name":"In progress","labels":["customer:BNSF"],"assignees":["teernisse"],"discussion_count":12,"updated_at_iso":"2026-02-12T..."}],"total_count":1,"showing":1},"meta":{"elapsed_ms":42}}},
|
||||||
|
"fields_presets": {"minimal": ["iid", "title", "state", "updated_at_iso"]}
|
||||||
|
},
|
||||||
|
"mrs": {
|
||||||
|
"description": "List merge requests, or view detail with <IID>",
|
||||||
|
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "-p/--project", "-a/--author", "-A/--assignee", "-r/--reviewer", "-l/--label", "--since", "-d/--draft", "-D/--no-draft", "--target", "--source", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
|
||||||
|
"example": "lore --robot mrs --state opened",
|
||||||
|
"response_schema": {
|
||||||
|
"list": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"mrs": "[{iid:int, title:string, state:string, author_username:string, labels:[string], draft:bool, target_branch:string, source_branch:string, discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, reviewers:[string]}]", "total_count": "int", "showing": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
},
|
||||||
|
"detail": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": "MrDetail (full entity with description, discussions, notes, events)",
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"example_output": {"list": {"ok":true,"data":{"mrs":[{"iid":200,"title":"Add throw time chart","state":"opened","draft":false,"author_username":"teernisse","target_branch":"main","source_branch":"feat/throw-time","reviewers":["cseiber"],"discussion_count":5,"updated_at_iso":"2026-02-11T..."}],"total_count":1,"showing":1},"meta":{"elapsed_ms":38}}},
|
||||||
|
"fields_presets": {"minimal": ["iid", "title", "state", "updated_at_iso"]}
|
||||||
|
},
|
||||||
|
"search": {
|
||||||
|
"description": "Search indexed documents (lexical, hybrid, semantic)",
|
||||||
|
"flags": ["<QUERY>", "--mode", "--type", "--author", "-p/--project", "--label", "--path", "--since", "--updated-since", "-n/--limit", "--fields <list>", "--explain", "--no-explain", "--fts-mode"],
|
||||||
|
"example": "lore --robot search 'authentication bug' --mode hybrid --limit 10",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"results": "[{document_id:int, source_type:string, title:string, snippet:string, score:float, url:string?, author:string?, created_at:string?, updated_at:string?, project_path:string, labels:[string], paths:[string]}]", "total_results": "int", "query": "string", "mode": "string", "warnings": "[string]"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
},
|
||||||
|
"example_output": {"ok":true,"data":{"query":"throw time","mode":"hybrid","total_results":3,"results":[{"document_id":42,"source_type":"issue","title":"Switch Health Card","score":0.92,"snippet":"...throw time data from BNSF...","project_path":"vs/typescript-code"}],"warnings":[]},"meta":{"elapsed_ms":85}},
|
||||||
|
"fields_presets": {"minimal": ["document_id", "title", "source_type", "score"]}
|
||||||
|
},
|
||||||
|
"count": {
|
||||||
|
"description": "Count entities in local database",
|
||||||
|
"flags": ["<entity: issues|mrs|discussions|notes|events>", "-f/--for <issue|mr>"],
|
||||||
|
"example": "lore --robot count issues",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"entity": "string", "count": "int", "system_excluded?": "int", "breakdown?": {"opened": "int", "closed": "int", "merged?": "int", "locked?": "int"}},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"stats": {
|
||||||
|
"description": "Show document and index statistics",
|
||||||
|
"flags": ["--check", "--no-check", "--repair", "--dry-run", "--no-dry-run"],
|
||||||
|
"example": "lore --robot stats",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"total_documents": "int", "indexed_documents": "int", "embedded_documents": "int", "stale_documents": "int", "integrity?": "object"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"description": "Show sync state (cursors, last sync times)",
|
||||||
|
"flags": [],
|
||||||
|
"example": "lore --robot status",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"projects": "[{path:string, issues_cursor:string?, mrs_cursor:string?, last_sync:string?}]"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"generate-docs": {
|
||||||
|
"description": "Generate searchable documents from ingested data",
|
||||||
|
"flags": ["--full", "-p/--project <path>"],
|
||||||
|
"example": "lore --robot generate-docs --full",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"generated": "int", "updated": "int", "unchanged": "int", "deleted": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"embed": {
|
||||||
|
"description": "Generate vector embeddings for documents via Ollama",
|
||||||
|
"flags": ["--full", "--no-full", "--retry-failed", "--no-retry-failed"],
|
||||||
|
"example": "lore --robot embed",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"embedded": "int", "skipped": "int", "failed": "int", "total_chunks": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"migrate": {
|
||||||
|
"description": "Run pending database migrations",
|
||||||
|
"flags": [],
|
||||||
|
"example": "lore --robot migrate",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"before_version": "int", "after_version": "int", "migrated": "bool"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"version": {
|
||||||
|
"description": "Show version information",
|
||||||
|
"flags": [],
|
||||||
|
"example": "lore --robot version",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"version": "string", "git_hash?": "string"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"completions": {
|
||||||
|
"description": "Generate shell completions",
|
||||||
|
"flags": ["<shell: bash|zsh|fish|powershell>"],
|
||||||
|
"example": "lore completions bash > ~/.local/share/bash-completion/completions/lore"
|
||||||
|
},
|
||||||
|
"timeline": {
|
||||||
|
"description": "Chronological timeline of events matching a keyword query or entity reference",
|
||||||
|
"flags": ["<QUERY>", "-p/--project", "--since <duration>", "--depth <n>", "--no-mentions", "-n/--limit", "--fields <list>", "--max-seeds", "--max-entities", "--max-evidence"],
|
||||||
|
"query_syntax": {
|
||||||
|
"search": "Any text -> hybrid search seeding (FTS5 + vector)",
|
||||||
|
"entity_direct": "issue:N, i:N, mr:N, m:N -> direct entity seeding (no search, no Ollama)"
|
||||||
|
},
|
||||||
|
"example": "lore --robot timeline issue:42",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"entities": "[{type:string, iid:int, title:string, project_path:string}]", "events": "[{timestamp:string, type:string, entity_type:string, entity_iid:int, detail:string}]", "total_events": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int", "search_mode": "string (hybrid|lexical|direct)"}
|
||||||
|
},
|
||||||
|
"fields_presets": {"minimal": ["timestamp", "type", "entity_iid", "detail"]}
|
||||||
|
},
|
||||||
|
"who": {
|
||||||
|
"description": "People intelligence: experts, workload, active discussions, overlap, review patterns",
|
||||||
|
"flags": ["<target>", "--path <path>", "--active", "--overlap <path>", "--reviews", "--since <duration>", "-p/--project", "-n/--limit", "--fields <list>", "--detail", "--no-detail", "--as-of <date>", "--explain-score", "--include-bots", "--include-closed", "--all-history"],
|
||||||
|
"modes": {
|
||||||
|
"expert": "lore who <file-path> -- Who knows about this area? (also: --path for root files)",
|
||||||
|
"workload": "lore who <username> -- What is someone working on?",
|
||||||
|
"reviews": "lore who <username> --reviews -- Review pattern analysis",
|
||||||
|
"active": "lore who --active -- Active unresolved discussions",
|
||||||
|
"overlap": "lore who --overlap <path> -- Who else is touching these files?"
|
||||||
|
},
|
||||||
|
"example": "lore --robot who src/features/auth/",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {
|
||||||
|
"mode": "string",
|
||||||
|
"input": {"target": "string|null", "path": "string|null", "project": "string|null", "since": "string|null", "limit": "int"},
|
||||||
|
"resolved_input": {"mode": "string", "project_id": "int|null", "project_path": "string|null", "since_ms": "int", "since_iso": "string", "since_mode": "string (default|explicit|none)", "limit": "int"},
|
||||||
|
"...": "mode-specific fields"
|
||||||
|
},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
},
|
||||||
|
"example_output": {"expert": {"ok":true,"data":{"mode":"expert","result":{"experts":[{"username":"teernisse","score":42,"note_count":15,"diff_note_count":8}]}},"meta":{"elapsed_ms":65}}},
|
||||||
|
"fields_presets": {
|
||||||
|
"expert_minimal": ["username", "score"],
|
||||||
|
"workload_minimal": ["entity_type", "iid", "title", "state"],
|
||||||
|
"active_minimal": ["entity_type", "iid", "title", "participants"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"trace": {
|
||||||
|
"description": "Trace why code was introduced: file -> MR -> issue -> discussion. Follows rename chains by default.",
|
||||||
|
"flags": ["<path>", "-p/--project <path>", "--discussions", "--no-follow-renames", "-n/--limit <N>"],
|
||||||
|
"example": "lore --robot trace src/main.rs -p group/repo",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"path": "string", "resolved_paths": "[string]", "trace_chains": "[{mr_iid:int, mr_title:string, mr_state:string, mr_author:string, change_type:string, merged_at_iso:string?, updated_at_iso:string, web_url:string?, issues:[{iid:int, title:string, state:string, reference_type:string, web_url:string?}], discussions:[{discussion_id:string, mr_iid:int, author_username:string, body_snippet:string, path:string, created_at_iso:string}]}]"},
|
||||||
|
"meta": {"tier": "string (api_only)", "line_requested": "int?", "elapsed_ms": "int", "total_chains": "int", "renames_followed": "bool"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"file-history": {
|
||||||
|
"description": "Show MRs that touched a file, with rename chain resolution and optional DiffNote discussions",
|
||||||
|
"flags": ["<path>", "-p/--project <path>", "--discussions", "--no-follow-renames", "--merged", "-n/--limit <N>"],
|
||||||
|
"example": "lore --robot file-history src/main.rs -p group/repo",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"path": "string", "rename_chain": "[string]?", "merge_requests": "[{iid:int, title:string, state:string, author_username:string, change_type:string, merged_at_iso:string?, updated_at_iso:string, merge_commit_sha:string?, web_url:string?}]", "discussions": "[{discussion_id:string, author_username:string, body_snippet:string, path:string, created_at_iso:string}]?"},
|
||||||
|
"meta": {"elapsed_ms": "int", "total_mrs": "int", "renames_followed": "bool", "paths_searched": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"drift": {
|
||||||
|
"description": "Detect discussion divergence from original issue intent",
|
||||||
|
"flags": ["<entity_type: issues>", "<IID>", "--threshold <0.0-1.0>", "-p/--project <path>"],
|
||||||
|
"example": "lore --robot drift issues 42 --threshold 0.4",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"entity_type": "string", "iid": "int", "title": "string", "threshold": "float", "divergent_discussions": "[{discussion_id:string, similarity:float, snippet:string}]"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"explain": {
|
||||||
|
"description": "Auto-generate a structured narrative of an issue or MR",
|
||||||
|
"flags": ["<entity_type: issues|mrs>", "<IID>", "-p/--project <path>", "--sections <comma-list>", "--no-timeline", "--max-decisions <N>", "--since <period>"],
|
||||||
|
"valid_sections": ["entity", "description", "key_decisions", "activity", "open_threads", "related", "timeline"],
|
||||||
|
"example": "lore --robot explain issues 42 --sections key_decisions,activity --since 30d",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"entity": "{type:string, iid:int, title:string, state:string, author:string, assignees:[string], labels:[string], created_at:string, updated_at:string, url:string?, status_name:string?}", "description_excerpt": "string?", "key_decisions": "[{timestamp:string, actor:string, action:string, context_note:string}]?", "activity": "{state_changes:int, label_changes:int, notes:int, first_event:string?, last_event:string?}?", "open_threads": "[{discussion_id:string, started_by:string, started_at:string, note_count:int, last_note_at:string}]?", "related": "{closing_mrs:[{iid:int, title:string, state:string, web_url:string?}], related_issues:[{entity_type:string, iid:int, title:string?, reference_type:string}]}?", "timeline_excerpt": "[{timestamp:string, event_type:string, actor:string?, summary:string}]?"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"notes": {
|
||||||
|
"description": "List notes from discussions with rich filtering",
|
||||||
|
"flags": ["--limit/-n <N>", "--author/-a <username>", "--note-type <type>", "--contains <text>", "--for-issue <iid>", "--for-mr <iid>", "-p/--project <path>", "--since <period>", "--until <period>", "--path <filepath>", "--resolution <any|unresolved|resolved>", "--sort <created|updated>", "--asc", "--include-system", "--note-id <id>", "--gitlab-note-id <id>", "--discussion-id <id>", "--fields <list|minimal>", "--open"],
|
||||||
|
"robot_flags": ["--format json", "--fields minimal"],
|
||||||
|
"example": "lore --robot notes --author jdefting --since 1y --format json --fields minimal",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"notes": "[NoteListRowJson]", "total_count": "int", "showing": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cron": {
|
||||||
|
"description": "Manage cron-based automatic syncing (Unix only)",
|
||||||
|
"subcommands": {
|
||||||
|
"install": {"flags": ["--interval <minutes>"], "default_interval": 8},
|
||||||
|
"uninstall": {"flags": []},
|
||||||
|
"status": {"flags": []}
|
||||||
|
},
|
||||||
|
"example": "lore --robot cron status",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"action": "string (install|uninstall|status)", "installed?": "bool", "interval_minutes?": "int", "entry?": "string", "log_path?": "string", "replaced?": "bool", "was_installed?": "bool", "last_run_iso?": "string"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"token": {
|
||||||
|
"description": "Manage stored GitLab token",
|
||||||
|
"subcommands": {
|
||||||
|
"set": {"flags": ["--token <value>"], "note": "Reads from stdin if --token omitted in non-interactive mode"},
|
||||||
|
"show": {"flags": ["--unmask"]}
|
||||||
|
},
|
||||||
|
"example": "lore --robot token show",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"action": "string (set|show)", "token_masked?": "string", "token?": "string", "valid?": "bool", "username?": "string"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"me": {
|
||||||
|
"description": "Personal work dashboard: open issues, authored/reviewing MRs, @mentioned-in items, activity feed, and cursor-based since-last-check inbox with computed attention states",
|
||||||
|
"flags": ["--issues", "--mrs", "--mentions", "--activity", "--since <period>", "-p/--project <path>", "--all", "--user <username>", "--fields <list|minimal>", "--reset-cursor"],
|
||||||
|
"example": "lore --robot me",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {
|
||||||
|
"username": "string",
|
||||||
|
"since_iso": "string?",
|
||||||
|
"summary": {"project_count": "int", "open_issue_count": "int", "authored_mr_count": "int", "reviewing_mr_count": "int", "mentioned_in_count": "int", "needs_attention_count": "int"},
|
||||||
|
"since_last_check": "{cursor_iso:string, total_event_count:int, groups:[{entity_type:string, entity_iid:int, entity_title:string, project:string, events:[{timestamp_iso:string, event_type:string, actor:string?, summary:string, body_preview:string?}]}]}?",
|
||||||
|
"open_issues": "[{project:string, iid:int, title:string, state:string, attention_state:string, attention_reason:string, status_name:string?, labels:[string], updated_at_iso:string, web_url:string?}]",
|
||||||
|
"open_mrs_authored": "[{project:string, iid:int, title:string, state:string, attention_state:string, attention_reason:string, draft:bool, detailed_merge_status:string?, author_username:string?, labels:[string], updated_at_iso:string, web_url:string?}]",
|
||||||
|
"reviewing_mrs": "[same as open_mrs_authored]",
|
||||||
|
"mentioned_in": "[{entity_type:string, project:string, iid:int, title:string, state:string, attention_state:string, attention_reason:string, updated_at_iso:string, web_url:string?}]",
|
||||||
|
"activity": "[{timestamp_iso:string, event_type:string, entity_type:string, entity_iid:int, project:string, actor:string?, is_own:bool, summary:string, body_preview:string?}]"
|
||||||
|
},
|
||||||
|
"meta": {"elapsed_ms": "int", "gitlab_base_url": "string (GitLab instance URL for constructing entity links: {base_url}/{project}/-/issues/{iid})"}
|
||||||
|
},
|
||||||
|
"fields_presets": {
|
||||||
|
"me_items_minimal": ["iid", "title", "attention_state", "attention_reason", "updated_at_iso"],
|
||||||
|
"me_mentions_minimal": ["entity_type", "iid", "title", "state", "attention_state", "attention_reason", "updated_at_iso"],
|
||||||
|
"me_activity_minimal": ["timestamp_iso", "event_type", "entity_iid", "actor"]
|
||||||
|
},
|
||||||
|
"notes": {
|
||||||
|
"attention_states": "needs_attention | not_started | awaiting_response | stale | not_ready",
|
||||||
|
"event_types": "note | status_change | label_change | assign | unassign | review_request | milestone_change",
|
||||||
|
"section_flags": "If none of --issues/--mrs/--mentions/--activity specified, all sections returned",
|
||||||
|
"since_default": "1d for activity feed",
|
||||||
|
"issue_filter": "Only In Progress / In Review status issues shown",
|
||||||
|
"since_last_check": "Cursor-based inbox showing events since last run. Null on first run (no cursor yet). Groups events by entity (issue/MR). Sources: others' comments on your items, @mentions, assignment/review-request notes. Cursor auto-advances after each run. Use --reset-cursor to clear.",
|
||||||
|
"cursor_persistence": "Stored per user in ~/.local/share/lore/me_cursor_<username>.json. --project filters display only for since-last-check; cursor still advances for all projects for that user.",
|
||||||
|
"url_construction": "Use meta.gitlab_base_url + project + entity_type + iid to build links: {gitlab_base_url}/{project}/-/{issues|merge_requests}/{iid}"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"robot-docs": {
|
||||||
|
"description": "This command (agent self-discovery manifest)",
|
||||||
|
"flags": ["--brief"],
|
||||||
|
"example": "lore robot-docs --brief"
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
let quick_start = serde_json::json!({
|
||||||
|
"glab_equivalents": [
|
||||||
|
{ "glab": "glab issue list", "lore": "lore -J issues -n 50", "note": "Richer: includes labels, status, closing MRs, discussion counts" },
|
||||||
|
{ "glab": "glab issue view 123", "lore": "lore -J issues 123", "note": "Includes full discussions, work-item status, cross-references" },
|
||||||
|
{ "glab": "glab issue list -l bug", "lore": "lore -J issues --label bug", "note": "AND logic for multiple --label flags" },
|
||||||
|
{ "glab": "glab mr list", "lore": "lore -J mrs", "note": "Includes draft status, reviewers, discussion counts" },
|
||||||
|
{ "glab": "glab mr view 456", "lore": "lore -J mrs 456", "note": "Includes discussions, review threads, source/target branches" },
|
||||||
|
{ "glab": "glab mr list -s opened", "lore": "lore -J mrs -s opened", "note": "States: opened, merged, closed, locked, all" },
|
||||||
|
{ "glab": "glab api '/projects/:id/issues'", "lore": "lore -J issues -p project", "note": "Fuzzy project matching (suffix or substring)" }
|
||||||
|
],
|
||||||
|
"lore_exclusive": [
|
||||||
|
"search: FTS5 + vector hybrid search across all entities",
|
||||||
|
"who: Expert/workload/reviews analysis per file path or person",
|
||||||
|
"timeline: Chronological event reconstruction across entities",
|
||||||
|
"trace: Code provenance chains (file -> MR -> issue -> discussion)",
|
||||||
|
"file-history: MR history per file with rename resolution",
|
||||||
|
"notes: Rich note listing with author, type, resolution, path, and discussion filters",
|
||||||
|
"stats: Database statistics with document/note/discussion counts",
|
||||||
|
"count: Entity counts with state breakdowns",
|
||||||
|
"embed: Generate vector embeddings for semantic search via Ollama",
|
||||||
|
"cron: Automated sync scheduling (Unix)",
|
||||||
|
"token: Secure token management with masked display",
|
||||||
|
"me: Personal work dashboard with attention states, activity feed, cursor-based since-last-check inbox, and needs-attention triage"
|
||||||
|
],
|
||||||
|
"read_write_split": "lore = ALL reads (issues, MRs, search, who, timeline, intelligence). glab = ALL writes (create, update, approve, merge, CI/CD)."
|
||||||
|
});
|
||||||
|
|
||||||
|
// --brief: strip response_schema and example_output from every command (~60% smaller)
|
||||||
|
let mut commands = commands;
|
||||||
|
if brief {
|
||||||
|
strip_schemas(&mut commands);
|
||||||
|
}
|
||||||
|
|
||||||
|
let exit_codes = serde_json::json!({
|
||||||
|
"0": "Success",
|
||||||
|
"1": "Internal error",
|
||||||
|
"2": "Usage error (invalid flags or arguments)",
|
||||||
|
"3": "Config invalid",
|
||||||
|
"4": "Token not set",
|
||||||
|
"5": "GitLab auth failed",
|
||||||
|
"6": "Resource not found",
|
||||||
|
"7": "Rate limited",
|
||||||
|
"8": "Network error",
|
||||||
|
"9": "Database locked",
|
||||||
|
"10": "Database error",
|
||||||
|
"11": "Migration failed",
|
||||||
|
"12": "I/O error",
|
||||||
|
"13": "Transform error",
|
||||||
|
"14": "Ollama unavailable",
|
||||||
|
"15": "Ollama model not found",
|
||||||
|
"16": "Embedding failed",
|
||||||
|
"17": "Not found",
|
||||||
|
"18": "Ambiguous match",
|
||||||
|
"19": "Health check failed",
|
||||||
|
"20": "Config not found",
|
||||||
|
"21": "Embeddings not built"
|
||||||
|
});
|
||||||
|
|
||||||
|
let workflows = serde_json::json!({
|
||||||
|
"first_setup": [
|
||||||
|
"lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project",
|
||||||
|
"lore --robot doctor",
|
||||||
|
"lore --robot sync"
|
||||||
|
],
|
||||||
|
"daily_sync": [
|
||||||
|
"lore --robot sync"
|
||||||
|
],
|
||||||
|
"search": [
|
||||||
|
"lore --robot search 'query' --mode hybrid"
|
||||||
|
],
|
||||||
|
"pre_flight": [
|
||||||
|
"lore --robot health"
|
||||||
|
],
|
||||||
|
"temporal_intelligence": [
|
||||||
|
"lore --robot sync",
|
||||||
|
"lore --robot timeline '<keyword>' --since 30d",
|
||||||
|
"lore --robot timeline '<keyword>' --depth 2"
|
||||||
|
],
|
||||||
|
"people_intelligence": [
|
||||||
|
"lore --robot who src/path/to/feature/",
|
||||||
|
"lore --robot who @username",
|
||||||
|
"lore --robot who @username --reviews",
|
||||||
|
"lore --robot who --active --since 7d",
|
||||||
|
"lore --robot who --overlap src/path/",
|
||||||
|
"lore --robot who --path README.md"
|
||||||
|
],
|
||||||
|
"surgical_sync": [
|
||||||
|
"lore --robot sync --issue 7 -p group/project",
|
||||||
|
"lore --robot sync --issue 7 --mr 10 -p group/project",
|
||||||
|
"lore --robot sync --issue 7 -p group/project --preflight-only"
|
||||||
|
],
|
||||||
|
"personal_dashboard": [
|
||||||
|
"lore --robot me",
|
||||||
|
"lore --robot me --issues",
|
||||||
|
"lore --robot me --activity --since 7d",
|
||||||
|
"lore --robot me --project group/repo",
|
||||||
|
"lore --robot me --fields minimal",
|
||||||
|
"lore --robot me --reset-cursor"
|
||||||
|
]
|
||||||
|
});
|
||||||
|
|
||||||
|
// Phase 3: Deprecated command aliases
|
||||||
|
let aliases = serde_json::json!({
|
||||||
|
"deprecated_commands": {
|
||||||
|
"list issues": "issues",
|
||||||
|
"list mrs": "mrs",
|
||||||
|
"show issue <IID>": "issues <IID>",
|
||||||
|
"show mr <IID>": "mrs <IID>",
|
||||||
|
"auth-test": "auth",
|
||||||
|
"sync-status": "status"
|
||||||
|
},
|
||||||
|
"command_aliases": {
|
||||||
|
"issue": "issues",
|
||||||
|
"mr": "mrs",
|
||||||
|
"merge-requests": "mrs",
|
||||||
|
"merge-request": "mrs",
|
||||||
|
"mergerequests": "mrs",
|
||||||
|
"mergerequest": "mrs",
|
||||||
|
"generate-docs": "generate-docs",
|
||||||
|
"generatedocs": "generate-docs",
|
||||||
|
"gendocs": "generate-docs",
|
||||||
|
"gen-docs": "generate-docs",
|
||||||
|
"robot-docs": "robot-docs",
|
||||||
|
"robotdocs": "robot-docs"
|
||||||
|
},
|
||||||
|
"pre_clap_aliases": {
|
||||||
|
"note": "Underscore/no-separator forms auto-corrected before parsing",
|
||||||
|
"merge_requests": "mrs",
|
||||||
|
"merge_request": "mrs",
|
||||||
|
"mergerequests": "mrs",
|
||||||
|
"mergerequest": "mrs",
|
||||||
|
"generate_docs": "generate-docs",
|
||||||
|
"generatedocs": "generate-docs",
|
||||||
|
"gendocs": "generate-docs",
|
||||||
|
"gen-docs": "generate-docs",
|
||||||
|
"robot-docs": "robot-docs",
|
||||||
|
"robotdocs": "robot-docs"
|
||||||
|
},
|
||||||
|
"prefix_matching": "Enabled via infer_subcommands. Unambiguous prefixes work: 'iss' -> issues, 'time' -> timeline, 'sea' -> search"
|
||||||
|
});
|
||||||
|
|
||||||
|
let error_tolerance = serde_json::json!({
|
||||||
|
"note": "The CLI auto-corrects common mistakes before parsing. Corrections are applied silently with a teaching note on stderr.",
|
||||||
|
"auto_corrections": [
|
||||||
|
{"type": "single_dash_long_flag", "example": "-robot -> --robot", "mode": "all"},
|
||||||
|
{"type": "case_normalization", "example": "--Robot -> --robot, --State -> --state", "mode": "all"},
|
||||||
|
{"type": "flag_prefix", "example": "--proj -> --project (when unambiguous)", "mode": "all"},
|
||||||
|
{"type": "fuzzy_flag", "example": "--projct -> --project", "mode": "all (threshold 0.9 in robot, 0.8 in human)"},
|
||||||
|
{"type": "subcommand_alias", "example": "merge_requests -> mrs, robotdocs -> robot-docs", "mode": "all"},
|
||||||
|
{"type": "subcommand_fuzzy", "example": "issuess -> issues, timline -> timeline, serach -> search", "mode": "all (threshold 0.85)"},
|
||||||
|
{"type": "flag_as_subcommand", "example": "--robot-docs -> robot-docs, --generate-docs -> generate-docs", "mode": "all"},
|
||||||
|
{"type": "value_normalization", "example": "--state Opened -> --state opened", "mode": "all"},
|
||||||
|
{"type": "value_fuzzy", "example": "--state opend -> --state opened", "mode": "all"},
|
||||||
|
{"type": "prefix_matching", "example": "lore iss -> lore issues, lore time -> lore timeline", "mode": "all (via clap infer_subcommands)"}
|
||||||
|
],
|
||||||
|
"teaching_notes": "Auto-corrections emit a JSON warning on stderr: {\"warning\":{\"type\":\"ARG_CORRECTED\",\"corrections\":[...],\"teaching\":[...]}}"
|
||||||
|
});
|
||||||
|
|
||||||
|
// Phase 3: Clap error codes (emitted by handle_clap_error)
|
||||||
|
let clap_error_codes = serde_json::json!({
|
||||||
|
"UNKNOWN_COMMAND": "Unrecognized subcommand (includes fuzzy suggestion)",
|
||||||
|
"UNKNOWN_FLAG": "Unrecognized command-line flag",
|
||||||
|
"MISSING_REQUIRED": "Required argument not provided",
|
||||||
|
"INVALID_VALUE": "Invalid value for argument",
|
||||||
|
"TOO_MANY_VALUES": "Too many values provided",
|
||||||
|
"TOO_FEW_VALUES": "Too few values provided",
|
||||||
|
"ARGUMENT_CONFLICT": "Conflicting arguments",
|
||||||
|
"MISSING_COMMAND": "No subcommand provided (in non-robot mode, shows help)",
|
||||||
|
"HELP_REQUESTED": "Help or version flag used",
|
||||||
|
"PARSE_ERROR": "General parse error"
|
||||||
|
});
|
||||||
|
|
||||||
|
let config_notes = serde_json::json!({
|
||||||
|
"defaultProject": {
|
||||||
|
"type": "string?",
|
||||||
|
"description": "Fallback project path used when -p/--project is omitted. Must match a configured project path (exact or suffix). CLI -p always overrides.",
|
||||||
|
"example": "group/project"
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
let output = RobotDocsOutput {
|
||||||
|
ok: true,
|
||||||
|
data: RobotDocsData {
|
||||||
|
name: "lore".to_string(),
|
||||||
|
version,
|
||||||
|
description: "Local GitLab data management with semantic search".to_string(),
|
||||||
|
activation: RobotDocsActivation {
|
||||||
|
flags: vec!["--robot".to_string(), "-J".to_string(), "--json".to_string()],
|
||||||
|
env: "LORE_ROBOT=1".to_string(),
|
||||||
|
auto: "Non-TTY stdout".to_string(),
|
||||||
|
},
|
||||||
|
quick_start,
|
||||||
|
commands,
|
||||||
|
aliases,
|
||||||
|
error_tolerance,
|
||||||
|
exit_codes,
|
||||||
|
clap_error_codes,
|
||||||
|
error_format: "stderr JSON: {\"error\":{\"code\":\"...\",\"message\":\"...\",\"suggestion\":\"...\",\"actions\":[\"...\"]}}".to_string(),
|
||||||
|
workflows,
|
||||||
|
config_notes,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
if robot_mode {
|
||||||
|
println!("{}", serde_json::to_string(&output)?);
|
||||||
|
} else {
|
||||||
|
println!("{}", serde_json::to_string_pretty(&output)?);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_who(
|
||||||
|
config_override: Option<&str>,
|
||||||
|
mut args: WhoArgs,
|
||||||
|
robot_mode: bool,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let start = std::time::Instant::now();
|
||||||
|
let config = Config::load(config_override)?;
|
||||||
|
if args.project.is_none() {
|
||||||
|
args.project = config.default_project.clone();
|
||||||
|
}
|
||||||
|
let run = run_who(&config, &args)?;
|
||||||
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||||
|
|
||||||
|
if robot_mode {
|
||||||
|
print_who_json(&run, &args, elapsed_ms);
|
||||||
|
} else {
|
||||||
|
print_who_human(&run.result, run.resolved_input.project_path.as_deref());
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_me(
|
||||||
|
config_override: Option<&str>,
|
||||||
|
args: MeArgs,
|
||||||
|
robot_mode: bool,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let config = Config::load(config_override)?;
|
||||||
|
run_me(&config, &args, robot_mode)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_drift(
|
||||||
|
config_override: Option<&str>,
|
||||||
|
entity_type: &str,
|
||||||
|
iid: i64,
|
||||||
|
threshold: f32,
|
||||||
|
project: Option<&str>,
|
||||||
|
robot_mode: bool,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let start = std::time::Instant::now();
|
||||||
|
let config = Config::load(config_override)?;
|
||||||
|
let effective_project = config.effective_project(project);
|
||||||
|
let response = run_drift(&config, entity_type, iid, threshold, effective_project).await?;
|
||||||
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||||
|
|
||||||
|
if robot_mode {
|
||||||
|
print_drift_json(&response, elapsed_ms);
|
||||||
|
} else {
|
||||||
|
print_drift_human(&response);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_related(
|
||||||
|
config_override: Option<&str>,
|
||||||
|
query_or_type: &str,
|
||||||
|
iid: Option<i64>,
|
||||||
|
limit: usize,
|
||||||
|
project: Option<&str>,
|
||||||
|
robot_mode: bool,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let start = std::time::Instant::now();
|
||||||
|
let config = Config::load(config_override)?;
|
||||||
|
let effective_project = config.effective_project(project);
|
||||||
|
let response = run_related(&config, query_or_type, iid, limit, effective_project).await?;
|
||||||
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||||
|
|
||||||
|
if robot_mode {
|
||||||
|
print_related_json(&response, elapsed_ms);
|
||||||
|
} else {
|
||||||
|
print_related_human(&response);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
async fn handle_list_compat(
|
||||||
|
config_override: Option<&str>,
|
||||||
|
entity: &str,
|
||||||
|
limit: usize,
|
||||||
|
project_filter: Option<&str>,
|
||||||
|
state_filter: Option<&str>,
|
||||||
|
author_filter: Option<&str>,
|
||||||
|
assignee_filter: Option<&str>,
|
||||||
|
label_filter: Option<&[String]>,
|
||||||
|
milestone_filter: Option<&str>,
|
||||||
|
since_filter: Option<&str>,
|
||||||
|
due_before_filter: Option<&str>,
|
||||||
|
has_due_date: bool,
|
||||||
|
sort: &str,
|
||||||
|
order: &str,
|
||||||
|
open_browser: bool,
|
||||||
|
json_output: bool,
|
||||||
|
draft: bool,
|
||||||
|
no_draft: bool,
|
||||||
|
reviewer_filter: Option<&str>,
|
||||||
|
target_branch_filter: Option<&str>,
|
||||||
|
source_branch_filter: Option<&str>,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let start = std::time::Instant::now();
|
||||||
|
let config = Config::load(config_override)?;
|
||||||
|
let project_filter = config.effective_project(project_filter);
|
||||||
|
|
||||||
|
let state_normalized = state_filter.map(str::to_lowercase);
|
||||||
|
match entity {
|
||||||
|
"issues" => {
|
||||||
|
let filters = ListFilters {
|
||||||
|
limit,
|
||||||
|
project: project_filter,
|
||||||
|
state: state_normalized.as_deref(),
|
||||||
|
author: author_filter,
|
||||||
|
assignee: assignee_filter,
|
||||||
|
labels: label_filter,
|
||||||
|
milestone: milestone_filter,
|
||||||
|
since: since_filter,
|
||||||
|
due_before: due_before_filter,
|
||||||
|
has_due_date,
|
||||||
|
statuses: &[],
|
||||||
|
sort,
|
||||||
|
order,
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = run_list_issues(&config, filters)?;
|
||||||
|
|
||||||
|
if open_browser {
|
||||||
|
open_issue_in_browser(&result);
|
||||||
|
} else if json_output {
|
||||||
|
print_list_issues_json(&result, start.elapsed().as_millis() as u64, None);
|
||||||
|
} else {
|
||||||
|
print_list_issues(&result);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
"mrs" => {
|
||||||
|
let filters = MrListFilters {
|
||||||
|
limit,
|
||||||
|
project: project_filter,
|
||||||
|
state: state_normalized.as_deref(),
|
||||||
|
author: author_filter,
|
||||||
|
assignee: assignee_filter,
|
||||||
|
reviewer: reviewer_filter,
|
||||||
|
labels: label_filter,
|
||||||
|
since: since_filter,
|
||||||
|
draft,
|
||||||
|
no_draft,
|
||||||
|
target_branch: target_branch_filter,
|
||||||
|
source_branch: source_branch_filter,
|
||||||
|
sort,
|
||||||
|
order,
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = run_list_mrs(&config, filters)?;
|
||||||
|
|
||||||
|
if open_browser {
|
||||||
|
open_mr_in_browser(&result);
|
||||||
|
} else if json_output {
|
||||||
|
print_list_mrs_json(&result, start.elapsed().as_millis() as u64, None);
|
||||||
|
} else {
|
||||||
|
print_list_mrs(&result);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
eprintln!(
|
||||||
|
"{}",
|
||||||
|
Theme::error().render(&format!("Unknown entity: {entity}"))
|
||||||
|
);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
870
src/cli/args.rs
Normal file
870
src/cli/args.rs
Normal file
@@ -0,0 +1,870 @@
|
|||||||
|
use clap::{Args, Parser, Subcommand};
|
||||||
|
|
||||||
|
// Clap argument set for `lore issues` (list mode when no IID is given,
// show mode when one is). NOTE(review): the `///` doc comments below double
// as clap help text, so they are user-visible CLI behavior — do not edit
// them casually.
#[derive(Parser)]
#[command(after_help = "\x1b[1mExamples:\x1b[0m
lore issues -n 10 # List 10 most recently updated issues
lore issues -s opened -l bug # Open issues labeled 'bug'
lore issues 42 -p group/repo # Show issue #42 in a specific project
lore issues --since 7d -a jsmith # Issues updated in last 7 days by jsmith")]
pub struct IssuesArgs {
    /// Issue IID (omit to list, provide to show details)
    pub iid: Option<i64>,

    /// Maximum results
    #[arg(
        short = 'n',
        long = "limit",
        default_value = "50",
        help_heading = "Output"
    )]
    pub limit: usize,

    /// Select output fields (comma-separated, or 'minimal' preset: iid,title,state,updated_at_iso)
    #[arg(long, help_heading = "Output", value_delimiter = ',')]
    pub fields: Option<Vec<String>>,

    /// Filter by state (opened, closed, all)
    // value_parser restricts input to the listed states; clap rejects others.
    #[arg(short = 's', long, help_heading = "Filters", value_parser = ["opened", "closed", "all"])]
    pub state: Option<String>,

    /// Filter by project path
    #[arg(short = 'p', long, help_heading = "Filters")]
    pub project: Option<String>,

    /// Filter by author username
    #[arg(short = 'a', long, help_heading = "Filters")]
    pub author: Option<String>,

    /// Filter by assignee username
    #[arg(short = 'A', long, help_heading = "Filters")]
    pub assignee: Option<String>,

    /// Filter by label (repeatable, AND logic)
    #[arg(short = 'l', long, help_heading = "Filters")]
    pub label: Option<Vec<String>>,

    /// Filter by milestone title
    #[arg(short = 'm', long, help_heading = "Filters")]
    pub milestone: Option<String>,

    /// Filter by work-item status name (repeatable, OR logic)
    #[arg(long, help_heading = "Filters")]
    pub status: Vec<String>,

    /// Filter by time (7d, 2w, 1m, or YYYY-MM-DD)
    #[arg(long, help_heading = "Filters")]
    pub since: Option<String>,

    /// Filter by due date (before this date, YYYY-MM-DD)
    #[arg(long = "due-before", help_heading = "Filters")]
    pub due_before: Option<String>,

    /// Show only issues with a due date
    // --has-due / hidden --no-has-due form a negatable flag pair:
    // `overrides_with` makes the last occurrence on the command line win.
    #[arg(
        long = "has-due",
        help_heading = "Filters",
        overrides_with = "no_has_due"
    )]
    pub has_due: bool,

    // Hidden negation counterpart of --has-due (not shown in --help).
    #[arg(long = "no-has-due", hide = true, overrides_with = "has_due")]
    pub no_has_due: bool,

    /// Sort field (updated, created, iid)
    #[arg(long, value_parser = ["updated", "created", "iid"], default_value = "updated", help_heading = "Sorting")]
    pub sort: String,

    /// Sort ascending (default: descending)
    #[arg(long, help_heading = "Sorting", overrides_with = "no_asc")]
    pub asc: bool,

    // Hidden negation counterpart of --asc.
    #[arg(long = "no-asc", hide = true, overrides_with = "asc")]
    pub no_asc: bool,

    /// Open first matching item in browser
    #[arg(
        short = 'o',
        long,
        help_heading = "Actions",
        overrides_with = "no_open"
    )]
    pub open: bool,

    // Hidden negation counterpart of --open.
    #[arg(long = "no-open", hide = true, overrides_with = "open")]
    pub no_open: bool,
}
|
||||||
|
|
||||||
|
// Clap argument set for `lore mrs` (list mode when no IID is given, show
// mode when one is). NOTE(review): the `///` doc comments below double as
// clap help text, so they are user-visible CLI behavior — do not edit them
// casually.
#[derive(Parser)]
#[command(after_help = "\x1b[1mExamples:\x1b[0m
lore mrs -s opened # List open merge requests
lore mrs -s merged --since 2w # MRs merged in the last 2 weeks
lore mrs 99 -p group/repo # Show MR !99 in a specific project
lore mrs -D --reviewer jsmith # Non-draft MRs reviewed by jsmith")]
pub struct MrsArgs {
    /// MR IID (omit to list, provide to show details)
    pub iid: Option<i64>,

    /// Maximum results
    #[arg(
        short = 'n',
        long = "limit",
        default_value = "50",
        help_heading = "Output"
    )]
    pub limit: usize,

    /// Select output fields (comma-separated, or 'minimal' preset: iid,title,state,updated_at_iso)
    #[arg(long, help_heading = "Output", value_delimiter = ',')]
    pub fields: Option<Vec<String>>,

    /// Filter by state (opened, merged, closed, locked, all)
    // value_parser restricts input to the listed states; clap rejects others.
    #[arg(short = 's', long, help_heading = "Filters", value_parser = ["opened", "merged", "closed", "locked", "all"])]
    pub state: Option<String>,

    /// Filter by project path
    #[arg(short = 'p', long, help_heading = "Filters")]
    pub project: Option<String>,

    /// Filter by author username
    #[arg(short = 'a', long, help_heading = "Filters")]
    pub author: Option<String>,

    /// Filter by assignee username
    #[arg(short = 'A', long, help_heading = "Filters")]
    pub assignee: Option<String>,

    /// Filter by reviewer username
    #[arg(short = 'r', long, help_heading = "Filters")]
    pub reviewer: Option<String>,

    /// Filter by label (repeatable, AND logic)
    #[arg(short = 'l', long, help_heading = "Filters")]
    pub label: Option<Vec<String>>,

    /// Filter by time (7d, 2w, 1m, or YYYY-MM-DD)
    #[arg(long, help_heading = "Filters")]
    pub since: Option<String>,

    /// Show only draft MRs
    // -d and -D are mutually exclusive (conflicts_with, not overrides_with):
    // passing both is a hard usage error rather than last-one-wins.
    #[arg(
        short = 'd',
        long,
        conflicts_with = "no_draft",
        help_heading = "Filters"
    )]
    pub draft: bool,

    /// Exclude draft MRs
    #[arg(
        short = 'D',
        long = "no-draft",
        conflicts_with = "draft",
        help_heading = "Filters"
    )]
    pub no_draft: bool,

    /// Filter by target branch
    #[arg(long, help_heading = "Filters")]
    pub target: Option<String>,

    /// Filter by source branch
    #[arg(long, help_heading = "Filters")]
    pub source: Option<String>,

    /// Sort field (updated, created, iid)
    #[arg(long, value_parser = ["updated", "created", "iid"], default_value = "updated", help_heading = "Sorting")]
    pub sort: String,

    /// Sort ascending (default: descending)
    #[arg(long, help_heading = "Sorting", overrides_with = "no_asc")]
    pub asc: bool,

    // Hidden negation counterpart of --asc (last occurrence wins).
    #[arg(long = "no-asc", hide = true, overrides_with = "asc")]
    pub no_asc: bool,

    /// Open first matching item in browser
    #[arg(
        short = 'o',
        long,
        help_heading = "Actions",
        overrides_with = "no_open"
    )]
    pub open: bool,

    // Hidden negation counterpart of --open.
    #[arg(long = "no-open", hide = true, overrides_with = "open")]
    pub no_open: bool,
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore notes # List 50 most recent notes
|
||||||
|
lore notes --author alice --since 7d # Notes by alice in last 7 days
|
||||||
|
lore notes --for-issue 42 -p group/repo # Notes on issue #42
|
||||||
|
lore notes --path src/ --resolution unresolved # Unresolved diff notes in src/")]
|
||||||
|
pub struct NotesArgs {
|
||||||
|
/// Maximum results
|
||||||
|
#[arg(
|
||||||
|
short = 'n',
|
||||||
|
long = "limit",
|
||||||
|
default_value = "50",
|
||||||
|
help_heading = "Output"
|
||||||
|
)]
|
||||||
|
pub limit: usize,
|
||||||
|
|
||||||
|
/// Select output fields (comma-separated, or 'minimal' preset: id,author_username,body,created_at_iso)
|
||||||
|
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
||||||
|
pub fields: Option<Vec<String>>,
|
||||||
|
|
||||||
|
/// Filter by author username
|
||||||
|
#[arg(short = 'a', long, help_heading = "Filters")]
|
||||||
|
pub author: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by note type (DiffNote, DiscussionNote)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub note_type: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by body text (substring match)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub contains: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by internal note ID
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub note_id: Option<i64>,
|
||||||
|
|
||||||
|
/// Filter by GitLab note ID
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub gitlab_note_id: Option<i64>,
|
||||||
|
|
||||||
|
/// Filter by discussion ID
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub discussion_id: Option<String>,
|
||||||
|
|
||||||
|
/// Include system notes (excluded by default)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub include_system: bool,
|
||||||
|
|
||||||
|
/// Filter to notes on a specific issue IID (requires --project or default_project)
|
||||||
|
#[arg(long, conflicts_with = "for_mr", help_heading = "Filters")]
|
||||||
|
pub for_issue: Option<i64>,
|
||||||
|
|
||||||
|
/// Filter to notes on a specific MR IID (requires --project or default_project)
|
||||||
|
#[arg(long, conflicts_with = "for_issue", help_heading = "Filters")]
|
||||||
|
pub for_mr: Option<i64>,
|
||||||
|
|
||||||
|
/// Filter by project path
|
||||||
|
#[arg(short = 'p', long, help_heading = "Filters")]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by time (7d, 2w, 1m, or YYYY-MM-DD)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub since: Option<String>,
|
||||||
|
|
||||||
|
/// Filter until date (YYYY-MM-DD, inclusive end-of-day)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub until: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by file path (exact match or prefix with trailing /)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub path: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by resolution status (any, unresolved, resolved)
|
||||||
|
#[arg(
|
||||||
|
long,
|
||||||
|
value_parser = ["any", "unresolved", "resolved"],
|
||||||
|
help_heading = "Filters"
|
||||||
|
)]
|
||||||
|
pub resolution: Option<String>,
|
||||||
|
|
||||||
|
/// Sort field (created, updated)
|
||||||
|
#[arg(
|
||||||
|
long,
|
||||||
|
value_parser = ["created", "updated"],
|
||||||
|
default_value = "created",
|
||||||
|
help_heading = "Sorting"
|
||||||
|
)]
|
||||||
|
pub sort: String,
|
||||||
|
|
||||||
|
/// Sort ascending (default: descending)
|
||||||
|
#[arg(long, help_heading = "Sorting")]
|
||||||
|
pub asc: bool,
|
||||||
|
|
||||||
|
/// Open first matching item in browser
|
||||||
|
#[arg(long, help_heading = "Actions")]
|
||||||
|
pub open: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
pub struct IngestArgs {
|
||||||
|
/// Entity to ingest (issues, mrs). Omit to ingest everything
|
||||||
|
#[arg(value_parser = ["issues", "mrs"])]
|
||||||
|
pub entity: Option<String>,
|
||||||
|
|
||||||
|
/// Filter to single project
|
||||||
|
#[arg(short = 'p', long)]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
/// Override stale sync lock
|
||||||
|
#[arg(short = 'f', long, overrides_with = "no_force")]
|
||||||
|
pub force: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-force", hide = true, overrides_with = "force")]
|
||||||
|
pub no_force: bool,
|
||||||
|
|
||||||
|
/// Full re-sync: reset cursors and fetch all data from scratch
|
||||||
|
#[arg(long, overrides_with = "no_full")]
|
||||||
|
pub full: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-full", hide = true, overrides_with = "full")]
|
||||||
|
pub no_full: bool,
|
||||||
|
|
||||||
|
/// Preview what would be synced without making changes
|
||||||
|
#[arg(long, overrides_with = "no_dry_run")]
|
||||||
|
pub dry_run: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-dry-run", hide = true, overrides_with = "dry_run")]
|
||||||
|
pub no_dry_run: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore stats # Show document and index statistics
|
||||||
|
lore stats --check # Run integrity checks
|
||||||
|
lore stats --repair --dry-run # Preview what repair would fix
|
||||||
|
lore --robot stats # JSON output for automation")]
|
||||||
|
pub struct StatsArgs {
|
||||||
|
/// Run integrity checks
|
||||||
|
#[arg(long, overrides_with = "no_check")]
|
||||||
|
pub check: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-check", hide = true, overrides_with = "check")]
|
||||||
|
pub no_check: bool,
|
||||||
|
|
||||||
|
/// Repair integrity issues (auto-enables --check)
|
||||||
|
#[arg(long)]
|
||||||
|
pub repair: bool,
|
||||||
|
|
||||||
|
/// Preview what would be repaired without making changes (requires --repair)
|
||||||
|
#[arg(long, overrides_with = "no_dry_run")]
|
||||||
|
pub dry_run: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-dry-run", hide = true, overrides_with = "dry_run")]
|
||||||
|
pub no_dry_run: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore search 'authentication bug' # Hybrid search (default)
|
||||||
|
lore search 'deploy' --mode lexical --type mr # Lexical search, MRs only
|
||||||
|
lore search 'API rate limit' --since 30d # Recent results only
|
||||||
|
lore search 'config' -p group/repo --explain # With ranking explanation")]
|
||||||
|
pub struct SearchArgs {
|
||||||
|
/// Search query string
|
||||||
|
pub query: String,
|
||||||
|
|
||||||
|
/// Search mode (lexical, hybrid, semantic)
|
||||||
|
#[arg(long, default_value = "hybrid", value_parser = ["lexical", "hybrid", "semantic"], help_heading = "Mode")]
|
||||||
|
pub mode: String,
|
||||||
|
|
||||||
|
/// Filter by source type (issue, mr, discussion, note)
|
||||||
|
#[arg(long = "type", value_name = "TYPE", value_parser = ["issue", "mr", "discussion", "note"], help_heading = "Filters")]
|
||||||
|
pub source_type: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by author username
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub author: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by project path
|
||||||
|
#[arg(short = 'p', long, help_heading = "Filters")]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by label (repeatable, AND logic)
|
||||||
|
#[arg(long, action = clap::ArgAction::Append, help_heading = "Filters")]
|
||||||
|
pub label: Vec<String>,
|
||||||
|
|
||||||
|
/// Filter by file path (trailing / for prefix match)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub path: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by created since (7d, 2w, or YYYY-MM-DD)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub since: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by updated since (7d, 2w, or YYYY-MM-DD)
|
||||||
|
#[arg(long = "updated-since", help_heading = "Filters")]
|
||||||
|
pub updated_since: Option<String>,
|
||||||
|
|
||||||
|
/// Maximum results (default 20, max 100)
|
||||||
|
#[arg(
|
||||||
|
short = 'n',
|
||||||
|
long = "limit",
|
||||||
|
default_value = "20",
|
||||||
|
help_heading = "Output"
|
||||||
|
)]
|
||||||
|
pub limit: usize,
|
||||||
|
|
||||||
|
/// Select output fields (comma-separated, or 'minimal' preset: document_id,title,source_type,score)
|
||||||
|
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
||||||
|
pub fields: Option<Vec<String>>,
|
||||||
|
|
||||||
|
/// Show ranking explanation per result
|
||||||
|
#[arg(long, help_heading = "Output", overrides_with = "no_explain")]
|
||||||
|
pub explain: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-explain", hide = true, overrides_with = "explain")]
|
||||||
|
pub no_explain: bool,
|
||||||
|
|
||||||
|
/// FTS query mode: safe (default) or raw
|
||||||
|
#[arg(long = "fts-mode", default_value = "safe", value_parser = ["safe", "raw"], help_heading = "Mode")]
|
||||||
|
pub fts_mode: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore generate-docs # Generate docs for dirty entities
|
||||||
|
lore generate-docs --full # Full rebuild of all documents
|
||||||
|
lore generate-docs --full -p group/repo # Full rebuild for one project")]
|
||||||
|
pub struct GenerateDocsArgs {
|
||||||
|
/// Full rebuild: seed all entities into dirty queue, then drain
|
||||||
|
#[arg(long)]
|
||||||
|
pub full: bool,
|
||||||
|
|
||||||
|
/// Filter to single project
|
||||||
|
#[arg(short = 'p', long)]
|
||||||
|
pub project: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore sync # Full pipeline: ingest + docs + embed
|
||||||
|
lore sync --no-embed # Skip embedding step
|
||||||
|
lore sync --no-status # Skip work-item status enrichment
|
||||||
|
lore sync --full --force # Full re-sync, override stale lock
|
||||||
|
lore sync --dry-run # Preview what would change
|
||||||
|
lore sync --issue 42 -p group/repo # Surgically sync one issue
|
||||||
|
lore sync --mr 10 --mr 20 -p g/r # Surgically sync two MRs")]
|
||||||
|
pub struct SyncArgs {
|
||||||
|
/// Reset cursors, fetch everything
|
||||||
|
#[arg(long, overrides_with = "no_full")]
|
||||||
|
pub full: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-full", hide = true, overrides_with = "full")]
|
||||||
|
pub no_full: bool,
|
||||||
|
|
||||||
|
/// Override stale lock
|
||||||
|
#[arg(long, overrides_with = "no_force")]
|
||||||
|
pub force: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-force", hide = true, overrides_with = "force")]
|
||||||
|
pub no_force: bool,
|
||||||
|
|
||||||
|
/// Skip embedding step
|
||||||
|
#[arg(long)]
|
||||||
|
pub no_embed: bool,
|
||||||
|
|
||||||
|
/// Skip document regeneration
|
||||||
|
#[arg(long)]
|
||||||
|
pub no_docs: bool,
|
||||||
|
|
||||||
|
/// Skip resource event fetching (overrides config)
|
||||||
|
#[arg(long = "no-events")]
|
||||||
|
pub no_events: bool,
|
||||||
|
|
||||||
|
/// Skip MR file change fetching (overrides config)
|
||||||
|
#[arg(long = "no-file-changes")]
|
||||||
|
pub no_file_changes: bool,
|
||||||
|
|
||||||
|
/// Skip work-item status enrichment via GraphQL (overrides config)
|
||||||
|
#[arg(long = "no-status")]
|
||||||
|
pub no_status: bool,
|
||||||
|
|
||||||
|
/// Preview what would be synced without making changes
|
||||||
|
#[arg(long, overrides_with = "no_dry_run")]
|
||||||
|
pub dry_run: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-dry-run", hide = true, overrides_with = "dry_run")]
|
||||||
|
pub no_dry_run: bool,
|
||||||
|
|
||||||
|
/// Show detailed timing breakdown for sync stages
|
||||||
|
#[arg(short = 't', long = "timings")]
|
||||||
|
pub timings: bool,
|
||||||
|
|
||||||
|
/// Acquire file lock before syncing (skip if another sync is running)
|
||||||
|
#[arg(long)]
|
||||||
|
pub lock: bool,
|
||||||
|
|
||||||
|
/// Surgically sync specific issues by IID (repeatable, must be positive)
|
||||||
|
#[arg(long, value_parser = clap::value_parser!(u64).range(1..), action = clap::ArgAction::Append)]
|
||||||
|
pub issue: Vec<u64>,
|
||||||
|
|
||||||
|
/// Surgically sync specific merge requests by IID (repeatable, must be positive)
|
||||||
|
#[arg(long, value_parser = clap::value_parser!(u64).range(1..), action = clap::ArgAction::Append)]
|
||||||
|
pub mr: Vec<u64>,
|
||||||
|
|
||||||
|
/// Scope to a single project (required when --issue or --mr is used)
|
||||||
|
#[arg(short = 'p', long)]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
/// Validate remote entities exist without DB writes (preflight only)
|
||||||
|
#[arg(long)]
|
||||||
|
pub preflight_only: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore embed # Embed new/changed documents
|
||||||
|
lore embed --full # Re-embed all documents from scratch
|
||||||
|
lore embed --retry-failed # Retry previously failed embeddings")]
|
||||||
|
pub struct EmbedArgs {
|
||||||
|
/// Re-embed all documents (clears existing embeddings first)
|
||||||
|
#[arg(long, overrides_with = "no_full")]
|
||||||
|
pub full: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-full", hide = true, overrides_with = "full")]
|
||||||
|
pub no_full: bool,
|
||||||
|
|
||||||
|
/// Retry previously failed embeddings
|
||||||
|
#[arg(long, overrides_with = "no_retry_failed")]
|
||||||
|
pub retry_failed: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-retry-failed", hide = true, overrides_with = "retry_failed")]
|
||||||
|
pub no_retry_failed: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore timeline 'deployment' # Search-based seeding
|
||||||
|
lore timeline issue:42 # Direct: issue #42 and related entities
|
||||||
|
lore timeline i:42 # Shorthand for issue:42
|
||||||
|
lore timeline mr:99 # Direct: MR !99 and related entities
|
||||||
|
lore timeline 'auth' --since 30d -p group/repo # Scoped to project and time
|
||||||
|
lore timeline 'migration' --depth 2 # Deep cross-reference expansion
|
||||||
|
lore timeline 'auth' --no-mentions # Only 'closes' and 'related' edges")]
|
||||||
|
pub struct TimelineArgs {
|
||||||
|
/// Search text or entity reference (issue:N, i:N, mr:N, m:N)
|
||||||
|
pub query: String,
|
||||||
|
|
||||||
|
/// Scope to a specific project (fuzzy match)
|
||||||
|
#[arg(short = 'p', long, help_heading = "Filters")]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
/// Only show events after this date (e.g. "6m", "2w", "2024-01-01")
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub since: Option<String>,
|
||||||
|
|
||||||
|
/// Cross-reference expansion depth (0 = no expansion)
|
||||||
|
#[arg(long, default_value = "1", help_heading = "Expansion")]
|
||||||
|
pub depth: u32,
|
||||||
|
|
||||||
|
/// Skip 'mentioned' edges during expansion (only follow 'closes' and 'related')
|
||||||
|
#[arg(long = "no-mentions", help_heading = "Expansion")]
|
||||||
|
pub no_mentions: bool,
|
||||||
|
|
||||||
|
/// Maximum number of events to display
|
||||||
|
#[arg(
|
||||||
|
short = 'n',
|
||||||
|
long = "limit",
|
||||||
|
default_value = "100",
|
||||||
|
help_heading = "Output"
|
||||||
|
)]
|
||||||
|
pub limit: usize,
|
||||||
|
|
||||||
|
/// Select output fields (comma-separated, or 'minimal' preset: timestamp,type,entity_iid,detail)
|
||||||
|
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
||||||
|
pub fields: Option<Vec<String>>,
|
||||||
|
|
||||||
|
/// Maximum seed entities from search
|
||||||
|
#[arg(long = "max-seeds", default_value = "10", help_heading = "Expansion")]
|
||||||
|
pub max_seeds: usize,
|
||||||
|
|
||||||
|
/// Maximum expanded entities via cross-references
|
||||||
|
#[arg(
|
||||||
|
long = "max-entities",
|
||||||
|
default_value = "50",
|
||||||
|
help_heading = "Expansion"
|
||||||
|
)]
|
||||||
|
pub max_entities: usize,
|
||||||
|
|
||||||
|
/// Maximum evidence notes included
|
||||||
|
#[arg(
|
||||||
|
long = "max-evidence",
|
||||||
|
default_value = "10",
|
||||||
|
help_heading = "Expansion"
|
||||||
|
)]
|
||||||
|
pub max_evidence: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore who src/features/auth/ # Who knows about this area?
|
||||||
|
lore who @asmith # What is asmith working on?
|
||||||
|
lore who @asmith --reviews # What review patterns does asmith have?
|
||||||
|
lore who --active # What discussions need attention?
|
||||||
|
lore who --overlap src/features/auth/ # Who else is touching these files?
|
||||||
|
lore who --path README.md # Expert lookup for a root file
|
||||||
|
lore who --path Makefile # Expert lookup for a dotless root file")]
|
||||||
|
pub struct WhoArgs {
|
||||||
|
/// Username or file path (path if contains /)
|
||||||
|
pub target: Option<String>,
|
||||||
|
|
||||||
|
/// Force expert mode for a file/directory path.
|
||||||
|
/// Root files (README.md, LICENSE, Makefile) are treated as exact matches.
|
||||||
|
/// Use a trailing `/` to force directory-prefix matching.
|
||||||
|
#[arg(long, help_heading = "Mode", conflicts_with_all = ["active", "overlap", "reviews"])]
|
||||||
|
pub path: Option<String>,
|
||||||
|
|
||||||
|
/// Show active unresolved discussions
|
||||||
|
#[arg(long, help_heading = "Mode", conflicts_with_all = ["target", "overlap", "reviews", "path"])]
|
||||||
|
pub active: bool,
|
||||||
|
|
||||||
|
/// Find users with MRs/notes touching this file path
|
||||||
|
#[arg(long, help_heading = "Mode", conflicts_with_all = ["target", "active", "reviews", "path"])]
|
||||||
|
pub overlap: Option<String>,
|
||||||
|
|
||||||
|
/// Show review pattern analysis (requires username target)
|
||||||
|
#[arg(long, help_heading = "Mode", requires = "target", conflicts_with_all = ["active", "overlap", "path"])]
|
||||||
|
pub reviews: bool,
|
||||||
|
|
||||||
|
/// Time window (7d, 2w, 6m, YYYY-MM-DD). Default varies by mode.
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub since: Option<String>,
|
||||||
|
|
||||||
|
/// Scope to a project (supports fuzzy matching)
|
||||||
|
#[arg(short = 'p', long, help_heading = "Filters")]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
/// Maximum results per section (1..=500); omit for unlimited
|
||||||
|
#[arg(
|
||||||
|
short = 'n',
|
||||||
|
long = "limit",
|
||||||
|
value_parser = clap::value_parser!(u16).range(1..=500),
|
||||||
|
help_heading = "Output"
|
||||||
|
)]
|
||||||
|
pub limit: Option<u16>,
|
||||||
|
|
||||||
|
/// Select output fields (comma-separated, or 'minimal' preset; varies by mode)
|
||||||
|
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
||||||
|
pub fields: Option<Vec<String>>,
|
||||||
|
|
||||||
|
/// Show per-MR detail breakdown (expert mode only)
|
||||||
|
#[arg(
|
||||||
|
long,
|
||||||
|
help_heading = "Output",
|
||||||
|
overrides_with = "no_detail",
|
||||||
|
conflicts_with = "explain_score"
|
||||||
|
)]
|
||||||
|
pub detail: bool,
|
||||||
|
|
||||||
|
#[arg(long = "no-detail", hide = true, overrides_with = "detail")]
|
||||||
|
pub no_detail: bool,
|
||||||
|
|
||||||
|
/// Score as if "now" is this date (ISO 8601 or duration like 30d). Expert mode only.
|
||||||
|
#[arg(long = "as-of", help_heading = "Scoring")]
|
||||||
|
pub as_of: Option<String>,
|
||||||
|
|
||||||
|
/// Show per-component score breakdown in output. Expert mode only.
|
||||||
|
#[arg(long = "explain-score", help_heading = "Scoring")]
|
||||||
|
pub explain_score: bool,
|
||||||
|
|
||||||
|
/// Include bot users in results (normally excluded via scoring.excluded_usernames).
|
||||||
|
#[arg(long = "include-bots", help_heading = "Scoring")]
|
||||||
|
pub include_bots: bool,
|
||||||
|
|
||||||
|
/// Include discussions on closed issues and merged/closed MRs
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub include_closed: bool,
|
||||||
|
|
||||||
|
/// Remove the default time window (query all history). Conflicts with --since.
|
||||||
|
#[arg(
|
||||||
|
long = "all-history",
|
||||||
|
help_heading = "Filters",
|
||||||
|
conflicts_with = "since"
|
||||||
|
)]
|
||||||
|
pub all_history: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore me # Full dashboard (default project or all)
|
||||||
|
lore me --issues # Issues section only
|
||||||
|
lore me --mrs # MRs section only
|
||||||
|
lore me --activity # Activity feed only
|
||||||
|
lore me --all # All synced projects
|
||||||
|
lore me --since 2d # Activity window (default: 30d)
|
||||||
|
lore me --project group/repo # Scope to one project
|
||||||
|
lore me --user jdoe # Override configured username")]
|
||||||
|
pub struct MeArgs {
|
||||||
|
/// Show open issues section
|
||||||
|
#[arg(long, help_heading = "Sections")]
|
||||||
|
pub issues: bool,
|
||||||
|
|
||||||
|
/// Show authored + reviewing MRs section
|
||||||
|
#[arg(long, help_heading = "Sections")]
|
||||||
|
pub mrs: bool,
|
||||||
|
|
||||||
|
/// Show activity feed section
|
||||||
|
#[arg(long, help_heading = "Sections")]
|
||||||
|
pub activity: bool,
|
||||||
|
|
||||||
|
/// Show items you're @mentioned in (not assigned/authored/reviewing)
|
||||||
|
#[arg(long, help_heading = "Sections")]
|
||||||
|
pub mentions: bool,
|
||||||
|
|
||||||
|
/// Activity window (e.g. 7d, 2w, 30d). Default: 30d. Only affects activity section.
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub since: Option<String>,
|
||||||
|
|
||||||
|
/// Scope to a project (supports fuzzy matching)
|
||||||
|
#[arg(short = 'p', long, help_heading = "Filters", conflicts_with = "all")]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
/// Show all synced projects (overrides default_project)
|
||||||
|
#[arg(long, help_heading = "Filters", conflicts_with = "project")]
|
||||||
|
pub all: bool,
|
||||||
|
|
||||||
|
/// Override configured username
|
||||||
|
#[arg(long = "user", help_heading = "Filters")]
|
||||||
|
pub user: Option<String>,
|
||||||
|
|
||||||
|
/// Select output fields (comma-separated, or 'minimal' preset)
|
||||||
|
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
||||||
|
pub fields: Option<Vec<String>>,
|
||||||
|
|
||||||
|
/// Reset the since-last-check cursor (next run shows no new events)
|
||||||
|
#[arg(long, help_heading = "Output")]
|
||||||
|
pub reset_cursor: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MeArgs {
|
||||||
|
/// Returns true if no section flags were passed (show all sections).
|
||||||
|
pub fn show_all_sections(&self) -> bool {
|
||||||
|
!self.issues && !self.mrs && !self.activity && !self.mentions
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore file-history src/main.rs # MRs that touched this file
|
||||||
|
lore file-history src/auth/ -p group/repo # Scoped to project
|
||||||
|
lore file-history src/foo.rs --discussions # Include DiffNote snippets
|
||||||
|
lore file-history src/bar.rs --no-follow-renames # Skip rename chain")]
|
||||||
|
pub struct FileHistoryArgs {
|
||||||
|
/// File path to trace history for
|
||||||
|
pub path: String,
|
||||||
|
|
||||||
|
/// Scope to a specific project (fuzzy match)
|
||||||
|
#[arg(short = 'p', long, help_heading = "Filters")]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
/// Include discussion snippets from DiffNotes on this file
|
||||||
|
#[arg(long, help_heading = "Output")]
|
||||||
|
pub discussions: bool,
|
||||||
|
|
||||||
|
/// Disable rename chain resolution
|
||||||
|
#[arg(long = "no-follow-renames", help_heading = "Filters")]
|
||||||
|
pub no_follow_renames: bool,
|
||||||
|
|
||||||
|
/// Only show merged MRs
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub merged: bool,
|
||||||
|
|
||||||
|
/// Maximum results
|
||||||
|
#[arg(
|
||||||
|
short = 'n',
|
||||||
|
long = "limit",
|
||||||
|
default_value = "50",
|
||||||
|
help_heading = "Output"
|
||||||
|
)]
|
||||||
|
pub limit: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore trace src/main.rs # Why was this file changed?
|
||||||
|
lore trace src/auth/ -p group/repo # Scoped to project
|
||||||
|
lore trace src/foo.rs --discussions # Include DiffNote context
|
||||||
|
lore trace src/bar.rs:42 # Line hint (Tier 2 warning)")]
|
||||||
|
pub struct TraceArgs {
|
||||||
|
/// File path to trace (supports :line suffix for future Tier 2)
|
||||||
|
pub path: String,
|
||||||
|
|
||||||
|
/// Scope to a specific project (fuzzy match)
|
||||||
|
#[arg(short = 'p', long, help_heading = "Filters")]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
/// Include DiffNote discussion snippets
|
||||||
|
#[arg(long, help_heading = "Output")]
|
||||||
|
pub discussions: bool,
|
||||||
|
|
||||||
|
/// Disable rename chain resolution
|
||||||
|
#[arg(long = "no-follow-renames", help_heading = "Filters")]
|
||||||
|
pub no_follow_renames: bool,
|
||||||
|
|
||||||
|
/// Maximum trace chains to display
|
||||||
|
#[arg(
|
||||||
|
short = 'n',
|
||||||
|
long = "limit",
|
||||||
|
default_value = "20",
|
||||||
|
help_heading = "Output"
|
||||||
|
)]
|
||||||
|
pub limit: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore count issues # Total issues in local database
|
||||||
|
lore count notes --for mr # Notes on merge requests only
|
||||||
|
lore count discussions --for issue # Discussions on issues only")]
|
||||||
|
pub struct CountArgs {
|
||||||
|
/// Entity type to count (issues, mrs, discussions, notes, events)
|
||||||
|
#[arg(value_parser = ["issues", "mrs", "discussions", "notes", "events"])]
|
||||||
|
pub entity: String,
|
||||||
|
|
||||||
|
/// Parent type filter: issue or mr (for discussions/notes)
|
||||||
|
#[arg(short = 'f', long = "for", value_parser = ["issue", "mr"])]
|
||||||
|
pub for_entity: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
pub struct CronArgs {
|
||||||
|
#[command(subcommand)]
|
||||||
|
pub action: CronAction,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Subcommand)]
|
||||||
|
pub enum CronAction {
|
||||||
|
/// Install cron job for automatic syncing
|
||||||
|
Install {
|
||||||
|
/// Sync interval in minutes (default: 8)
|
||||||
|
#[arg(long, default_value = "8")]
|
||||||
|
interval: u32,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Remove cron job
|
||||||
|
Uninstall,
|
||||||
|
|
||||||
|
/// Show current cron configuration
|
||||||
|
Status,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Args)]
|
||||||
|
pub struct TokenArgs {
|
||||||
|
#[command(subcommand)]
|
||||||
|
pub action: TokenAction,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Subcommand)]
|
||||||
|
pub enum TokenAction {
|
||||||
|
/// Store a GitLab token in the config file
|
||||||
|
Set {
|
||||||
|
/// Token value (reads from stdin if omitted in non-interactive mode)
|
||||||
|
#[arg(long)]
|
||||||
|
token: Option<String>,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Show the current token (masked by default)
|
||||||
|
Show {
|
||||||
|
/// Show the full unmasked token
|
||||||
|
#[arg(long)]
|
||||||
|
unmask: bool,
|
||||||
|
},
|
||||||
|
}
|
||||||
@@ -209,6 +209,16 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
|||||||
],
|
],
|
||||||
),
|
),
|
||||||
("drift", &["--threshold", "--project"]),
|
("drift", &["--threshold", "--project"]),
|
||||||
|
(
|
||||||
|
"explain",
|
||||||
|
&[
|
||||||
|
"--project",
|
||||||
|
"--sections",
|
||||||
|
"--no-timeline",
|
||||||
|
"--max-decisions",
|
||||||
|
"--since",
|
||||||
|
],
|
||||||
|
),
|
||||||
(
|
(
|
||||||
"notes",
|
"notes",
|
||||||
&[
|
&[
|
||||||
@@ -290,7 +300,6 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
|||||||
"--source-branch",
|
"--source-branch",
|
||||||
],
|
],
|
||||||
),
|
),
|
||||||
("show", &["--project"]),
|
|
||||||
("reset", &["--yes"]),
|
("reset", &["--yes"]),
|
||||||
(
|
(
|
||||||
"me",
|
"me",
|
||||||
@@ -389,6 +398,7 @@ const CANONICAL_SUBCOMMANDS: &[&str] = &[
|
|||||||
"file-history",
|
"file-history",
|
||||||
"trace",
|
"trace",
|
||||||
"drift",
|
"drift",
|
||||||
|
"explain",
|
||||||
"related",
|
"related",
|
||||||
"cron",
|
"cron",
|
||||||
"token",
|
"token",
|
||||||
@@ -396,7 +406,6 @@ const CANONICAL_SUBCOMMANDS: &[&str] = &[
|
|||||||
"backup",
|
"backup",
|
||||||
"reset",
|
"reset",
|
||||||
"list",
|
"list",
|
||||||
"show",
|
|
||||||
"auth-test",
|
"auth-test",
|
||||||
"sync-status",
|
"sync-status",
|
||||||
];
|
];
|
||||||
|
|||||||
@@ -6,8 +6,8 @@ use crate::Config;
|
|||||||
use crate::cli::robot::RobotMeta;
|
use crate::cli::robot::RobotMeta;
|
||||||
use crate::core::db::create_connection;
|
use crate::core::db::create_connection;
|
||||||
use crate::core::error::Result;
|
use crate::core::error::Result;
|
||||||
use crate::core::events_db::{self, EventCounts};
|
|
||||||
use crate::core::paths::get_db_path;
|
use crate::core::paths::get_db_path;
|
||||||
|
use crate::ingestion::storage::events::{EventCounts, count_events};
|
||||||
|
|
||||||
pub struct CountResult {
|
pub struct CountResult {
|
||||||
pub entity: String,
|
pub entity: String,
|
||||||
@@ -208,7 +208,7 @@ struct CountJsonBreakdown {
|
|||||||
pub fn run_count_events(config: &Config) -> Result<EventCounts> {
|
pub fn run_count_events(config: &Config) -> Result<EventCounts> {
|
||||||
let db_path = get_db_path(config.storage.db_path.as_deref());
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
||||||
let conn = create_connection(&db_path)?;
|
let conn = create_connection(&db_path)?;
|
||||||
events_db::count_events(&conn)
|
count_events(&conn)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
@@ -254,7 +254,7 @@ pub fn print_event_count_json(counts: &EventCounts, elapsed_ms: u64) {
|
|||||||
},
|
},
|
||||||
total: counts.total(),
|
total: counts.total(),
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
|
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
@@ -325,7 +325,7 @@ pub fn print_count_json(result: &CountResult, elapsed_ms: u64) {
|
|||||||
system_excluded: result.system_count,
|
system_excluded: result.system_count,
|
||||||
breakdown,
|
breakdown,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
|
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ use crate::core::cron::{
|
|||||||
};
|
};
|
||||||
use crate::core::db::create_connection;
|
use crate::core::db::create_connection;
|
||||||
use crate::core::error::Result;
|
use crate::core::error::Result;
|
||||||
|
use crate::core::ollama_mgmt::{OllamaStatusBrief, ollama_status_brief};
|
||||||
use crate::core::paths::get_db_path;
|
use crate::core::paths::get_db_path;
|
||||||
use crate::core::time::ms_to_iso;
|
use crate::core::time::ms_to_iso;
|
||||||
|
|
||||||
@@ -80,7 +81,7 @@ pub fn print_cron_install_json(result: &CronInstallResult, elapsed_ms: u64) {
|
|||||||
log_path: result.log_path.display().to_string(),
|
log_path: result.log_path.display().to_string(),
|
||||||
replaced: result.replaced,
|
replaced: result.replaced,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
if let Ok(json) = serde_json::to_string(&output) {
|
if let Ok(json) = serde_json::to_string(&output) {
|
||||||
println!("{json}");
|
println!("{json}");
|
||||||
@@ -128,7 +129,7 @@ pub fn print_cron_uninstall_json(result: &CronUninstallResult, elapsed_ms: u64)
|
|||||||
action: "uninstall",
|
action: "uninstall",
|
||||||
was_installed: result.was_installed,
|
was_installed: result.was_installed,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
if let Ok(json) = serde_json::to_string(&output) {
|
if let Ok(json) = serde_json::to_string(&output) {
|
||||||
println!("{json}");
|
println!("{json}");
|
||||||
@@ -143,12 +144,20 @@ pub fn run_cron_status(config: &Config) -> Result<CronStatusInfo> {
|
|||||||
// Query last sync run from DB
|
// Query last sync run from DB
|
||||||
let last_sync = get_last_sync_time(config).unwrap_or_default();
|
let last_sync = get_last_sync_time(config).unwrap_or_default();
|
||||||
|
|
||||||
Ok(CronStatusInfo { status, last_sync })
|
// Quick ollama health check
|
||||||
|
let ollama = ollama_status_brief(&config.embedding.base_url);
|
||||||
|
|
||||||
|
Ok(CronStatusInfo {
|
||||||
|
status,
|
||||||
|
last_sync,
|
||||||
|
ollama,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct CronStatusInfo {
|
pub struct CronStatusInfo {
|
||||||
pub status: CronStatusResult,
|
pub status: CronStatusResult,
|
||||||
pub last_sync: Option<LastSyncInfo>,
|
pub last_sync: Option<LastSyncInfo>,
|
||||||
|
pub ollama: OllamaStatusBrief,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct LastSyncInfo {
|
pub struct LastSyncInfo {
|
||||||
@@ -236,6 +245,32 @@ pub fn print_cron_status(info: &CronStatusInfo) {
|
|||||||
last.status
|
last.status
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Ollama status
|
||||||
|
if info.ollama.installed {
|
||||||
|
if info.ollama.running {
|
||||||
|
println!(
|
||||||
|
" {} running (auto-started by cron if needed)",
|
||||||
|
Theme::dim().render("ollama:")
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
println!(
|
||||||
|
" {} {}",
|
||||||
|
Theme::warning().render("ollama:"),
|
||||||
|
Theme::warning()
|
||||||
|
.render("installed but not running (will attempt start on next sync)")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
println!(
|
||||||
|
" {} {}",
|
||||||
|
Theme::error().render("ollama:"),
|
||||||
|
Theme::error().render("not installed — embeddings unavailable")
|
||||||
|
);
|
||||||
|
if let Some(ref hint) = info.ollama.install_hint {
|
||||||
|
println!(" {hint}");
|
||||||
|
}
|
||||||
|
}
|
||||||
println!();
|
println!();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -264,6 +299,7 @@ struct CronStatusData {
|
|||||||
last_sync_at: Option<String>,
|
last_sync_at: Option<String>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
last_sync_status: Option<String>,
|
last_sync_status: Option<String>,
|
||||||
|
ollama: OllamaStatusBrief,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_cron_status_json(info: &CronStatusInfo, elapsed_ms: u64) {
|
pub fn print_cron_status_json(info: &CronStatusInfo, elapsed_ms: u64) {
|
||||||
@@ -283,8 +319,9 @@ pub fn print_cron_status_json(info: &CronStatusInfo, elapsed_ms: u64) {
|
|||||||
cron_entry: info.status.cron_entry.clone(),
|
cron_entry: info.status.cron_entry.clone(),
|
||||||
last_sync_at: info.last_sync.as_ref().map(|s| s.started_at_iso.clone()),
|
last_sync_at: info.last_sync.as_ref().map(|s| s.started_at_iso.clone()),
|
||||||
last_sync_status: info.last_sync.as_ref().map(|s| s.status.clone()),
|
last_sync_status: info.last_sync.as_ref().map(|s| s.status.clone()),
|
||||||
|
ollama: info.ollama.clone(),
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
if let Ok(json) = serde_json::to_string(&output) {
|
if let Ok(json) = serde_json::to_string(&output) {
|
||||||
println!("{json}");
|
println!("{json}");
|
||||||
|
|||||||
@@ -385,25 +385,11 @@ async fn check_ollama(config: Option<&Config>) -> OllamaCheck {
|
|||||||
let base_url = &config.embedding.base_url;
|
let base_url = &config.embedding.base_url;
|
||||||
let model = &config.embedding.model;
|
let model = &config.embedding.model;
|
||||||
|
|
||||||
let client = match reqwest::Client::builder()
|
let client = crate::http::Client::with_timeout(std::time::Duration::from_secs(2));
|
||||||
.timeout(std::time::Duration::from_secs(2))
|
let url = format!("{base_url}/api/tags");
|
||||||
.build()
|
|
||||||
{
|
|
||||||
Ok(client) => client,
|
|
||||||
Err(e) => {
|
|
||||||
return OllamaCheck {
|
|
||||||
result: CheckResult {
|
|
||||||
status: CheckStatus::Warning,
|
|
||||||
message: Some(format!("Failed to build HTTP client: {e}")),
|
|
||||||
},
|
|
||||||
url: Some(base_url.clone()),
|
|
||||||
model: Some(model.clone()),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
match client.get(format!("{base_url}/api/tags")).send().await {
|
match client.get(&url, &[]).await {
|
||||||
Ok(response) if response.status().is_success() => {
|
Ok(response) if response.is_success() => {
|
||||||
#[derive(serde::Deserialize)]
|
#[derive(serde::Deserialize)]
|
||||||
struct TagsResponse {
|
struct TagsResponse {
|
||||||
models: Option<Vec<ModelInfo>>,
|
models: Option<Vec<ModelInfo>>,
|
||||||
@@ -413,7 +399,7 @@ async fn check_ollama(config: Option<&Config>) -> OllamaCheck {
|
|||||||
name: String,
|
name: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
match response.json::<TagsResponse>().await {
|
match response.json::<TagsResponse>() {
|
||||||
Ok(data) => {
|
Ok(data) => {
|
||||||
let models = data.models.unwrap_or_default();
|
let models = data.models.unwrap_or_default();
|
||||||
let model_names: Vec<&str> = models
|
let model_names: Vec<&str> = models
|
||||||
@@ -462,7 +448,7 @@ async fn check_ollama(config: Option<&Config>) -> OllamaCheck {
|
|||||||
Ok(response) => OllamaCheck {
|
Ok(response) => OllamaCheck {
|
||||||
result: CheckResult {
|
result: CheckResult {
|
||||||
status: CheckStatus::Warning,
|
status: CheckStatus::Warning,
|
||||||
message: Some(format!("Ollama responded with {}", response.status())),
|
message: Some(format!("Ollama responded with {}", response.status)),
|
||||||
},
|
},
|
||||||
url: Some(base_url.clone()),
|
url: Some(base_url.clone()),
|
||||||
model: Some(model.clone()),
|
model: Some(model.clone()),
|
||||||
|
|||||||
@@ -468,7 +468,7 @@ pub fn print_drift_human(response: &DriftResponse) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_drift_json(response: &DriftResponse, elapsed_ms: u64) {
|
pub fn print_drift_json(response: &DriftResponse, elapsed_ms: u64) {
|
||||||
let meta = RobotMeta { elapsed_ms };
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
let output = serde_json::json!({
|
let output = serde_json::json!({
|
||||||
"ok": true,
|
"ok": true,
|
||||||
"data": response,
|
"data": response,
|
||||||
|
|||||||
@@ -135,7 +135,7 @@ pub fn print_embed_json(result: &EmbedCommandResult, elapsed_ms: u64) {
|
|||||||
let output = EmbedJsonOutput {
|
let output = EmbedJsonOutput {
|
||||||
ok: true,
|
ok: true,
|
||||||
data: result,
|
data: result,
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
Ok(json) => println!("{json}"),
|
Ok(json) => println!("{json}"),
|
||||||
|
|||||||
2135
src/cli/commands/explain.rs
Normal file
2135
src/cli/commands/explain.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -5,7 +5,7 @@ use tracing::info;
|
|||||||
use crate::Config;
|
use crate::Config;
|
||||||
use crate::cli::render::{self, Icons, Theme};
|
use crate::cli::render::{self, Icons, Theme};
|
||||||
use crate::core::db::create_connection;
|
use crate::core::db::create_connection;
|
||||||
use crate::core::error::Result;
|
use crate::core::error::{LoreError, Result};
|
||||||
use crate::core::file_history::resolve_rename_chain;
|
use crate::core::file_history::resolve_rename_chain;
|
||||||
use crate::core::paths::get_db_path;
|
use crate::core::paths::get_db_path;
|
||||||
use crate::core::project::resolve_project;
|
use crate::core::project::resolve_project;
|
||||||
@@ -391,7 +391,7 @@ pub fn print_file_history(result: &FileHistoryResult) {
|
|||||||
|
|
||||||
// ── Robot (JSON) output ─────────────────────────────────────────────────────
|
// ── Robot (JSON) output ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
pub fn print_file_history_json(result: &FileHistoryResult, elapsed_ms: u64) {
|
pub fn print_file_history_json(result: &FileHistoryResult, elapsed_ms: u64) -> Result<()> {
|
||||||
let output = serde_json::json!({
|
let output = serde_json::json!({
|
||||||
"ok": true,
|
"ok": true,
|
||||||
"data": {
|
"data": {
|
||||||
@@ -409,5 +409,10 @@ pub fn print_file_history_json(result: &FileHistoryResult, elapsed_ms: u64) {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
println!("{}", serde_json::to_string(&output).unwrap_or_default());
|
println!(
|
||||||
|
"{}",
|
||||||
|
serde_json::to_string(&output)
|
||||||
|
.map_err(|e| LoreError::Other(format!("JSON serialization failed: {e}")))?
|
||||||
|
);
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -257,7 +257,7 @@ pub fn print_generate_docs_json(result: &GenerateDocsResult, elapsed_ms: u64) {
|
|||||||
unchanged: result.unchanged,
|
unchanged: result.unchanged,
|
||||||
errored: result.errored,
|
errored: result.errored,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
Ok(json) => println!("{json}"),
|
Ok(json) => println!("{json}"),
|
||||||
|
|||||||
26
src/cli/commands/ingest/mod.rs
Normal file
26
src/cli/commands/ingest/mod.rs
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||||
|
|
||||||
|
use crate::cli::render::Theme;
|
||||||
|
use indicatif::{ProgressBar, ProgressStyle};
|
||||||
|
use rusqlite::Connection;
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
use tracing::Instrument;
|
||||||
|
|
||||||
|
use crate::Config;
|
||||||
|
use crate::cli::robot::RobotMeta;
|
||||||
|
use crate::core::db::create_connection;
|
||||||
|
use crate::core::error::{LoreError, Result};
|
||||||
|
use crate::core::lock::{AppLock, LockOptions};
|
||||||
|
use crate::core::paths::get_db_path;
|
||||||
|
use crate::core::project::resolve_project;
|
||||||
|
use crate::core::shutdown::ShutdownSignal;
|
||||||
|
use crate::gitlab::GitLabClient;
|
||||||
|
use crate::ingestion::{
|
||||||
|
IngestMrProjectResult, IngestProjectResult, ProgressEvent, ingest_project_issues_with_progress,
|
||||||
|
ingest_project_merge_requests_with_progress,
|
||||||
|
};
|
||||||
|
|
||||||
|
include!("run.rs");
|
||||||
|
include!("render.rs");
|
||||||
331
src/cli/commands/ingest/render.rs
Normal file
331
src/cli/commands/ingest/render.rs
Normal file
@@ -0,0 +1,331 @@
|
|||||||
|
fn print_issue_project_summary(path: &str, result: &IngestProjectResult) {
|
||||||
|
let labels_str = if result.labels_created > 0 {
|
||||||
|
format!(", {} new labels", result.labels_created)
|
||||||
|
} else {
|
||||||
|
String::new()
|
||||||
|
};
|
||||||
|
|
||||||
|
println!(
|
||||||
|
" {}: {} issues fetched{}",
|
||||||
|
Theme::info().render(path),
|
||||||
|
result.issues_upserted,
|
||||||
|
labels_str
|
||||||
|
);
|
||||||
|
|
||||||
|
if result.issues_synced_discussions > 0 {
|
||||||
|
println!(
|
||||||
|
" {} issues -> {} discussions, {} notes",
|
||||||
|
result.issues_synced_discussions, result.discussions_fetched, result.notes_upserted
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if result.issues_skipped_discussion_sync > 0 {
|
||||||
|
println!(
|
||||||
|
" {} unchanged issues (discussion sync skipped)",
|
||||||
|
Theme::dim().render(&result.issues_skipped_discussion_sync.to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn print_mr_project_summary(path: &str, result: &IngestMrProjectResult) {
|
||||||
|
let labels_str = if result.labels_created > 0 {
|
||||||
|
format!(", {} new labels", result.labels_created)
|
||||||
|
} else {
|
||||||
|
String::new()
|
||||||
|
};
|
||||||
|
|
||||||
|
let assignees_str = if result.assignees_linked > 0 || result.reviewers_linked > 0 {
|
||||||
|
format!(
|
||||||
|
", {} assignees, {} reviewers",
|
||||||
|
result.assignees_linked, result.reviewers_linked
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
String::new()
|
||||||
|
};
|
||||||
|
|
||||||
|
println!(
|
||||||
|
" {}: {} MRs fetched{}{}",
|
||||||
|
Theme::info().render(path),
|
||||||
|
result.mrs_upserted,
|
||||||
|
labels_str,
|
||||||
|
assignees_str
|
||||||
|
);
|
||||||
|
|
||||||
|
if result.mrs_synced_discussions > 0 {
|
||||||
|
let diffnotes_str = if result.diffnotes_count > 0 {
|
||||||
|
format!(" ({} diff notes)", result.diffnotes_count)
|
||||||
|
} else {
|
||||||
|
String::new()
|
||||||
|
};
|
||||||
|
println!(
|
||||||
|
" {} MRs -> {} discussions, {} notes{}",
|
||||||
|
result.mrs_synced_discussions,
|
||||||
|
result.discussions_fetched,
|
||||||
|
result.notes_upserted,
|
||||||
|
diffnotes_str
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if result.mrs_skipped_discussion_sync > 0 {
|
||||||
|
println!(
|
||||||
|
" {} unchanged MRs (discussion sync skipped)",
|
||||||
|
Theme::dim().render(&result.mrs_skipped_discussion_sync.to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct IngestJsonOutput {
|
||||||
|
ok: bool,
|
||||||
|
data: IngestJsonData,
|
||||||
|
meta: RobotMeta,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct IngestJsonData {
|
||||||
|
resource_type: String,
|
||||||
|
projects_synced: usize,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
issues: Option<IngestIssueStats>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
merge_requests: Option<IngestMrStats>,
|
||||||
|
labels_created: usize,
|
||||||
|
discussions_fetched: usize,
|
||||||
|
notes_upserted: usize,
|
||||||
|
resource_events_fetched: usize,
|
||||||
|
resource_events_failed: usize,
|
||||||
|
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||||
|
status_enrichment: Vec<StatusEnrichmentJson>,
|
||||||
|
status_enrichment_errors: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct StatusEnrichmentJson {
|
||||||
|
mode: String,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
reason: Option<String>,
|
||||||
|
seen: usize,
|
||||||
|
enriched: usize,
|
||||||
|
cleared: usize,
|
||||||
|
without_widget: usize,
|
||||||
|
partial_errors: usize,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
first_partial_error: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
error: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct IngestIssueStats {
|
||||||
|
fetched: usize,
|
||||||
|
upserted: usize,
|
||||||
|
synced_discussions: usize,
|
||||||
|
skipped_discussion_sync: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct IngestMrStats {
|
||||||
|
fetched: usize,
|
||||||
|
upserted: usize,
|
||||||
|
synced_discussions: usize,
|
||||||
|
skipped_discussion_sync: usize,
|
||||||
|
assignees_linked: usize,
|
||||||
|
reviewers_linked: usize,
|
||||||
|
diffnotes_count: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_ingest_summary_json(result: &IngestResult, elapsed_ms: u64) {
|
||||||
|
let (issues, merge_requests) = if result.resource_type == "issues" {
|
||||||
|
(
|
||||||
|
Some(IngestIssueStats {
|
||||||
|
fetched: result.issues_fetched,
|
||||||
|
upserted: result.issues_upserted,
|
||||||
|
synced_discussions: result.issues_synced_discussions,
|
||||||
|
skipped_discussion_sync: result.issues_skipped_discussion_sync,
|
||||||
|
}),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
None,
|
||||||
|
Some(IngestMrStats {
|
||||||
|
fetched: result.mrs_fetched,
|
||||||
|
upserted: result.mrs_upserted,
|
||||||
|
synced_discussions: result.mrs_synced_discussions,
|
||||||
|
skipped_discussion_sync: result.mrs_skipped_discussion_sync,
|
||||||
|
assignees_linked: result.assignees_linked,
|
||||||
|
reviewers_linked: result.reviewers_linked,
|
||||||
|
diffnotes_count: result.diffnotes_count,
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
};
|
||||||
|
|
||||||
|
let status_enrichment: Vec<StatusEnrichmentJson> = result
|
||||||
|
.status_enrichment_projects
|
||||||
|
.iter()
|
||||||
|
.map(|p| StatusEnrichmentJson {
|
||||||
|
mode: p.mode.clone(),
|
||||||
|
reason: p.reason.clone(),
|
||||||
|
seen: p.seen,
|
||||||
|
enriched: p.enriched,
|
||||||
|
cleared: p.cleared,
|
||||||
|
without_widget: p.without_widget,
|
||||||
|
partial_errors: p.partial_errors,
|
||||||
|
first_partial_error: p.first_partial_error.clone(),
|
||||||
|
error: p.error.clone(),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let output = IngestJsonOutput {
|
||||||
|
ok: true,
|
||||||
|
data: IngestJsonData {
|
||||||
|
resource_type: result.resource_type.clone(),
|
||||||
|
projects_synced: result.projects_synced,
|
||||||
|
issues,
|
||||||
|
merge_requests,
|
||||||
|
labels_created: result.labels_created,
|
||||||
|
discussions_fetched: result.discussions_fetched,
|
||||||
|
notes_upserted: result.notes_upserted,
|
||||||
|
resource_events_fetched: result.resource_events_fetched,
|
||||||
|
resource_events_failed: result.resource_events_failed,
|
||||||
|
status_enrichment,
|
||||||
|
status_enrichment_errors: result.status_enrichment_errors,
|
||||||
|
},
|
||||||
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
|
};
|
||||||
|
|
||||||
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_ingest_summary(result: &IngestResult) {
|
||||||
|
println!();
|
||||||
|
|
||||||
|
if result.resource_type == "issues" {
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
Theme::success().render(&format!(
|
||||||
|
"Total: {} issues, {} discussions, {} notes",
|
||||||
|
result.issues_upserted, result.discussions_fetched, result.notes_upserted
|
||||||
|
))
|
||||||
|
);
|
||||||
|
|
||||||
|
if result.issues_skipped_discussion_sync > 0 {
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
Theme::dim().render(&format!(
|
||||||
|
"Skipped discussion sync for {} unchanged issues.",
|
||||||
|
result.issues_skipped_discussion_sync
|
||||||
|
))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let diffnotes_str = if result.diffnotes_count > 0 {
|
||||||
|
format!(" ({} diff notes)", result.diffnotes_count)
|
||||||
|
} else {
|
||||||
|
String::new()
|
||||||
|
};
|
||||||
|
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
Theme::success().render(&format!(
|
||||||
|
"Total: {} MRs, {} discussions, {} notes{}",
|
||||||
|
result.mrs_upserted,
|
||||||
|
result.discussions_fetched,
|
||||||
|
result.notes_upserted,
|
||||||
|
diffnotes_str
|
||||||
|
))
|
||||||
|
);
|
||||||
|
|
||||||
|
if result.mrs_skipped_discussion_sync > 0 {
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
Theme::dim().render(&format!(
|
||||||
|
"Skipped discussion sync for {} unchanged MRs.",
|
||||||
|
result.mrs_skipped_discussion_sync
|
||||||
|
))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if result.resource_events_fetched > 0 || result.resource_events_failed > 0 {
|
||||||
|
println!(
|
||||||
|
" Resource events: {} fetched{}",
|
||||||
|
result.resource_events_fetched,
|
||||||
|
if result.resource_events_failed > 0 {
|
||||||
|
format!(", {} failed", result.resource_events_failed)
|
||||||
|
} else {
|
||||||
|
String::new()
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_dry_run_preview(preview: &DryRunPreview) {
|
||||||
|
println!(
|
||||||
|
"{} {}",
|
||||||
|
Theme::info().bold().render("Dry Run Preview"),
|
||||||
|
Theme::warning().render("(no changes will be made)")
|
||||||
|
);
|
||||||
|
println!();
|
||||||
|
|
||||||
|
let type_label = if preview.resource_type == "issues" {
|
||||||
|
"issues"
|
||||||
|
} else {
|
||||||
|
"merge requests"
|
||||||
|
};
|
||||||
|
|
||||||
|
println!(" Resource type: {}", Theme::bold().render(type_label));
|
||||||
|
println!(
|
||||||
|
" Sync mode: {}",
|
||||||
|
if preview.sync_mode == "full" {
|
||||||
|
Theme::warning().render("full (all data will be re-fetched)")
|
||||||
|
} else {
|
||||||
|
Theme::success().render("incremental (only changes since last sync)")
|
||||||
|
}
|
||||||
|
);
|
||||||
|
println!(" Projects: {}", preview.projects.len());
|
||||||
|
println!();
|
||||||
|
|
||||||
|
println!("{}", Theme::info().bold().render("Projects to sync:"));
|
||||||
|
for project in &preview.projects {
|
||||||
|
let sync_status = if !project.has_cursor {
|
||||||
|
Theme::warning().render("initial sync")
|
||||||
|
} else {
|
||||||
|
Theme::success().render("incremental")
|
||||||
|
};
|
||||||
|
|
||||||
|
println!(
|
||||||
|
" {} ({})",
|
||||||
|
Theme::bold().render(&project.path),
|
||||||
|
sync_status
|
||||||
|
);
|
||||||
|
println!(" Existing {}: {}", type_label, project.existing_count);
|
||||||
|
|
||||||
|
if let Some(ref last_synced) = project.last_synced {
|
||||||
|
println!(" Last synced: {}", last_synced);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct DryRunJsonOutput {
|
||||||
|
ok: bool,
|
||||||
|
dry_run: bool,
|
||||||
|
data: DryRunPreview,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_dry_run_preview_json(preview: &DryRunPreview) {
|
||||||
|
let output = DryRunJsonOutput {
|
||||||
|
ok: true,
|
||||||
|
dry_run: true,
|
||||||
|
data: preview.clone(),
|
||||||
|
};
|
||||||
|
|
||||||
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,27 +1,3 @@
|
|||||||
use std::sync::Arc;
|
|
||||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
|
||||||
|
|
||||||
use crate::cli::render::Theme;
|
|
||||||
use indicatif::{ProgressBar, ProgressStyle};
|
|
||||||
use rusqlite::Connection;
|
|
||||||
use serde::Serialize;
|
|
||||||
|
|
||||||
use tracing::Instrument;
|
|
||||||
|
|
||||||
use crate::Config;
|
|
||||||
use crate::cli::robot::RobotMeta;
|
|
||||||
use crate::core::db::create_connection;
|
|
||||||
use crate::core::error::{LoreError, Result};
|
|
||||||
use crate::core::lock::{AppLock, LockOptions};
|
|
||||||
use crate::core::paths::get_db_path;
|
|
||||||
use crate::core::project::resolve_project;
|
|
||||||
use crate::core::shutdown::ShutdownSignal;
|
|
||||||
use crate::gitlab::GitLabClient;
|
|
||||||
use crate::ingestion::{
|
|
||||||
IngestMrProjectResult, IngestProjectResult, ProgressEvent, ingest_project_issues_with_progress,
|
|
||||||
ingest_project_merge_requests_with_progress,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct IngestResult {
|
pub struct IngestResult {
|
||||||
pub resource_type: String,
|
pub resource_type: String,
|
||||||
@@ -295,11 +271,11 @@ async fn run_ingest_inner(
|
|||||||
|
|
||||||
let token = config.gitlab.resolve_token()?;
|
let token = config.gitlab.resolve_token()?;
|
||||||
|
|
||||||
let client = GitLabClient::new(
|
let client = Arc::new(GitLabClient::new(
|
||||||
&config.gitlab.base_url,
|
&config.gitlab.base_url,
|
||||||
&token,
|
&token,
|
||||||
Some(config.sync.requests_per_second),
|
Some(config.sync.requests_per_second),
|
||||||
);
|
));
|
||||||
|
|
||||||
let projects = get_projects_to_sync(&conn, &config.projects, project_filter)?;
|
let projects = get_projects_to_sync(&conn, &config.projects, project_filter)?;
|
||||||
|
|
||||||
@@ -376,7 +352,7 @@ async fn run_ingest_inner(
|
|||||||
|
|
||||||
let project_results: Vec<Result<ProjectIngestOutcome>> = stream::iter(projects.iter())
|
let project_results: Vec<Result<ProjectIngestOutcome>> = stream::iter(projects.iter())
|
||||||
.map(|(local_project_id, gitlab_project_id, path)| {
|
.map(|(local_project_id, gitlab_project_id, path)| {
|
||||||
let client = client.clone();
|
let client = Arc::clone(&client);
|
||||||
let db_path = db_path.clone();
|
let db_path = db_path.clone();
|
||||||
let config = config.clone();
|
let config = config.clone();
|
||||||
let resource_type = resource_type_owned.clone();
|
let resource_type = resource_type_owned.clone();
|
||||||
@@ -783,334 +759,3 @@ fn get_projects_to_sync(
|
|||||||
Ok(projects)
|
Ok(projects)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn print_issue_project_summary(path: &str, result: &IngestProjectResult) {
|
|
||||||
let labels_str = if result.labels_created > 0 {
|
|
||||||
format!(", {} new labels", result.labels_created)
|
|
||||||
} else {
|
|
||||||
String::new()
|
|
||||||
};
|
|
||||||
|
|
||||||
println!(
|
|
||||||
" {}: {} issues fetched{}",
|
|
||||||
Theme::info().render(path),
|
|
||||||
result.issues_upserted,
|
|
||||||
labels_str
|
|
||||||
);
|
|
||||||
|
|
||||||
if result.issues_synced_discussions > 0 {
|
|
||||||
println!(
|
|
||||||
" {} issues -> {} discussions, {} notes",
|
|
||||||
result.issues_synced_discussions, result.discussions_fetched, result.notes_upserted
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if result.issues_skipped_discussion_sync > 0 {
|
|
||||||
println!(
|
|
||||||
" {} unchanged issues (discussion sync skipped)",
|
|
||||||
Theme::dim().render(&result.issues_skipped_discussion_sync.to_string())
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn print_mr_project_summary(path: &str, result: &IngestMrProjectResult) {
|
|
||||||
let labels_str = if result.labels_created > 0 {
|
|
||||||
format!(", {} new labels", result.labels_created)
|
|
||||||
} else {
|
|
||||||
String::new()
|
|
||||||
};
|
|
||||||
|
|
||||||
let assignees_str = if result.assignees_linked > 0 || result.reviewers_linked > 0 {
|
|
||||||
format!(
|
|
||||||
", {} assignees, {} reviewers",
|
|
||||||
result.assignees_linked, result.reviewers_linked
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
String::new()
|
|
||||||
};
|
|
||||||
|
|
||||||
println!(
|
|
||||||
" {}: {} MRs fetched{}{}",
|
|
||||||
Theme::info().render(path),
|
|
||||||
result.mrs_upserted,
|
|
||||||
labels_str,
|
|
||||||
assignees_str
|
|
||||||
);
|
|
||||||
|
|
||||||
if result.mrs_synced_discussions > 0 {
|
|
||||||
let diffnotes_str = if result.diffnotes_count > 0 {
|
|
||||||
format!(" ({} diff notes)", result.diffnotes_count)
|
|
||||||
} else {
|
|
||||||
String::new()
|
|
||||||
};
|
|
||||||
println!(
|
|
||||||
" {} MRs -> {} discussions, {} notes{}",
|
|
||||||
result.mrs_synced_discussions,
|
|
||||||
result.discussions_fetched,
|
|
||||||
result.notes_upserted,
|
|
||||||
diffnotes_str
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if result.mrs_skipped_discussion_sync > 0 {
|
|
||||||
println!(
|
|
||||||
" {} unchanged MRs (discussion sync skipped)",
|
|
||||||
Theme::dim().render(&result.mrs_skipped_discussion_sync.to_string())
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize)]
|
|
||||||
struct IngestJsonOutput {
|
|
||||||
ok: bool,
|
|
||||||
data: IngestJsonData,
|
|
||||||
meta: RobotMeta,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize)]
|
|
||||||
struct IngestJsonData {
|
|
||||||
resource_type: String,
|
|
||||||
projects_synced: usize,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
issues: Option<IngestIssueStats>,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
merge_requests: Option<IngestMrStats>,
|
|
||||||
labels_created: usize,
|
|
||||||
discussions_fetched: usize,
|
|
||||||
notes_upserted: usize,
|
|
||||||
resource_events_fetched: usize,
|
|
||||||
resource_events_failed: usize,
|
|
||||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
|
||||||
status_enrichment: Vec<StatusEnrichmentJson>,
|
|
||||||
status_enrichment_errors: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize)]
|
|
||||||
struct StatusEnrichmentJson {
|
|
||||||
mode: String,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
reason: Option<String>,
|
|
||||||
seen: usize,
|
|
||||||
enriched: usize,
|
|
||||||
cleared: usize,
|
|
||||||
without_widget: usize,
|
|
||||||
partial_errors: usize,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
first_partial_error: Option<String>,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
error: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize)]
|
|
||||||
struct IngestIssueStats {
|
|
||||||
fetched: usize,
|
|
||||||
upserted: usize,
|
|
||||||
synced_discussions: usize,
|
|
||||||
skipped_discussion_sync: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize)]
|
|
||||||
struct IngestMrStats {
|
|
||||||
fetched: usize,
|
|
||||||
upserted: usize,
|
|
||||||
synced_discussions: usize,
|
|
||||||
skipped_discussion_sync: usize,
|
|
||||||
assignees_linked: usize,
|
|
||||||
reviewers_linked: usize,
|
|
||||||
diffnotes_count: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Prints the ingest result as a single line of JSON on stdout (robot mode).
///
/// Exactly one of the `issues` / `merge_requests` stat sections is populated,
/// chosen by `result.resource_type`; the other is `None`. Serialization
/// failures are reported on stderr rather than panicking.
pub fn print_ingest_summary_json(result: &IngestResult, elapsed_ms: u64) {
    // Pick which per-resource stats section to fill from the flat counters.
    let (issues, merge_requests) = if result.resource_type == "issues" {
        (
            Some(IngestIssueStats {
                fetched: result.issues_fetched,
                upserted: result.issues_upserted,
                synced_discussions: result.issues_synced_discussions,
                skipped_discussion_sync: result.issues_skipped_discussion_sync,
            }),
            None,
        )
    } else {
        (
            None,
            Some(IngestMrStats {
                fetched: result.mrs_fetched,
                upserted: result.mrs_upserted,
                synced_discussions: result.mrs_synced_discussions,
                skipped_discussion_sync: result.mrs_skipped_discussion_sync,
                assignees_linked: result.assignees_linked,
                reviewers_linked: result.reviewers_linked,
                diffnotes_count: result.diffnotes_count,
            }),
        )
    };

    // Flatten the per-project status-enrichment reports into their JSON shape.
    let status_enrichment: Vec<StatusEnrichmentJson> = result
        .status_enrichment_projects
        .iter()
        .map(|p| StatusEnrichmentJson {
            mode: p.mode.clone(),
            reason: p.reason.clone(),
            seen: p.seen,
            enriched: p.enriched,
            cleared: p.cleared,
            without_widget: p.without_widget,
            partial_errors: p.partial_errors,
            first_partial_error: p.first_partial_error.clone(),
            error: p.error.clone(),
        })
        .collect();

    let output = IngestJsonOutput {
        ok: true,
        data: IngestJsonData {
            resource_type: result.resource_type.clone(),
            projects_synced: result.projects_synced,
            issues,
            merge_requests,
            labels_created: result.labels_created,
            discussions_fetched: result.discussions_fetched,
            notes_upserted: result.notes_upserted,
            resource_events_fetched: result.resource_events_fetched,
            resource_events_failed: result.resource_events_failed,
            status_enrichment,
            status_enrichment_errors: result.status_enrichment_errors,
        },
        meta: RobotMeta { elapsed_ms },
    };

    match serde_json::to_string(&output) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing to JSON: {e}"),
    }
}
|
|
||||||
|
|
||||||
/// Prints a human-readable ingest summary to stdout.
///
/// Branches on `result.resource_type` to phrase the totals as issues or MRs,
/// appends a dimmed note when unchanged items skipped discussion sync, and
/// ends with a resource-events line when any were fetched or failed.
pub fn print_ingest_summary(result: &IngestResult) {
    // Blank separator line before the summary.
    println!();

    if result.resource_type == "issues" {
        println!(
            "{}",
            Theme::success().render(&format!(
                "Total: {} issues, {} discussions, {} notes",
                result.issues_upserted, result.discussions_fetched, result.notes_upserted
            ))
        );

        if result.issues_skipped_discussion_sync > 0 {
            println!(
                "{}",
                Theme::dim().render(&format!(
                    "Skipped discussion sync for {} unchanged issues.",
                    result.issues_skipped_discussion_sync
                ))
            );
        }
    } else {
        // Only mention diff notes when there were any.
        let diffnotes_str = if result.diffnotes_count > 0 {
            format!(" ({} diff notes)", result.diffnotes_count)
        } else {
            String::new()
        };

        println!(
            "{}",
            Theme::success().render(&format!(
                "Total: {} MRs, {} discussions, {} notes{}",
                result.mrs_upserted,
                result.discussions_fetched,
                result.notes_upserted,
                diffnotes_str
            ))
        );

        if result.mrs_skipped_discussion_sync > 0 {
            println!(
                "{}",
                Theme::dim().render(&format!(
                    "Skipped discussion sync for {} unchanged MRs.",
                    result.mrs_skipped_discussion_sync
                ))
            );
        }
    }

    // Resource-event totals, shown only when something happened.
    if result.resource_events_fetched > 0 || result.resource_events_failed > 0 {
        println!(
            " Resource events: {} fetched{}",
            result.resource_events_fetched,
            if result.resource_events_failed > 0 {
                format!(", {} failed", result.resource_events_failed)
            } else {
                String::new()
            }
        );
    }
}
|
|
||||||
|
|
||||||
/// Prints the human-readable dry-run preview: header, resource type,
/// sync mode, project count, then one entry per project with its sync
/// state (initial vs incremental), existing item count, and last-sync time.
pub fn print_dry_run_preview(preview: &DryRunPreview) {
    println!(
        "{} {}",
        Theme::info().bold().render("Dry Run Preview"),
        Theme::warning().render("(no changes will be made)")
    );
    println!();

    let type_label = if preview.resource_type == "issues" {
        "issues"
    } else {
        "merge requests"
    };

    println!(" Resource type: {}", Theme::bold().render(type_label));
    println!(
        " Sync mode: {}",
        if preview.sync_mode == "full" {
            Theme::warning().render("full (all data will be re-fetched)")
        } else {
            Theme::success().render("incremental (only changes since last sync)")
        }
    );
    println!(" Projects: {}", preview.projects.len());
    println!();

    println!("{}", Theme::info().bold().render("Projects to sync:"));
    for project in &preview.projects {
        // A project without a stored cursor has never been synced before.
        let sync_status = if !project.has_cursor {
            Theme::warning().render("initial sync")
        } else {
            Theme::success().render("incremental")
        };

        println!(
            " {} ({})",
            Theme::bold().render(&project.path),
            sync_status
        );
        println!(" Existing {}: {}", type_label, project.existing_count);

        if let Some(ref last_synced) = project.last_synced {
            println!(" Last synced: {}", last_synced);
        }
    }
}
|
|
||||||
|
|
||||||
/// JSON envelope for the robot-mode dry-run preview.
#[derive(Serialize)]
struct DryRunJsonOutput {
    // Overall success flag of the envelope.
    ok: bool,
    // Marks the payload as a preview; no changes were applied.
    dry_run: bool,
    // The preview itself, serialized as-is.
    data: DryRunPreview,
}
|
|
||||||
|
|
||||||
pub fn print_dry_run_preview_json(preview: &DryRunPreview) {
|
|
||||||
let output = DryRunJsonOutput {
|
|
||||||
ok: true,
|
|
||||||
dry_run: true,
|
|
||||||
data: preview.clone(),
|
|
||||||
};
|
|
||||||
|
|
||||||
match serde_json::to_string(&output) {
|
|
||||||
Ok(json) => println!("{json}"),
|
|
||||||
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
443
src/cli/commands/list/issues.rs
Normal file
443
src/cli/commands/list/issues.rs
Normal file
@@ -0,0 +1,443 @@
|
|||||||
|
use crate::cli::render::{self, Align, Icons, StyledCell, Table as LoreTable, Theme};
|
||||||
|
use rusqlite::Connection;
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
use crate::Config;
|
||||||
|
use crate::cli::robot::{expand_fields_preset, filter_fields};
|
||||||
|
use crate::core::db::create_connection;
|
||||||
|
use crate::core::error::{LoreError, Result};
|
||||||
|
use crate::core::paths::get_db_path;
|
||||||
|
use crate::core::project::resolve_project;
|
||||||
|
use crate::core::time::{ms_to_iso, parse_since};
|
||||||
|
|
||||||
|
use super::render_helpers::{format_assignees, format_discussions};
|
||||||
|
|
||||||
|
/// One issue row as read from the local database for `lore list issues`.
///
/// Timestamps are raw epoch milliseconds; the `*Json` companion type renders
/// them as ISO strings for robot output.
#[derive(Debug, Serialize)]
pub struct IssueListRow {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub author_username: String,
    // Epoch milliseconds.
    pub created_at: i64,
    // Epoch milliseconds; also the default sort key.
    pub updated_at: i64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_url: Option<String>,
    pub project_path: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    // Total discussions attached to the issue.
    pub discussion_count: i64,
    // Discussions that are resolvable but not yet resolved.
    pub unresolved_count: i64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_name: Option<String>,
    // Carried for internal use but never serialized.
    #[serde(skip_serializing)]
    pub status_category: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_color: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_icon_name: Option<String>,
    // Epoch milliseconds of the last status sync, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_synced_at: Option<i64>,
}
|
||||||
|
|
||||||
|
/// JSON-facing shape of [`IssueListRow`]: same data, but millisecond
/// timestamps rendered as ISO-8601 strings.
#[derive(Serialize)]
pub struct IssueListRowJson {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub author_username: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub discussion_count: i64,
    pub unresolved_count: i64,
    // ISO-8601 rendering of the creation timestamp.
    pub created_at_iso: String,
    // ISO-8601 rendering of the last-update timestamp.
    pub updated_at_iso: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_url: Option<String>,
    pub project_path: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_name: Option<String>,
    // Carried for internal use but never serialized.
    #[serde(skip_serializing)]
    pub status_category: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_color: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_icon_name: Option<String>,
    // ISO-8601 rendering of the last status sync, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_synced_at_iso: Option<String>,
}
|
||||||
|
|
||||||
|
impl From<&IssueListRow> for IssueListRowJson {
|
||||||
|
fn from(row: &IssueListRow) -> Self {
|
||||||
|
Self {
|
||||||
|
iid: row.iid,
|
||||||
|
title: row.title.clone(),
|
||||||
|
state: row.state.clone(),
|
||||||
|
author_username: row.author_username.clone(),
|
||||||
|
labels: row.labels.clone(),
|
||||||
|
assignees: row.assignees.clone(),
|
||||||
|
discussion_count: row.discussion_count,
|
||||||
|
unresolved_count: row.unresolved_count,
|
||||||
|
created_at_iso: ms_to_iso(row.created_at),
|
||||||
|
updated_at_iso: ms_to_iso(row.updated_at),
|
||||||
|
web_url: row.web_url.clone(),
|
||||||
|
project_path: row.project_path.clone(),
|
||||||
|
status_name: row.status_name.clone(),
|
||||||
|
status_category: row.status_category.clone(),
|
||||||
|
status_color: row.status_color.clone(),
|
||||||
|
status_icon_name: row.status_icon_name.clone(),
|
||||||
|
status_synced_at_iso: row.status_synced_at.map(ms_to_iso),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Result of an issue list query.
#[derive(Serialize)]
pub struct ListResult {
    // Rows matching the filters, capped by the query's LIMIT.
    pub issues: Vec<IssueListRow>,
    // Total number of matching rows, ignoring the limit.
    pub total_count: usize,
    // Distinct status names present in the database (filled in separately).
    pub available_statuses: Vec<String>,
}
|
||||||
|
|
||||||
|
/// JSON-facing shape of [`ListResult`], with rows converted and an explicit
/// `showing` count (rows returned after the limit was applied).
#[derive(Serialize)]
pub struct ListResultJson {
    pub issues: Vec<IssueListRowJson>,
    // Total matches ignoring the limit.
    pub total_count: usize,
    // Number of rows actually included in `issues`.
    pub showing: usize,
}
|
||||||
|
|
||||||
|
impl From<&ListResult> for ListResultJson {
|
||||||
|
fn from(result: &ListResult) -> Self {
|
||||||
|
Self {
|
||||||
|
issues: result.issues.iter().map(IssueListRowJson::from).collect(),
|
||||||
|
total_count: result.total_count,
|
||||||
|
showing: result.issues.len(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Filter and presentation options for the issue list query.
///
/// All string filters borrow from the CLI arguments for the duration of the
/// query.
pub struct ListFilters<'a> {
    // Maximum number of rows to return.
    pub limit: usize,
    // Project path or alias; resolved via `resolve_project` when set.
    pub project: Option<&'a str>,
    // Issue state; the literal "all" disables the state filter.
    pub state: Option<&'a str>,
    // Author username; a leading '@' is accepted and stripped.
    pub author: Option<&'a str>,
    // Assignee username; a leading '@' is accepted and stripped.
    pub assignee: Option<&'a str>,
    // Labels that must ALL be present (AND semantics).
    pub labels: Option<&'a [String]>,
    pub milestone: Option<&'a str>,
    // Updated-at cutoff; relative (7d, 2w, 1m) or absolute (YYYY-MM-DD).
    pub since: Option<&'a str>,
    // Only issues with a due date on or before this date.
    pub due_before: Option<&'a str>,
    // Only issues that have any due date at all.
    pub has_due_date: bool,
    // Status names to match (case-insensitive); empty slice means no filter.
    pub statuses: &'a [String],
    // Sort key: "created", "iid", or anything else for updated_at.
    pub sort: &'a str,
    // "asc" for ascending; anything else sorts descending.
    pub order: &'a str,
}
|
||||||
|
|
||||||
|
pub fn run_list_issues(config: &Config, filters: ListFilters) -> Result<ListResult> {
|
||||||
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
||||||
|
let conn = create_connection(&db_path)?;
|
||||||
|
|
||||||
|
let mut result = query_issues(&conn, &filters)?;
|
||||||
|
result.available_statuses = query_available_statuses(&conn)?;
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns every distinct non-NULL `status_name` stored on issues, sorted
/// alphabetically by the database.
fn query_available_statuses(conn: &Connection) -> Result<Vec<String>> {
    let mut stmt = conn.prepare(
        "SELECT DISTINCT status_name FROM issues WHERE status_name IS NOT NULL ORDER BY status_name",
    )?;

    let mut names = Vec::new();
    for name in stmt.query_map([], |row| row.get::<_, String>(0))? {
        names.push(name?);
    }
    Ok(names)
}
|
||||||
|
|
||||||
|
/// Builds and executes the issue list query from `filters`.
///
/// WHERE clauses and their positional parameters are accumulated in lockstep
/// (`where_clauses` / `params`), so clause order and parameter order must
/// always match. Runs a COUNT first for the unlimited total, then the real
/// query with `LIMIT`. `available_statuses` is left empty here; the caller
/// fills it in.
///
/// # Errors
/// Returns an error for an unresolvable project, an invalid `--since` value,
/// or any underlying SQLite failure.
fn query_issues(conn: &Connection, filters: &ListFilters) -> Result<ListResult> {
    let mut where_clauses = Vec::new();
    // Boxed so heterogeneous parameter types can share one vec.
    let mut params: Vec<Box<dyn rusqlite::ToSql>> = Vec::new();

    if let Some(project) = filters.project {
        let project_id = resolve_project(conn, project)?;
        where_clauses.push("i.project_id = ?");
        params.push(Box::new(project_id));
    }

    // "all" is a sentinel meaning: do not filter on state.
    if let Some(state) = filters.state
        && state != "all"
    {
        where_clauses.push("i.state = ?");
        params.push(Box::new(state.to_string()));
    }

    if let Some(author) = filters.author {
        // Accept "@user" as well as "user".
        let username = author.strip_prefix('@').unwrap_or(author);
        where_clauses.push("i.author_username = ?");
        params.push(Box::new(username.to_string()));
    }

    if let Some(assignee) = filters.assignee {
        let username = assignee.strip_prefix('@').unwrap_or(assignee);
        where_clauses.push(
            "EXISTS (SELECT 1 FROM issue_assignees ia
              WHERE ia.issue_id = i.id AND ia.username = ?)",
        );
        params.push(Box::new(username.to_string()));
    }

    if let Some(since_str) = filters.since {
        let cutoff_ms = parse_since(since_str).ok_or_else(|| {
            LoreError::Other(format!(
                "Invalid --since value '{}'. Use relative (7d, 2w, 1m) or absolute (YYYY-MM-DD) format.",
                since_str
            ))
        })?;
        where_clauses.push("i.updated_at >= ?");
        params.push(Box::new(cutoff_ms));
    }

    // One EXISTS sub-clause per label: AND semantics (issue must have all).
    if let Some(labels) = filters.labels {
        for label in labels {
            where_clauses.push(
                "EXISTS (SELECT 1 FROM issue_labels il
                  JOIN labels l ON il.label_id = l.id
                  WHERE il.issue_id = i.id AND l.name = ?)",
            );
            params.push(Box::new(label.clone()));
        }
    }

    if let Some(milestone) = filters.milestone {
        where_clauses.push("i.milestone_title = ?");
        params.push(Box::new(milestone.to_string()));
    }

    if let Some(due_before) = filters.due_before {
        where_clauses.push("i.due_date IS NOT NULL AND i.due_date <= ?");
        params.push(Box::new(due_before.to_string()));
    }

    if filters.has_due_date {
        where_clauses.push("i.due_date IS NOT NULL");
    }

    // Declared outside the branch so the formatted IN(...) string lives long
    // enough to be borrowed by `where_clauses` (&str elements).
    let status_in_clause;
    if filters.statuses.len() == 1 {
        // Case-insensitive single-status match.
        where_clauses.push("i.status_name = ? COLLATE NOCASE");
        params.push(Box::new(filters.statuses[0].clone()));
    } else if filters.statuses.len() > 1 {
        // Case-insensitive IN (...) over all requested statuses.
        let placeholders: Vec<&str> = filters.statuses.iter().map(|_| "?").collect();
        status_in_clause = format!(
            "i.status_name COLLATE NOCASE IN ({})",
            placeholders.join(", ")
        );
        where_clauses.push(&status_in_clause);
        for s in filters.statuses {
            params.push(Box::new(s.clone()));
        }
    }

    let where_sql = if where_clauses.is_empty() {
        String::new()
    } else {
        format!("WHERE {}", where_clauses.join(" AND "))
    };

    // First pass: unlimited COUNT for `total_count`.
    let count_sql = format!(
        "SELECT COUNT(*) FROM issues i
         JOIN projects p ON i.project_id = p.id
         {where_sql}"
    );

    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let total_count: i64 = conn.query_row(&count_sql, param_refs.as_slice(), |row| row.get(0))?;
    let total_count = total_count as usize;

    // Unknown sort values fall back to updated_at.
    let sort_column = match filters.sort {
        "created" => "i.created_at",
        "iid" => "i.iid",
        _ => "i.updated_at",
    };
    let order = if filters.order == "asc" {
        "ASC"
    } else {
        "DESC"
    };

    // Second pass: the real row query. Labels and assignees are aggregated as
    // subqueries joined with the ASCII unit separator (X'1F') so they can be
    // split unambiguously below.
    let query_sql = format!(
        "SELECT
            i.iid,
            i.title,
            i.state,
            i.author_username,
            i.created_at,
            i.updated_at,
            i.web_url,
            p.path_with_namespace,
            (SELECT GROUP_CONCAT(l.name, X'1F')
             FROM issue_labels il
             JOIN labels l ON il.label_id = l.id
             WHERE il.issue_id = i.id) AS labels_csv,
            (SELECT GROUP_CONCAT(ia.username, X'1F')
             FROM issue_assignees ia
             WHERE ia.issue_id = i.id) AS assignees_csv,
            (SELECT COUNT(*) FROM discussions d
             WHERE d.issue_id = i.id) AS discussion_count,
            (SELECT COUNT(*) FROM discussions d
             WHERE d.issue_id = i.id AND d.resolvable = 1 AND d.resolved = 0) AS unresolved_count,
            i.status_name,
            i.status_category,
            i.status_color,
            i.status_icon_name,
            i.status_synced_at
        FROM issues i
        JOIN projects p ON i.project_id = p.id
        {where_sql}
        ORDER BY {sort_column} {order}
        LIMIT ?"
    );

    // LIMIT is the final positional parameter; rebuild the refs after pushing.
    params.push(Box::new(filters.limit as i64));
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();

    let mut stmt = conn.prepare(&query_sql)?;
    let issues: Vec<IssueListRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            // NULL aggregate (no labels) becomes an empty vec.
            let labels_csv: Option<String> = row.get(8)?;
            let labels = labels_csv
                .map(|s| s.split('\x1F').map(String::from).collect())
                .unwrap_or_default();

            let assignees_csv: Option<String> = row.get(9)?;
            let assignees = assignees_csv
                .map(|s| s.split('\x1F').map(String::from).collect())
                .unwrap_or_default();

            // Column indices follow the SELECT list above.
            Ok(IssueListRow {
                iid: row.get(0)?,
                title: row.get(1)?,
                state: row.get(2)?,
                author_username: row.get(3)?,
                created_at: row.get(4)?,
                updated_at: row.get(5)?,
                web_url: row.get(6)?,
                project_path: row.get(7)?,
                labels,
                assignees,
                discussion_count: row.get(10)?,
                unresolved_count: row.get(11)?,
                status_name: row.get(12)?,
                status_category: row.get(13)?,
                status_color: row.get(14)?,
                status_icon_name: row.get(15)?,
                status_synced_at: row.get(16)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    Ok(ListResult {
        issues,
        total_count,
        // Filled in by the caller (run_list_issues).
        available_statuses: Vec::new(),
    })
}
|
||||||
|
|
||||||
|
/// Renders the issue list as a styled terminal table.
///
/// The "Status" column is inserted only when at least one row carries a
/// status; row-building below must push cells in the same order as the
/// headers, so the conditional status cell sits between State and Assignee.
pub fn print_list_issues(result: &ListResult) {
    if result.issues.is_empty() {
        println!("No issues found.");
        return;
    }

    // Header line: shown count vs total match count.
    println!(
        "{} {} of {}\n",
        Theme::bold().render("Issues"),
        result.issues.len(),
        result.total_count
    );

    let has_any_status = result.issues.iter().any(|i| i.status_name.is_some());

    let mut headers = vec!["IID", "Title", "State"];
    if has_any_status {
        headers.push("Status");
    }
    headers.extend(["Assignee", "Labels", "Disc", "Updated"]);

    // Column 0 (IID) is right-aligned.
    let mut table = LoreTable::new().headers(&headers).align(0, Align::Right);

    for issue in &result.issues {
        let title = render::truncate(&issue.title, 45);
        let relative_time = render::format_relative_time_compact(issue.updated_at);
        let labels = render::format_labels_bare(&issue.labels, 2);
        let assignee = format_assignees(&issue.assignees);
        let discussions = format_discussions(issue.discussion_count, issue.unresolved_count);

        // Open issues get the success style; everything else is dimmed.
        let (icon, state_style) = if issue.state == "opened" {
            (Icons::issue_opened(), Theme::success())
        } else {
            (Icons::issue_closed(), Theme::dim())
        };
        let state_cell = StyledCell::styled(format!("{icon} {}", issue.state), state_style);

        let mut row = vec![
            StyledCell::styled(format!("#{}", issue.iid), Theme::info()),
            StyledCell::plain(title),
            state_cell,
        ];
        // Keep the conditional Status cell aligned with the conditional header.
        if has_any_status {
            match &issue.status_name {
                Some(status) => {
                    // Status text tinted with the project-provided hex color.
                    row.push(StyledCell::plain(render::style_with_hex(
                        status,
                        issue.status_color.as_deref(),
                    )));
                }
                None => {
                    row.push(StyledCell::plain(""));
                }
            }
        }
        row.extend([
            StyledCell::styled(assignee, Theme::accent()),
            StyledCell::styled(labels, Theme::warning()),
            discussions,
            StyledCell::styled(relative_time, Theme::dim()),
        ]);
        table.add_row(row);
    }

    println!("{}", table.render());
}
|
||||||
|
|
||||||
|
pub fn print_list_issues_json(result: &ListResult, elapsed_ms: u64, fields: Option<&[String]>) {
|
||||||
|
let json_result = ListResultJson::from(result);
|
||||||
|
let output = serde_json::json!({
|
||||||
|
"ok": true,
|
||||||
|
"data": json_result,
|
||||||
|
"meta": {
|
||||||
|
"elapsed_ms": elapsed_ms,
|
||||||
|
"available_statuses": result.available_statuses,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
let mut output = output;
|
||||||
|
if let Some(f) = fields {
|
||||||
|
let expanded = expand_fields_preset(f, "issues");
|
||||||
|
filter_fields(&mut output, "issues", &expanded);
|
||||||
|
}
|
||||||
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn open_issue_in_browser(result: &ListResult) -> Option<String> {
|
||||||
|
let first_issue = result.issues.first()?;
|
||||||
|
let url = first_issue.web_url.as_ref()?;
|
||||||
|
|
||||||
|
match open::that(url) {
|
||||||
|
Ok(()) => {
|
||||||
|
println!("Opened: {url}");
|
||||||
|
Some(url.clone())
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
eprintln!("Failed to open browser: {e}");
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +1,9 @@
|
|||||||
use super::*;
|
use super::*;
|
||||||
use crate::cli::render;
|
use crate::cli::render;
|
||||||
use crate::core::time::now_ms;
|
use crate::core::time::now_ms;
|
||||||
|
use crate::test_support::{
|
||||||
|
insert_project as insert_test_project, setup_test_db as setup_note_test_db, test_config,
|
||||||
|
};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn truncate_leaves_short_strings_alone() {
|
fn truncate_leaves_short_strings_alone() {
|
||||||
@@ -82,34 +85,6 @@ fn format_discussions_with_unresolved() {
|
|||||||
// Note query layer tests
|
// Note query layer tests
|
||||||
// -----------------------------------------------------------------------
|
// -----------------------------------------------------------------------
|
||||||
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use crate::core::config::{
|
|
||||||
Config, EmbeddingConfig, GitLabConfig, LoggingConfig, ProjectConfig, ScoringConfig,
|
|
||||||
StorageConfig, SyncConfig,
|
|
||||||
};
|
|
||||||
use crate::core::db::{create_connection, run_migrations};
|
|
||||||
|
|
||||||
fn test_config(default_project: Option<&str>) -> Config {
|
|
||||||
Config {
|
|
||||||
gitlab: GitLabConfig {
|
|
||||||
base_url: "https://gitlab.example.com".to_string(),
|
|
||||||
token_env_var: "GITLAB_TOKEN".to_string(),
|
|
||||||
token: None,
|
|
||||||
username: None,
|
|
||||||
},
|
|
||||||
projects: vec![ProjectConfig {
|
|
||||||
path: "group/project".to_string(),
|
|
||||||
}],
|
|
||||||
default_project: default_project.map(String::from),
|
|
||||||
sync: SyncConfig::default(),
|
|
||||||
storage: StorageConfig::default(),
|
|
||||||
embedding: EmbeddingConfig::default(),
|
|
||||||
logging: LoggingConfig::default(),
|
|
||||||
scoring: ScoringConfig::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_note_filters() -> NoteListFilters {
|
fn default_note_filters() -> NoteListFilters {
|
||||||
NoteListFilters {
|
NoteListFilters {
|
||||||
limit: 50,
|
limit: 50,
|
||||||
@@ -132,26 +107,6 @@ fn default_note_filters() -> NoteListFilters {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn setup_note_test_db() -> Connection {
|
|
||||||
let conn = create_connection(Path::new(":memory:")).unwrap();
|
|
||||||
run_migrations(&conn).unwrap();
|
|
||||||
conn
|
|
||||||
}
|
|
||||||
|
|
||||||
fn insert_test_project(conn: &Connection, id: i64, path: &str) {
|
|
||||||
conn.execute(
|
|
||||||
"INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
|
|
||||||
VALUES (?1, ?2, ?3, ?4)",
|
|
||||||
rusqlite::params![
|
|
||||||
id,
|
|
||||||
id * 100,
|
|
||||||
path,
|
|
||||||
format!("https://gitlab.example.com/{path}")
|
|
||||||
],
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn insert_test_issue(conn: &Connection, id: i64, project_id: i64, iid: i64, title: &str) {
|
fn insert_test_issue(conn: &Connection, id: i64, project_id: i64, iid: i64, title: &str) {
|
||||||
conn.execute(
|
conn.execute(
|
||||||
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, author_username,
|
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, author_username,
|
||||||
28
src/cli/commands/list/mod.rs
Normal file
28
src/cli/commands/list/mod.rs
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
mod issues;
|
||||||
|
mod mrs;
|
||||||
|
mod notes;
|
||||||
|
mod render_helpers;
|
||||||
|
|
||||||
|
pub use issues::{
|
||||||
|
IssueListRow, IssueListRowJson, ListFilters, ListResult, ListResultJson, open_issue_in_browser,
|
||||||
|
print_list_issues, print_list_issues_json, run_list_issues,
|
||||||
|
};
|
||||||
|
pub use mrs::{
|
||||||
|
MrListFilters, MrListResult, MrListResultJson, MrListRow, MrListRowJson, open_mr_in_browser,
|
||||||
|
print_list_mrs, print_list_mrs_json, run_list_mrs,
|
||||||
|
};
|
||||||
|
pub use notes::{
|
||||||
|
NoteListFilters, NoteListResult, NoteListResultJson, NoteListRow, NoteListRowJson,
|
||||||
|
print_list_notes, print_list_notes_json, query_notes,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
use crate::core::path_resolver::escape_like as note_escape_like;
|
||||||
|
#[cfg(test)]
|
||||||
|
use render_helpers::{format_discussions, format_note_parent, format_note_type, truncate_body};
|
||||||
|
#[cfg(test)]
|
||||||
|
use rusqlite::Connection;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
#[path = "list_tests.rs"]
|
||||||
|
mod tests;
|
||||||
404
src/cli/commands/list/mrs.rs
Normal file
404
src/cli/commands/list/mrs.rs
Normal file
@@ -0,0 +1,404 @@
|
|||||||
|
use crate::cli::render::{self, Align, Icons, StyledCell, Table as LoreTable, Theme};
|
||||||
|
use rusqlite::Connection;
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
use crate::Config;
|
||||||
|
use crate::cli::robot::{RobotMeta, expand_fields_preset, filter_fields};
|
||||||
|
use crate::core::db::create_connection;
|
||||||
|
use crate::core::error::{LoreError, Result};
|
||||||
|
use crate::core::paths::get_db_path;
|
||||||
|
use crate::core::project::resolve_project;
|
||||||
|
use crate::core::time::{ms_to_iso, parse_since};
|
||||||
|
|
||||||
|
use super::render_helpers::{format_branches, format_discussions};
|
||||||
|
|
||||||
|
/// One merge-request row as read from the local database for `lore list mrs`.
///
/// Timestamps are raw epoch milliseconds; the `*Json` companion type renders
/// them as ISO strings for robot output.
#[derive(Debug, Serialize)]
pub struct MrListRow {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub draft: bool,
    pub author_username: String,
    pub source_branch: String,
    pub target_branch: String,
    // Epoch milliseconds.
    pub created_at: i64,
    // Epoch milliseconds.
    pub updated_at: i64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_url: Option<String>,
    pub project_path: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub reviewers: Vec<String>,
    // Total discussions attached to the MR.
    pub discussion_count: i64,
    // Discussions that are resolvable but not yet resolved.
    pub unresolved_count: i64,
}
|
||||||
|
|
||||||
|
/// JSON-facing shape of [`MrListRow`]: same data, but millisecond timestamps
/// rendered as ISO-8601 strings.
#[derive(Serialize)]
pub struct MrListRowJson {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub draft: bool,
    pub author_username: String,
    pub source_branch: String,
    pub target_branch: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub reviewers: Vec<String>,
    pub discussion_count: i64,
    pub unresolved_count: i64,
    // ISO-8601 rendering of the creation timestamp.
    pub created_at_iso: String,
    // ISO-8601 rendering of the last-update timestamp.
    pub updated_at_iso: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_url: Option<String>,
    pub project_path: String,
}
|
||||||
|
|
||||||
|
impl From<&MrListRow> for MrListRowJson {
    /// Converts a database row into its JSON shape, turning the millisecond
    /// timestamps into ISO-8601 strings via `ms_to_iso`.
    fn from(row: &MrListRow) -> Self {
        Self {
            iid: row.iid,
            title: row.title.clone(),
            state: row.state.clone(),
            draft: row.draft,
            author_username: row.author_username.clone(),
            source_branch: row.source_branch.clone(),
            target_branch: row.target_branch.clone(),
            labels: row.labels.clone(),
            assignees: row.assignees.clone(),
            reviewers: row.reviewers.clone(),
            discussion_count: row.discussion_count,
            unresolved_count: row.unresolved_count,
            created_at_iso: ms_to_iso(row.created_at),
            updated_at_iso: ms_to_iso(row.updated_at),
            web_url: row.web_url.clone(),
            project_path: row.project_path.clone(),
        }
    }
}
|
||||||
|
|
||||||
|
/// Result of a merge-request list query.
#[derive(Serialize)]
pub struct MrListResult {
    // Rows matching the filters, capped by the query's LIMIT.
    pub mrs: Vec<MrListRow>,
    // Total number of matching rows, ignoring the limit.
    pub total_count: usize,
}
|
||||||
|
|
||||||
|
/// JSON-facing shape of [`MrListResult`], with rows converted and an explicit
/// `showing` count (rows returned after the limit was applied).
#[derive(Serialize)]
pub struct MrListResultJson {
    pub mrs: Vec<MrListRowJson>,
    // Total matches ignoring the limit.
    pub total_count: usize,
    // Number of rows actually included in `mrs`.
    pub showing: usize,
}
|
||||||
|
|
||||||
|
impl From<&MrListResult> for MrListResultJson {
    /// Builds the JSON view: converts each row and records how many rows are
    /// actually shown next to the total match count.
    fn from(result: &MrListResult) -> Self {
        Self {
            mrs: result.mrs.iter().map(MrListRowJson::from).collect(),
            total_count: result.total_count,
            showing: result.mrs.len(),
        }
    }
}
|
||||||
|
|
||||||
|
/// Filter and presentation options for the merge-request list query.
///
/// All string filters borrow from the CLI arguments for the duration of the
/// query.
pub struct MrListFilters<'a> {
    // Maximum number of rows to return.
    pub limit: usize,
    // Project path or alias; resolved via `resolve_project` when set.
    pub project: Option<&'a str>,
    // MR state; the literal "all" disables the state filter.
    pub state: Option<&'a str>,
    // Author username; a leading '@' is accepted and stripped.
    pub author: Option<&'a str>,
    // Assignee username; a leading '@' is accepted and stripped.
    pub assignee: Option<&'a str>,
    // Reviewer username; a leading '@' is accepted and stripped.
    pub reviewer: Option<&'a str>,
    // Labels that must ALL be present (AND semantics).
    pub labels: Option<&'a [String]>,
    // Updated-at cutoff; relative (7d, 2w, 1m) or absolute (YYYY-MM-DD).
    pub since: Option<&'a str>,
    // Only drafts. Takes precedence over `no_draft` when both are set.
    pub draft: bool,
    // Only non-drafts.
    pub no_draft: bool,
    pub target_branch: Option<&'a str>,
    pub source_branch: Option<&'a str>,
    // Sort key (interpreted by the query builder).
    pub sort: &'a str,
    // "asc" for ascending; anything else sorts descending.
    pub order: &'a str,
}
|
||||||
|
|
||||||
|
pub fn run_list_mrs(config: &Config, filters: MrListFilters) -> Result<MrListResult> {
|
||||||
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
||||||
|
let conn = create_connection(&db_path)?;
|
||||||
|
|
||||||
|
let result = query_mrs(&conn, &filters)?;
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build and execute the merge-request listing query described by `filters`.
///
/// The WHERE clause is assembled dynamically: each active filter appends one
/// clause string to `where_clauses` and pushes its bound value(s) onto
/// `params` in the same order, so clause order and parameter order must stay
/// in lockstep. All user-supplied values are bound as `?` parameters, never
/// interpolated into the SQL. Returns up to `filters.limit` rows plus the
/// total count of rows matching the filters without the limit.
fn query_mrs(conn: &Connection, filters: &MrListFilters) -> Result<MrListResult> {
    let mut where_clauses = Vec::new();
    // Boxed trait objects because the bound values have mixed concrete
    // types (i64 project ids, Strings, the final i64 limit).
    let mut params: Vec<Box<dyn rusqlite::ToSql>> = Vec::new();

    if let Some(project) = filters.project {
        // Resolve path/alias to the local projects.id before filtering.
        let project_id = resolve_project(conn, project)?;
        where_clauses.push("m.project_id = ?");
        params.push(Box::new(project_id));
    }

    // "all" is a sentinel meaning "no state filter".
    if let Some(state) = filters.state
        && state != "all"
    {
        where_clauses.push("m.state = ?");
        params.push(Box::new(state.to_string()));
    }

    if let Some(author) = filters.author {
        // Accept both "user" and "@user" on the CLI.
        // NOTE(review): this comparison is case-sensitive, while the notes
        // query uses COLLATE NOCASE for authors — confirm whether intended.
        let username = author.strip_prefix('@').unwrap_or(author);
        where_clauses.push("m.author_username = ?");
        params.push(Box::new(username.to_string()));
    }

    if let Some(assignee) = filters.assignee {
        let username = assignee.strip_prefix('@').unwrap_or(assignee);
        // Many-to-many membership test via correlated EXISTS subquery.
        where_clauses.push(
            "EXISTS (SELECT 1 FROM mr_assignees ma
             WHERE ma.merge_request_id = m.id AND ma.username = ?)",
        );
        params.push(Box::new(username.to_string()));
    }

    if let Some(reviewer) = filters.reviewer {
        let username = reviewer.strip_prefix('@').unwrap_or(reviewer);
        where_clauses.push(
            "EXISTS (SELECT 1 FROM mr_reviewers mr
             WHERE mr.merge_request_id = m.id AND mr.username = ?)",
        );
        params.push(Box::new(username.to_string()));
    }

    if let Some(since_str) = filters.since {
        // parse_since accepts relative ("7d") and absolute ("YYYY-MM-DD")
        // forms, returning epoch milliseconds.
        let cutoff_ms = parse_since(since_str).ok_or_else(|| {
            LoreError::Other(format!(
                "Invalid --since value '{}'. Use relative (7d, 2w, 1m) or absolute (YYYY-MM-DD) format.",
                since_str
            ))
        })?;
        // Note: --since filters on updated_at, not created_at.
        where_clauses.push("m.updated_at >= ?");
        params.push(Box::new(cutoff_ms));
    }

    if let Some(labels) = filters.labels {
        // One EXISTS per label: the MR must carry EVERY requested label.
        for label in labels {
            where_clauses.push(
                "EXISTS (SELECT 1 FROM mr_labels ml
                 JOIN labels l ON ml.label_id = l.id
                 WHERE ml.merge_request_id = m.id AND l.name = ?)",
            );
            params.push(Box::new(label.clone()));
        }
    }

    // --draft wins over --no-draft when both are set.
    if filters.draft {
        where_clauses.push("m.draft = 1");
    } else if filters.no_draft {
        where_clauses.push("m.draft = 0");
    }

    if let Some(target_branch) = filters.target_branch {
        where_clauses.push("m.target_branch = ?");
        params.push(Box::new(target_branch.to_string()));
    }

    if let Some(source_branch) = filters.source_branch {
        where_clauses.push("m.source_branch = ?");
        params.push(Box::new(source_branch.to_string()));
    }

    let where_sql = if where_clauses.is_empty() {
        String::new()
    } else {
        format!("WHERE {}", where_clauses.join(" AND "))
    };

    // Total matches without the LIMIT, for the "showing X of Y" header.
    let count_sql = format!(
        "SELECT COUNT(*) FROM merge_requests m
         JOIN projects p ON m.project_id = p.id
         {where_sql}"
    );

    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let total_count: i64 = conn.query_row(&count_sql, param_refs.as_slice(), |row| row.get(0))?;
    let total_count = total_count as usize;

    // Sort column and direction are mapped through whitelists, so user
    // input never reaches the SQL text directly.
    let sort_column = match filters.sort {
        "created" => "m.created_at",
        "iid" => "m.iid",
        _ => "m.updated_at",
    };
    let order = if filters.order == "asc" {
        "ASC"
    } else {
        "DESC"
    };

    // X'1F' (ASCII unit separator) joins multi-valued columns so they can
    // be split unambiguously in Rust — commas may appear in label names.
    let query_sql = format!(
        "SELECT
            m.iid,
            m.title,
            m.state,
            m.draft,
            m.author_username,
            m.source_branch,
            m.target_branch,
            m.created_at,
            m.updated_at,
            m.web_url,
            p.path_with_namespace,
            (SELECT GROUP_CONCAT(l.name, X'1F')
             FROM mr_labels ml
             JOIN labels l ON ml.label_id = l.id
             WHERE ml.merge_request_id = m.id) AS labels_csv,
            (SELECT GROUP_CONCAT(ma.username, X'1F')
             FROM mr_assignees ma
             WHERE ma.merge_request_id = m.id) AS assignees_csv,
            (SELECT GROUP_CONCAT(mr.username, X'1F')
             FROM mr_reviewers mr
             WHERE mr.merge_request_id = m.id) AS reviewers_csv,
            (SELECT COUNT(*) FROM discussions d
             WHERE d.merge_request_id = m.id) AS discussion_count,
            (SELECT COUNT(*) FROM discussions d
             WHERE d.merge_request_id = m.id AND d.resolvable = 1 AND d.resolved = 0) AS unresolved_count
        FROM merge_requests m
        JOIN projects p ON m.project_id = p.id
        {where_sql}
        ORDER BY {sort_column} {order}
        LIMIT ?"
    );

    // The limit is pushed AFTER the count query so it doesn't affect the
    // total; it must be the last parameter to line up with "LIMIT ?".
    params.push(Box::new(filters.limit as i64));
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();

    let mut stmt = conn.prepare(&query_sql)?;
    let mrs: Vec<MrListRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            // GROUP_CONCAT returns NULL when there are no joined rows;
            // map that to an empty Vec rather than a one-element Vec.
            let labels_csv: Option<String> = row.get(11)?;
            let labels = labels_csv
                .map(|s| s.split('\x1F').map(String::from).collect())
                .unwrap_or_default();

            let assignees_csv: Option<String> = row.get(12)?;
            let assignees = assignees_csv
                .map(|s| s.split('\x1F').map(String::from).collect())
                .unwrap_or_default();

            let reviewers_csv: Option<String> = row.get(13)?;
            let reviewers = reviewers_csv
                .map(|s| s.split('\x1F').map(String::from).collect())
                .unwrap_or_default();

            // SQLite stores booleans as integers; normalize to bool.
            let draft_int: i64 = row.get(3)?;

            Ok(MrListRow {
                iid: row.get(0)?,
                title: row.get(1)?,
                state: row.get(2)?,
                draft: draft_int == 1,
                author_username: row.get(4)?,
                source_branch: row.get(5)?,
                target_branch: row.get(6)?,
                created_at: row.get(7)?,
                updated_at: row.get(8)?,
                web_url: row.get(9)?,
                project_path: row.get(10)?,
                labels,
                assignees,
                reviewers,
                discussion_count: row.get(14)?,
                unresolved_count: row.get(15)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    Ok(MrListResult { mrs, total_count })
}
|
||||||
|
|
||||||
|
/// Render an MR listing as a styled terminal table.
///
/// Prints a "Merge Requests <shown> of <total>" header followed by one row
/// per MR; prints a short message and returns early when there are no rows.
pub fn print_list_mrs(result: &MrListResult) {
    if result.mrs.is_empty() {
        println!("No merge requests found.");
        return;
    }

    println!(
        "{} {} of {}\n",
        Theme::bold().render("Merge Requests"),
        result.mrs.len(),
        result.total_count
    );

    let mut table = LoreTable::new()
        .headers(&[
            "IID", "Title", "State", "Author", "Branches", "Disc", "Updated",
        ])
        .align(0, Align::Right);

    for mr in &result.mrs {
        // Draft titles get an icon prefix; the title budget shrinks from
        // 45 to 42 chars to compensate for the added marker.
        let title = if mr.draft {
            format!("{} {}", Icons::mr_draft(), render::truncate(&mr.title, 42))
        } else {
            render::truncate(&mr.title, 45)
        };

        let relative_time = render::format_relative_time_compact(mr.updated_at);
        let branches = format_branches(&mr.target_branch, &mr.source_branch, 25);
        let discussions = format_discussions(mr.discussion_count, mr.unresolved_count);

        // State determines both the icon and the color of the State cell.
        let (icon, style) = match mr.state.as_str() {
            "opened" => (Icons::mr_opened(), Theme::success()),
            "merged" => (Icons::mr_merged(), Theme::accent()),
            "closed" => (Icons::mr_closed(), Theme::error()),
            "locked" => (Icons::mr_opened(), Theme::warning()),
            _ => (Icons::mr_opened(), Theme::dim()),
        };
        let state_cell = StyledCell::styled(format!("{icon} {}", mr.state), style);

        table.add_row(vec![
            StyledCell::styled(format!("!{}", mr.iid), Theme::info()),
            StyledCell::plain(title),
            state_cell,
            StyledCell::styled(
                format!("@{}", render::truncate(&mr.author_username, 12)),
                Theme::accent(),
            ),
            StyledCell::styled(branches, Theme::info()),
            discussions,
            StyledCell::styled(relative_time, Theme::dim()),
        ]);
    }

    println!("{}", table.render());
}
|
||||||
|
|
||||||
|
pub fn print_list_mrs_json(result: &MrListResult, elapsed_ms: u64, fields: Option<&[String]>) {
|
||||||
|
let json_result = MrListResultJson::from(result);
|
||||||
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
|
let output = serde_json::json!({
|
||||||
|
"ok": true,
|
||||||
|
"data": json_result,
|
||||||
|
"meta": meta,
|
||||||
|
});
|
||||||
|
let mut output = output;
|
||||||
|
if let Some(f) = fields {
|
||||||
|
let expanded = expand_fields_preset(f, "mrs");
|
||||||
|
filter_fields(&mut output, "mrs", &expanded);
|
||||||
|
}
|
||||||
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn open_mr_in_browser(result: &MrListResult) -> Option<String> {
|
||||||
|
let first_mr = result.mrs.first()?;
|
||||||
|
let url = first_mr.web_url.as_ref()?;
|
||||||
|
|
||||||
|
match open::that(url) {
|
||||||
|
Ok(()) => {
|
||||||
|
println!("Opened: {url}");
|
||||||
|
Some(url.clone())
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
eprintln!("Failed to open browser: {e}");
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
470
src/cli/commands/list/notes.rs
Normal file
470
src/cli/commands/list/notes.rs
Normal file
@@ -0,0 +1,470 @@
|
|||||||
|
use crate::cli::render::{self, Align, StyledCell, Table as LoreTable, Theme};
|
||||||
|
use rusqlite::Connection;
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
use crate::Config;
|
||||||
|
use crate::cli::robot::{RobotMeta, expand_fields_preset, filter_fields};
|
||||||
|
use crate::core::error::{LoreError, Result};
|
||||||
|
use crate::core::path_resolver::escape_like as note_escape_like;
|
||||||
|
use crate::core::project::resolve_project;
|
||||||
|
use crate::core::time::{iso_to_ms, ms_to_iso, parse_since};
|
||||||
|
|
||||||
|
use super::render_helpers::{
|
||||||
|
format_note_parent, format_note_path, format_note_type, truncate_body,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// One note row as read from the local database.
///
/// Timestamps are epoch milliseconds; `Option` fields mirror nullable
/// columns. `parent_iid`/`parent_title` come from whichever of the issue
/// or MR joins matched the note's discussion.
#[derive(Debug, Serialize)]
pub struct NoteListRow {
    /// Local database row id (notes.id).
    pub id: i64,
    /// GitLab's id for the note.
    pub gitlab_id: i64,
    /// Note author; empty string when the column was NULL.
    pub author_username: String,
    /// Note body text, if any.
    pub body: Option<String>,
    /// GitLab note type (e.g. "DiffNote", "DiscussionNote").
    pub note_type: Option<String>,
    /// True for system-generated notes (state changes, etc.).
    pub is_system: bool,
    /// Creation time, epoch milliseconds.
    pub created_at: i64,
    /// Last-update time, epoch milliseconds.
    pub updated_at: i64,
    /// Diff position: file path on the new side, if positioned.
    pub position_new_path: Option<String>,
    /// Diff position: line number on the new side.
    pub position_new_line: Option<i64>,
    /// Diff position: file path on the old side.
    pub position_old_path: Option<String>,
    /// Diff position: line number on the old side.
    pub position_old_line: Option<i64>,
    /// Whether the note can be resolved.
    pub resolvable: bool,
    /// Whether a resolvable note has been resolved.
    pub resolved: bool,
    /// Username of the resolver, if resolved.
    pub resolved_by: Option<String>,
    /// Parent kind from the discussion ("Issue" or "MergeRequest").
    pub noteable_type: Option<String>,
    /// IID of the parent issue or MR, when joined.
    pub parent_iid: Option<i64>,
    /// Title of the parent issue or MR, when joined.
    pub parent_title: Option<String>,
    /// Full project path (path_with_namespace).
    pub project_path: String,
}
|
||||||
|
|
||||||
|
/// JSON-facing projection of [`NoteListRow`].
///
/// Differs from the DB row in two ways: timestamps are rendered as
/// ISO-8601 strings, and `None` fields are omitted from the output
/// entirely (via `skip_serializing_if`) to keep robot output compact.
#[derive(Serialize)]
pub struct NoteListRowJson {
    pub id: i64,
    pub gitlab_id: i64,
    pub author_username: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub body: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub note_type: Option<String>,
    pub is_system: bool,
    // ISO-8601 renderings of the epoch-ms columns.
    pub created_at_iso: String,
    pub updated_at_iso: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub position_new_path: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub position_new_line: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub position_old_path: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub position_old_line: Option<i64>,
    pub resolvable: bool,
    pub resolved: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub resolved_by: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub noteable_type: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parent_iid: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parent_title: Option<String>,
    pub project_path: String,
}
|
||||||
|
|
||||||
|
/// Field-by-field conversion from the DB row to its JSON projection,
/// converting epoch-ms timestamps to ISO-8601 strings along the way.
impl From<&NoteListRow> for NoteListRowJson {
    fn from(row: &NoteListRow) -> Self {
        Self {
            id: row.id,
            gitlab_id: row.gitlab_id,
            author_username: row.author_username.clone(),
            body: row.body.clone(),
            note_type: row.note_type.clone(),
            is_system: row.is_system,
            // Timestamp conversion is the only non-copy transformation.
            created_at_iso: ms_to_iso(row.created_at),
            updated_at_iso: ms_to_iso(row.updated_at),
            position_new_path: row.position_new_path.clone(),
            position_new_line: row.position_new_line,
            position_old_path: row.position_old_path.clone(),
            position_old_line: row.position_old_line,
            resolvable: row.resolvable,
            resolved: row.resolved,
            resolved_by: row.resolved_by.clone(),
            noteable_type: row.noteable_type.clone(),
            parent_iid: row.parent_iid,
            parent_title: row.parent_title.clone(),
            project_path: row.project_path.clone(),
        }
    }
}
|
||||||
|
|
||||||
|
/// Result of a note listing query: the limited page of rows plus the
/// total number of rows matching the filters without the limit.
#[derive(Debug)]
pub struct NoteListResult {
    /// The returned rows, capped at the caller's limit.
    pub notes: Vec<NoteListRow>,
    /// Total matches ignoring the limit (for "showing X of Y").
    pub total_count: i64,
}
|
||||||
|
|
||||||
|
/// JSON-facing projection of [`NoteListResult`], adding an explicit
/// `showing` count so consumers need not measure the array themselves.
#[derive(Serialize)]
pub struct NoteListResultJson {
    /// The returned rows, converted to their JSON projection.
    pub notes: Vec<NoteListRowJson>,
    /// Total matches ignoring the limit.
    pub total_count: i64,
    /// Number of rows actually included (== notes.len()).
    pub showing: usize,
}
|
||||||
|
|
||||||
|
impl From<&NoteListResult> for NoteListResultJson {
|
||||||
|
fn from(result: &NoteListResult) -> Self {
|
||||||
|
Self {
|
||||||
|
notes: result.notes.iter().map(NoteListRowJson::from).collect(),
|
||||||
|
total_count: result.total_count,
|
||||||
|
showing: result.notes.len(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Filter, sort, and paging options for the `list notes` command.
///
/// Owned strings (unlike the borrowed `MrListFilters`); `None` means
/// "do not filter on this dimension".
pub struct NoteListFilters {
    /// Maximum number of rows to return (applied as the SQL LIMIT).
    pub limit: usize,
    /// Project path or alias; resolved to a local project id when set.
    pub project: Option<String>,
    /// Author username; '@' prefix accepted, matched case-insensitively.
    pub author: Option<String>,
    /// Exact note type (e.g. "DiffNote").
    pub note_type: Option<String>,
    /// Include system-generated notes; excluded by default.
    pub include_system: bool,
    /// Restrict to notes on the issue with this IID (needs project context).
    pub for_issue_iid: Option<i64>,
    /// Restrict to notes on the MR with this IID (needs project context).
    pub for_mr_iid: Option<i64>,
    /// Restrict to the note with this local database id.
    pub note_id: Option<i64>,
    /// Restrict to the note with this GitLab id.
    pub gitlab_note_id: Option<i64>,
    /// Restrict to notes in this GitLab discussion id.
    pub discussion_id: Option<String>,
    /// Lower bound on created_at; relative ("7d") or absolute ("YYYY-MM-DD").
    pub since: Option<String>,
    /// Upper bound on created_at; bare dates are taken as end-of-day.
    pub until: Option<String>,
    /// Diff path filter; a trailing '/' makes it a prefix match.
    pub path: Option<String>,
    /// Case-insensitive substring match against the note body.
    pub contains: Option<String>,
    /// "resolved" or "unresolved"; anything else is rejected.
    pub resolution: Option<String>,
    /// Sort key: "updated" or anything else for created_at.
    pub sort: String,
    /// Sort direction: "asc" for ascending, anything else for descending.
    pub order: String,
}
|
||||||
|
|
||||||
|
/// Render a note listing as a styled terminal table.
///
/// Prints a "Notes <shown> of <total>" header followed by one row per
/// note; prints a short message and returns early when there are no rows.
pub fn print_list_notes(result: &NoteListResult) {
    if result.notes.is_empty() {
        println!("No notes found.");
        return;
    }

    println!(
        "{} {} of {}\n",
        Theme::bold().render("Notes"),
        result.notes.len(),
        result.total_count
    );

    let mut table = LoreTable::new()
        .headers(&[
            "ID",
            "Author",
            "Type",
            "Body",
            "Path:Line",
            "Parent",
            "Created",
        ])
        .align(0, Align::Right);

    for note in &result.notes {
        // Body is optional in the DB; render as empty when absent and
        // cap at 60 chars so the table stays readable.
        let body = note
            .body
            .as_deref()
            .map(|b| truncate_body(b, 60))
            .unwrap_or_default();
        let path = format_note_path(note.position_new_path.as_deref(), note.position_new_line);
        let parent = format_note_parent(note.noteable_type.as_deref(), note.parent_iid);
        let relative_time = render::format_relative_time_compact(note.created_at);
        let note_type = format_note_type(note.note_type.as_deref());

        table.add_row(vec![
            // The ID column shows the GitLab id, not the local row id.
            StyledCell::styled(note.gitlab_id.to_string(), Theme::info()),
            StyledCell::styled(
                format!("@{}", render::truncate(&note.author_username, 12)),
                Theme::accent(),
            ),
            StyledCell::plain(note_type),
            StyledCell::plain(body),
            StyledCell::plain(path),
            StyledCell::plain(parent),
            StyledCell::styled(relative_time, Theme::dim()),
        ]);
    }

    println!("{}", table.render());
}
|
||||||
|
|
||||||
|
pub fn print_list_notes_json(result: &NoteListResult, elapsed_ms: u64, fields: Option<&[String]>) {
|
||||||
|
let json_result = NoteListResultJson::from(result);
|
||||||
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
|
let output = serde_json::json!({
|
||||||
|
"ok": true,
|
||||||
|
"data": json_result,
|
||||||
|
"meta": meta,
|
||||||
|
});
|
||||||
|
let mut output = output;
|
||||||
|
if let Some(f) = fields {
|
||||||
|
let expanded = expand_fields_preset(f, "notes");
|
||||||
|
filter_fields(&mut output, "notes", &expanded);
|
||||||
|
}
|
||||||
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build and execute the note listing query described by `filters`.
///
/// As with the MR query, the WHERE clause is assembled dynamically: each
/// active filter appends a clause and pushes its bound value(s) onto
/// `params` in the same order, so the two vectors must stay in lockstep.
/// Notes join through discussions to optionally reach their parent issue
/// or MR. Returns up to `filters.limit` rows plus the unlimited total.
///
/// `config` supplies a fallback default project when an IID filter is
/// used without an explicit `--project`.
pub fn query_notes(
    conn: &Connection,
    filters: &NoteListFilters,
    config: &Config,
) -> Result<NoteListResult> {
    // Owned Strings here (vs &'static str in query_mrs) because some
    // clauses are built dynamically.
    let mut where_clauses: Vec<String> = Vec::new();
    let mut params: Vec<Box<dyn rusqlite::ToSql>> = Vec::new();

    if let Some(ref project) = filters.project {
        let project_id = resolve_project(conn, project)?;
        where_clauses.push("n.project_id = ?".to_string());
        params.push(Box::new(project_id));
    }

    if let Some(ref author) = filters.author {
        // Accept both "user" and "@user"; match case-insensitively.
        let username = author.strip_prefix('@').unwrap_or(author);
        where_clauses.push("n.author_username = ? COLLATE NOCASE".to_string());
        params.push(Box::new(username.to_string()));
    }

    if let Some(ref note_type) = filters.note_type {
        where_clauses.push("n.note_type = ?".to_string());
        params.push(Box::new(note_type.clone()));
    }

    // System notes (state changes etc.) are hidden unless asked for.
    if !filters.include_system {
        where_clauses.push("n.is_system = 0".to_string());
    }

    // Remember the lower bound so it can be validated against --until.
    let since_ms = if let Some(ref since_str) = filters.since {
        let ms = parse_since(since_str).ok_or_else(|| {
            LoreError::Other(format!(
                "Invalid --since value '{}'. Use relative (7d, 2w, 1m) or absolute (YYYY-MM-DD) format.",
                since_str
            ))
        })?;
        where_clauses.push("n.created_at >= ?".to_string());
        params.push(Box::new(ms));
        Some(ms)
    } else {
        None
    };

    if let Some(ref until_str) = filters.until {
        // A bare YYYY-MM-DD date (10 chars, two dashes) is treated as
        // end-of-day so the whole day is included; anything else goes
        // through the same relative/absolute parser as --since.
        let until_ms = if until_str.len() == 10
            && until_str.chars().filter(|&c| c == '-').count() == 2
        {
            let iso_full = format!("{until_str}T23:59:59.999Z");
            iso_to_ms(&iso_full).ok_or_else(|| {
                LoreError::Other(format!(
                    "Invalid --until value '{}'. Use YYYY-MM-DD or relative format.",
                    until_str
                ))
            })?
        } else {
            parse_since(until_str).ok_or_else(|| {
                LoreError::Other(format!(
                    "Invalid --until value '{}'. Use relative (7d, 2w, 1m) or absolute (YYYY-MM-DD) format.",
                    until_str
                ))
            })?
        };

        // Reject inverted windows up front rather than silently
        // returning zero rows.
        if let Some(s) = since_ms
            && s > until_ms
        {
            return Err(LoreError::Other(
                "Invalid time window: --since is after --until.".to_string(),
            ));
        }

        where_clauses.push("n.created_at <= ?".to_string());
        params.push(Box::new(until_ms));
    }

    if let Some(ref path) = filters.path {
        // Trailing '/' means directory-prefix match; LIKE wildcards in
        // the user's input are escaped so they match literally.
        if let Some(prefix) = path.strip_suffix('/') {
            let escaped = note_escape_like(prefix);
            where_clauses.push("n.position_new_path LIKE ? ESCAPE '\\'".to_string());
            params.push(Box::new(format!("{escaped}%")));
        } else {
            where_clauses.push("n.position_new_path = ?".to_string());
            params.push(Box::new(path.clone()));
        }
    }

    if let Some(ref contains) = filters.contains {
        // Case-insensitive substring search, with LIKE metacharacters
        // escaped so the needle is matched literally.
        let escaped = note_escape_like(contains);
        where_clauses.push("n.body LIKE ? ESCAPE '\\' COLLATE NOCASE".to_string());
        params.push(Box::new(format!("%{escaped}%")));
    }

    if let Some(ref resolution) = filters.resolution {
        // Both variants require resolvable = 1: non-resolvable notes are
        // neither "resolved" nor "unresolved".
        match resolution.as_str() {
            "unresolved" => {
                where_clauses.push("n.resolvable = 1 AND n.resolved = 0".to_string());
            }
            "resolved" => {
                where_clauses.push("n.resolvable = 1 AND n.resolved = 1".to_string());
            }
            other => {
                return Err(LoreError::Other(format!(
                    "Invalid --resolution value '{}'. Use 'resolved' or 'unresolved'.",
                    other
                )));
            }
        }
    }

    if let Some(iid) = filters.for_issue_iid {
        // IIDs are per-project, so a project context is mandatory; fall
        // back to the configured default project when --project is absent.
        let project_str = filters
            .project
            .as_deref()
            .or(config.default_project.as_deref())
            .ok_or_else(|| {
                LoreError::Other(
                    "Cannot filter by issue IID without a project context. Use --project or set defaultProject in config."
                        .to_string(),
                )
            })?;
        let project_id = resolve_project(conn, project_str)?;
        where_clauses.push(
            "d.issue_id = (SELECT id FROM issues WHERE project_id = ? AND iid = ?)".to_string(),
        );
        params.push(Box::new(project_id));
        params.push(Box::new(iid));
    }

    if let Some(iid) = filters.for_mr_iid {
        // Same project-context requirement as the issue IID filter.
        let project_str = filters
            .project
            .as_deref()
            .or(config.default_project.as_deref())
            .ok_or_else(|| {
                LoreError::Other(
                    "Cannot filter by MR IID without a project context. Use --project or set defaultProject in config."
                        .to_string(),
                )
            })?;
        let project_id = resolve_project(conn, project_str)?;
        where_clauses.push(
            "d.merge_request_id = (SELECT id FROM merge_requests WHERE project_id = ? AND iid = ?)"
                .to_string(),
        );
        params.push(Box::new(project_id));
        params.push(Box::new(iid));
    }

    if let Some(id) = filters.note_id {
        where_clauses.push("n.id = ?".to_string());
        params.push(Box::new(id));
    }

    if let Some(gitlab_id) = filters.gitlab_note_id {
        where_clauses.push("n.gitlab_id = ?".to_string());
        params.push(Box::new(gitlab_id));
    }

    if let Some(ref disc_id) = filters.discussion_id {
        where_clauses.push("d.gitlab_discussion_id = ?".to_string());
        params.push(Box::new(disc_id.clone()));
    }

    let where_sql = if where_clauses.is_empty() {
        String::new()
    } else {
        format!("WHERE {}", where_clauses.join(" AND "))
    };

    // The count query must keep the same joins as the row query so that
    // discussion/parent filters bind against the same aliases.
    let count_sql = format!(
        "SELECT COUNT(*) FROM notes n
         JOIN discussions d ON n.discussion_id = d.id
         JOIN projects p ON n.project_id = p.id
         LEFT JOIN issues i ON d.issue_id = i.id
         LEFT JOIN merge_requests m ON d.merge_request_id = m.id
         {where_sql}"
    );

    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let total_count: i64 = conn.query_row(&count_sql, param_refs.as_slice(), |row| row.get(0))?;

    // Sort column and direction are mapped through whitelists — user
    // input never reaches the SQL text directly.
    let sort_column = match filters.sort.as_str() {
        "updated" => "n.updated_at",
        _ => "n.created_at",
    };
    let order = if filters.order == "asc" {
        "ASC"
    } else {
        "DESC"
    };

    // Secondary sort on n.id makes the ordering deterministic when many
    // notes share a timestamp.
    let query_sql = format!(
        "SELECT
            n.id,
            n.gitlab_id,
            n.author_username,
            n.body,
            n.note_type,
            n.is_system,
            n.created_at,
            n.updated_at,
            n.position_new_path,
            n.position_new_line,
            n.position_old_path,
            n.position_old_line,
            n.resolvable,
            n.resolved,
            n.resolved_by,
            d.noteable_type,
            COALESCE(i.iid, m.iid) AS parent_iid,
            COALESCE(i.title, m.title) AS parent_title,
            p.path_with_namespace AS project_path
        FROM notes n
        JOIN discussions d ON n.discussion_id = d.id
        JOIN projects p ON n.project_id = p.id
        LEFT JOIN issues i ON d.issue_id = i.id
        LEFT JOIN merge_requests m ON d.merge_request_id = m.id
        {where_sql}
        ORDER BY {sort_column} {order}, n.id {order}
        LIMIT ?"
    );

    // The limit is pushed AFTER the count query so it doesn't affect the
    // total; it must be the last parameter to line up with "LIMIT ?".
    params.push(Box::new(filters.limit as i64));
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();

    let mut stmt = conn.prepare(&query_sql)?;
    let notes: Vec<NoteListRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            // SQLite stores booleans as integers; normalize to bool.
            let is_system_int: i64 = row.get(5)?;
            let resolvable_int: i64 = row.get(12)?;
            let resolved_int: i64 = row.get(13)?;

            Ok(NoteListRow {
                id: row.get(0)?,
                gitlab_id: row.get(1)?,
                // NULL author becomes an empty string for display.
                author_username: row.get::<_, Option<String>>(2)?.unwrap_or_default(),
                body: row.get(3)?,
                note_type: row.get(4)?,
                is_system: is_system_int == 1,
                created_at: row.get(6)?,
                updated_at: row.get(7)?,
                position_new_path: row.get(8)?,
                position_new_line: row.get(9)?,
                position_old_path: row.get(10)?,
                position_old_line: row.get(11)?,
                resolvable: resolvable_int == 1,
                resolved: resolved_int == 1,
                resolved_by: row.get(14)?,
                noteable_type: row.get(15)?,
                parent_iid: row.get(16)?,
                parent_title: row.get(17)?,
                project_path: row.get(18)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    Ok(NoteListResult { notes, total_count })
}
|
||||||
73
src/cli/commands/list/render_helpers.rs
Normal file
73
src/cli/commands/list/render_helpers.rs
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
use crate::cli::render::{self, StyledCell, Theme};
|
||||||
|
|
||||||
|
pub(crate) fn format_assignees(assignees: &[String]) -> String {
|
||||||
|
if assignees.is_empty() {
|
||||||
|
return "-".to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
let max_shown = 2;
|
||||||
|
let shown: Vec<String> = assignees
|
||||||
|
.iter()
|
||||||
|
.take(max_shown)
|
||||||
|
.map(|s| format!("@{}", render::truncate(s, 10)))
|
||||||
|
.collect();
|
||||||
|
let overflow = assignees.len().saturating_sub(max_shown);
|
||||||
|
|
||||||
|
if overflow > 0 {
|
||||||
|
format!("{} +{}", shown.join(", "), overflow)
|
||||||
|
} else {
|
||||||
|
shown.join(", ")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn format_discussions(total: i64, unresolved: i64) -> StyledCell {
|
||||||
|
if total == 0 {
|
||||||
|
return StyledCell::plain(String::new());
|
||||||
|
}
|
||||||
|
|
||||||
|
if unresolved > 0 {
|
||||||
|
let text = format!("{total}/");
|
||||||
|
let warn = Theme::warning().render(&format!("{unresolved}!"));
|
||||||
|
StyledCell::plain(format!("{text}{warn}"))
|
||||||
|
} else {
|
||||||
|
StyledCell::plain(format!("{total}"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn format_branches(target: &str, source: &str, max_width: usize) -> String {
|
||||||
|
let full = format!("{} <- {}", target, source);
|
||||||
|
render::truncate(&full, max_width)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Truncate `body` to at most `max_len` characters, appending "..." only
/// when something was actually cut. Counts Unicode scalar values, not
/// bytes, so multi-byte text is never split mid-character.
pub(crate) fn truncate_body(body: &str, max_len: usize) -> String {
    let mut chars = body.chars();
    let head: String = chars.by_ref().take(max_len).collect();
    if chars.next().is_none() {
        head
    } else {
        format!("{head}...")
    }
}
|
||||||
|
|
||||||
|
/// Map a GitLab note type to its short table label; unknown or absent
/// types render as "-".
pub(crate) fn format_note_type(note_type: Option<&str>) -> &'static str {
    let Some(kind) = note_type else { return "-" };
    if kind == "DiffNote" {
        "Diff"
    } else if kind == "DiscussionNote" {
        "Disc"
    } else {
        "-"
    }
}
|
||||||
|
|
||||||
|
/// Format a diff position as "path:line", just "path" when no line is
/// known, or "-" when there is no path at all.
pub(crate) fn format_note_path(path: Option<&str>, line: Option<i64>) -> String {
    let Some(p) = path else {
        return "-".to_string();
    };
    match line {
        Some(l) => format!("{p}:{l}"),
        None => p.to_string(),
    }
}
|
||||||
|
|
||||||
|
/// Format a note's parent reference: "Issue #N" or "MR !N" when both the
/// kind and IID are known, otherwise "-".
pub(crate) fn format_note_parent(noteable_type: Option<&str>, parent_iid: Option<i64>) -> String {
    if let (Some(kind), Some(iid)) = (noteable_type, parent_iid) {
        if kind == "Issue" {
            return format!("Issue #{iid}");
        }
        if kind == "MergeRequest" {
            return format!("MR !{iid}");
        }
    }
    "-".to_string()
}
|
||||||
@@ -1,32 +1,11 @@
|
|||||||
use super::*;
|
use super::*;
|
||||||
use crate::cli::commands::me::types::{ActivityEventType, AttentionState};
|
use crate::cli::commands::me::types::{ActivityEventType, AttentionState};
|
||||||
use crate::core::db::{create_connection, run_migrations};
|
|
||||||
use crate::core::time::now_ms;
|
use crate::core::time::now_ms;
|
||||||
|
use crate::test_support::{insert_project, setup_test_db};
|
||||||
use rusqlite::Connection;
|
use rusqlite::Connection;
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
// ─── Helpers ────────────────────────────────────────────────────────────────
|
// ─── Helpers ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
fn setup_test_db() -> Connection {
|
|
||||||
let conn = create_connection(Path::new(":memory:")).unwrap();
|
|
||||||
run_migrations(&conn).unwrap();
|
|
||||||
conn
|
|
||||||
}
|
|
||||||
|
|
||||||
fn insert_project(conn: &Connection, id: i64, path: &str) {
|
|
||||||
conn.execute(
|
|
||||||
"INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
|
|
||||||
VALUES (?1, ?2, ?3, ?4)",
|
|
||||||
rusqlite::params![
|
|
||||||
id,
|
|
||||||
id * 100,
|
|
||||||
path,
|
|
||||||
format!("https://git.example.com/{path}")
|
|
||||||
],
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn insert_issue(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str) {
|
fn insert_issue(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str) {
|
||||||
insert_issue_with_status(
|
insert_issue_with_status(
|
||||||
conn,
|
conn,
|
||||||
@@ -648,6 +627,115 @@ fn activity_is_own_flag() {
|
|||||||
assert!(results[0].is_own);
|
assert!(results[0].is_own);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ─── Activity on Closed/Merged Items ─────────────────────────────────────────
|
||||||
|
|
||||||
|
// A non-system note by another user on a MERGED MR authored by "alice"
// must still surface in alice's activity feed — merged state must not
// hide activity on items the user authored.
#[test]
fn activity_note_on_merged_authored_mr() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "group/repo");
    // MR id 10 in project 1, iid 99, authored by alice, state "merged".
    insert_mr(&conn, 10, 1, 99, "alice", "merged", false);

    let disc_id = 100;
    insert_discussion(&conn, disc_id, 1, Some(10), None);
    // Timestamp just before "now" so it falls inside the query window.
    let t = now_ms() - 1000;
    insert_note_at(
        &conn,
        200,
        disc_id,
        1,
        "bob",
        false,
        "follow-up question",
        t,
    );

    let results = query_activity(&conn, "alice", &[], 0).unwrap();
    assert_eq!(
        results.len(),
        1,
        "should see activity on merged MR authored by user"
    );
    assert_eq!(results[0].entity_iid, 99);
    assert_eq!(results[0].entity_type, "mr");
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn activity_note_on_closed_mr_as_reviewer() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_project(&conn, 1, "group/repo");
|
||||||
|
insert_mr(&conn, 10, 1, 99, "bob", "closed", false);
|
||||||
|
insert_reviewer(&conn, 10, "alice");
|
||||||
|
|
||||||
|
let disc_id = 100;
|
||||||
|
insert_discussion(&conn, disc_id, 1, Some(10), None);
|
||||||
|
let t = now_ms() - 1000;
|
||||||
|
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "can you re-check?", t);
|
||||||
|
|
||||||
|
let results = query_activity(&conn, "alice", &[], 0).unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
results.len(),
|
||||||
|
1,
|
||||||
|
"should see activity on closed MR where user is reviewer"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn activity_note_on_closed_assigned_issue() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_project(&conn, 1, "group/repo");
|
||||||
|
insert_issue_with_state(&conn, 10, 1, 42, "someone", "closed");
|
||||||
|
insert_assignee(&conn, 10, "alice");
|
||||||
|
|
||||||
|
let disc_id = 100;
|
||||||
|
insert_discussion(&conn, disc_id, 1, None, Some(10));
|
||||||
|
let t = now_ms() - 1000;
|
||||||
|
insert_note_at(
|
||||||
|
&conn,
|
||||||
|
200,
|
||||||
|
disc_id,
|
||||||
|
1,
|
||||||
|
"bob",
|
||||||
|
false,
|
||||||
|
"reopening discussion",
|
||||||
|
t,
|
||||||
|
);
|
||||||
|
|
||||||
|
let results = query_activity(&conn, "alice", &[], 0).unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
results.len(),
|
||||||
|
1,
|
||||||
|
"should see activity on closed issue assigned to user"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn since_last_check_includes_comment_on_merged_mr() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_project(&conn, 1, "group/repo");
|
||||||
|
insert_mr(&conn, 10, 1, 99, "alice", "merged", false);
|
||||||
|
|
||||||
|
let disc_id = 100;
|
||||||
|
insert_discussion(&conn, disc_id, 1, Some(10), None);
|
||||||
|
let t = now_ms() - 1000;
|
||||||
|
insert_note_at(
|
||||||
|
&conn,
|
||||||
|
200,
|
||||||
|
disc_id,
|
||||||
|
1,
|
||||||
|
"bob",
|
||||||
|
false,
|
||||||
|
"post-merge question",
|
||||||
|
t,
|
||||||
|
);
|
||||||
|
|
||||||
|
let groups = query_since_last_check(&conn, "alice", 0).unwrap();
|
||||||
|
let total_events: usize = groups.iter().map(|g| g.events.len()).sum();
|
||||||
|
assert_eq!(
|
||||||
|
total_events, 1,
|
||||||
|
"should see others' comments on merged MR in inbox"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
// ─── Assignment Detection Tests (Task #12) ─────────────────────────────────
|
// ─── Assignment Detection Tests (Task #12) ─────────────────────────────────
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -858,7 +946,7 @@ fn mentioned_in_finds_mention_on_unassigned_issue() {
|
|||||||
);
|
);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert_eq!(results.len(), 1);
|
assert_eq!(results.len(), 1);
|
||||||
assert_eq!(results[0].entity_type, "issue");
|
assert_eq!(results[0].entity_type, "issue");
|
||||||
assert_eq!(results[0].iid, 42);
|
assert_eq!(results[0].iid, 42);
|
||||||
@@ -876,10 +964,25 @@ fn mentioned_in_excludes_assigned_issue() {
|
|||||||
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "hey @alice", t);
|
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "hey @alice", t);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert!(results.is_empty(), "should exclude assigned issues");
|
assert!(results.is_empty(), "should exclude assigned issues");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn mentioned_in_excludes_authored_issue() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_project(&conn, 1, "group/repo");
|
||||||
|
insert_issue(&conn, 10, 1, 42, "alice"); // alice IS author
|
||||||
|
let disc_id = 100;
|
||||||
|
insert_discussion(&conn, disc_id, 1, None, Some(10));
|
||||||
|
let t = now_ms() - 1000;
|
||||||
|
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "hey @alice", t);
|
||||||
|
|
||||||
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
|
assert!(results.is_empty(), "should exclude authored issues");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn mentioned_in_finds_mention_on_non_authored_mr() {
|
fn mentioned_in_finds_mention_on_non_authored_mr() {
|
||||||
let conn = setup_test_db();
|
let conn = setup_test_db();
|
||||||
@@ -892,7 +995,7 @@ fn mentioned_in_finds_mention_on_non_authored_mr() {
|
|||||||
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "cc @alice", t);
|
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "cc @alice", t);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert_eq!(results.len(), 1);
|
assert_eq!(results.len(), 1);
|
||||||
assert_eq!(results[0].entity_type, "mr");
|
assert_eq!(results[0].entity_type, "mr");
|
||||||
assert_eq!(results[0].iid, 99);
|
assert_eq!(results[0].iid, 99);
|
||||||
@@ -909,7 +1012,7 @@ fn mentioned_in_excludes_authored_mr() {
|
|||||||
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "@alice thoughts?", t);
|
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "@alice thoughts?", t);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert!(results.is_empty(), "should exclude authored MRs");
|
assert!(results.is_empty(), "should exclude authored MRs");
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -925,7 +1028,7 @@ fn mentioned_in_excludes_reviewer_mr() {
|
|||||||
insert_note_at(&conn, 200, disc_id, 1, "charlie", false, "@alice fyi", t);
|
insert_note_at(&conn, 200, disc_id, 1, "charlie", false, "@alice fyi", t);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert!(
|
assert!(
|
||||||
results.is_empty(),
|
results.is_empty(),
|
||||||
"should exclude MRs where user is reviewer"
|
"should exclude MRs where user is reviewer"
|
||||||
@@ -949,7 +1052,7 @@ fn mentioned_in_includes_recently_closed_issue() {
|
|||||||
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "hey @alice", t);
|
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "hey @alice", t);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert_eq!(results.len(), 1, "recently closed issue should be included");
|
assert_eq!(results.len(), 1, "recently closed issue should be included");
|
||||||
assert_eq!(results[0].state, "closed");
|
assert_eq!(results[0].state, "closed");
|
||||||
}
|
}
|
||||||
@@ -971,7 +1074,7 @@ fn mentioned_in_excludes_old_closed_issue() {
|
|||||||
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "hey @alice", t);
|
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "hey @alice", t);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert!(results.is_empty(), "old closed issue should be excluded");
|
assert!(results.is_empty(), "old closed issue should be excluded");
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -996,7 +1099,7 @@ fn mentioned_in_attention_needs_attention_when_unreplied() {
|
|||||||
// alice has NOT replied
|
// alice has NOT replied
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert_eq!(results.len(), 1);
|
assert_eq!(results.len(), 1);
|
||||||
assert_eq!(results[0].attention_state, AttentionState::NeedsAttention);
|
assert_eq!(results[0].attention_state, AttentionState::NeedsAttention);
|
||||||
}
|
}
|
||||||
@@ -1023,7 +1126,7 @@ fn mentioned_in_attention_awaiting_when_replied() {
|
|||||||
insert_note_at(&conn, 201, disc_id, 1, "alice", false, "looks good", t2);
|
insert_note_at(&conn, 201, disc_id, 1, "alice", false, "looks good", t2);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert_eq!(results.len(), 1);
|
assert_eq!(results.len(), 1);
|
||||||
assert_eq!(results[0].attention_state, AttentionState::AwaitingResponse);
|
assert_eq!(results[0].attention_state, AttentionState::AwaitingResponse);
|
||||||
}
|
}
|
||||||
@@ -1044,7 +1147,7 @@ fn mentioned_in_project_filter() {
|
|||||||
insert_note_at(&conn, 201, disc_b, 2, "bob", false, "@alice", t);
|
insert_note_at(&conn, 201, disc_b, 2, "bob", false, "@alice", t);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[1], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[1], recency_cutoff, 0).unwrap();
|
||||||
assert_eq!(results.len(), 1);
|
assert_eq!(results.len(), 1);
|
||||||
assert_eq!(results[0].project_path, "group/repo-a");
|
assert_eq!(results[0].project_path, "group/repo-a");
|
||||||
}
|
}
|
||||||
@@ -1063,7 +1166,7 @@ fn mentioned_in_deduplicates_multiple_mentions_same_entity() {
|
|||||||
insert_note_at(&conn, 201, disc_id, 1, "charlie", false, "@alice +1", t2);
|
insert_note_at(&conn, 201, disc_id, 1, "charlie", false, "@alice +1", t2);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert_eq!(results.len(), 1, "should deduplicate to one entity");
|
assert_eq!(results.len(), 1, "should deduplicate to one entity");
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1087,12 +1190,70 @@ fn mentioned_in_rejects_false_positive_email() {
|
|||||||
);
|
);
|
||||||
|
|
||||||
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff).unwrap();
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, 0).unwrap();
|
||||||
assert!(results.is_empty(), "email-like text should not match");
|
assert!(results.is_empty(), "email-like text should not match");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn mentioned_in_excludes_old_mention_on_open_issue() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_project(&conn, 1, "group/repo");
|
||||||
|
insert_issue(&conn, 10, 1, 42, "someone");
|
||||||
|
let disc_id = 100;
|
||||||
|
insert_discussion(&conn, disc_id, 1, None, Some(10));
|
||||||
|
// Mention from 45 days ago — outside 30-day mention window
|
||||||
|
let t = now_ms() - 45 * 24 * 3600 * 1000;
|
||||||
|
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "hey @alice", t);
|
||||||
|
|
||||||
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
|
let mention_cutoff = now_ms() - 30 * 24 * 3600 * 1000;
|
||||||
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, mention_cutoff).unwrap();
|
||||||
|
assert!(
|
||||||
|
results.is_empty(),
|
||||||
|
"mentions older than 30 days should be excluded"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn mentioned_in_includes_recent_mention_on_open_issue() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_project(&conn, 1, "group/repo");
|
||||||
|
insert_issue(&conn, 10, 1, 42, "someone");
|
||||||
|
let disc_id = 100;
|
||||||
|
insert_discussion(&conn, disc_id, 1, None, Some(10));
|
||||||
|
// Mention from 5 days ago — within 30-day window
|
||||||
|
let t = now_ms() - 5 * 24 * 3600 * 1000;
|
||||||
|
insert_note_at(&conn, 200, disc_id, 1, "bob", false, "hey @alice", t);
|
||||||
|
|
||||||
|
let recency_cutoff = now_ms() - 7 * 24 * 3600 * 1000;
|
||||||
|
let mention_cutoff = now_ms() - 30 * 24 * 3600 * 1000;
|
||||||
|
let results = query_mentioned_in(&conn, "alice", &[], recency_cutoff, mention_cutoff).unwrap();
|
||||||
|
assert_eq!(results.len(), 1, "recent mentions should be included");
|
||||||
|
}
|
||||||
|
|
||||||
// ─── Helper Tests ──────────────────────────────────────────────────────────
|
// ─── Helper Tests ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn mentioned_in_sql_materializes_core_ctes() {
|
||||||
|
let sql = build_mentioned_in_sql("");
|
||||||
|
assert!(
|
||||||
|
sql.contains("candidate_issues AS MATERIALIZED"),
|
||||||
|
"candidate_issues should be materialized"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
sql.contains("candidate_mrs AS MATERIALIZED"),
|
||||||
|
"candidate_mrs should be materialized"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
sql.contains("note_ts_issue AS MATERIALIZED"),
|
||||||
|
"note_ts_issue should be materialized"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
sql.contains("note_ts_mr AS MATERIALIZED"),
|
||||||
|
"note_ts_mr should be materialized"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parse_attention_state_all_variants() {
|
fn parse_attention_state_all_variants() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
|||||||
@@ -27,6 +27,8 @@ const DEFAULT_ACTIVITY_SINCE_DAYS: i64 = 1;
|
|||||||
const MS_PER_DAY: i64 = 24 * 60 * 60 * 1000;
|
const MS_PER_DAY: i64 = 24 * 60 * 60 * 1000;
|
||||||
/// Recency window for closed/merged items in the "Mentioned In" section: 7 days.
|
/// Recency window for closed/merged items in the "Mentioned In" section: 7 days.
|
||||||
const RECENCY_WINDOW_MS: i64 = 7 * MS_PER_DAY;
|
const RECENCY_WINDOW_MS: i64 = 7 * MS_PER_DAY;
|
||||||
|
/// Only show mentions from notes created within this window (30 days).
|
||||||
|
const MENTION_WINDOW_MS: i64 = 30 * MS_PER_DAY;
|
||||||
|
|
||||||
/// Resolve the effective username from CLI flag or config.
|
/// Resolve the effective username from CLI flag or config.
|
||||||
///
|
///
|
||||||
@@ -151,7 +153,14 @@ pub fn run_me(config: &Config, args: &MeArgs, robot_mode: bool) -> Result<()> {
|
|||||||
|
|
||||||
let mentioned_in = if want_mentions {
|
let mentioned_in = if want_mentions {
|
||||||
let recency_cutoff = crate::core::time::now_ms() - RECENCY_WINDOW_MS;
|
let recency_cutoff = crate::core::time::now_ms() - RECENCY_WINDOW_MS;
|
||||||
query_mentioned_in(&conn, username, &project_ids, recency_cutoff)?
|
let mention_cutoff = crate::core::time::now_ms() - MENTION_WINDOW_MS;
|
||||||
|
query_mentioned_in(
|
||||||
|
&conn,
|
||||||
|
username,
|
||||||
|
&project_ids,
|
||||||
|
recency_cutoff,
|
||||||
|
mention_cutoff,
|
||||||
|
)?
|
||||||
} else {
|
} else {
|
||||||
Vec::new()
|
Vec::new()
|
||||||
};
|
};
|
||||||
@@ -247,7 +256,7 @@ pub fn run_me(config: &Config, args: &MeArgs, robot_mode: bool) -> Result<()> {
|
|||||||
|
|
||||||
if robot_mode {
|
if robot_mode {
|
||||||
let fields = args.fields.as_deref();
|
let fields = args.fields.as_deref();
|
||||||
render_robot::print_me_json(&dashboard, elapsed_ms, fields)?;
|
render_robot::print_me_json(&dashboard, elapsed_ms, fields, &config.gitlab.base_url)?;
|
||||||
} else if show_all {
|
} else if show_all {
|
||||||
render_human::print_me_dashboard(&dashboard, single_project);
|
render_human::print_me_dashboard(&dashboard, single_project);
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -362,19 +362,18 @@ pub fn query_activity(
|
|||||||
let project_clause = build_project_clause_at("p.id", project_ids, 3);
|
let project_clause = build_project_clause_at("p.id", project_ids, 3);
|
||||||
|
|
||||||
// Build the "my items" subquery fragments for issue/MR association checks.
|
// Build the "my items" subquery fragments for issue/MR association checks.
|
||||||
// These ensure we only see activity on items CURRENTLY associated with the user
|
// These ensure we only see activity on items associated with the user,
|
||||||
// AND currently open (AC-3.6). Without the state filter, activity would include
|
// regardless of state (open, closed, or merged). Comments on merged MRs
|
||||||
// events on closed/merged items that don't appear in the dashboard lists.
|
// and closed issues are still relevant (follow-up discussions, post-merge
|
||||||
|
// questions, etc.).
|
||||||
let my_issue_check = "EXISTS (
|
let my_issue_check = "EXISTS (
|
||||||
SELECT 1 FROM issue_assignees ia
|
SELECT 1 FROM issue_assignees ia
|
||||||
JOIN issues i2 ON ia.issue_id = i2.id
|
WHERE ia.issue_id = {entity_issue_id} AND ia.username = ?1
|
||||||
WHERE ia.issue_id = {entity_issue_id} AND ia.username = ?1 AND i2.state = 'opened'
|
|
||||||
)";
|
)";
|
||||||
let my_mr_check = "(
|
let my_mr_check = "(
|
||||||
EXISTS (SELECT 1 FROM merge_requests mr2 WHERE mr2.id = {entity_mr_id} AND mr2.author_username = ?1 AND mr2.state = 'opened')
|
EXISTS (SELECT 1 FROM merge_requests mr2 WHERE mr2.id = {entity_mr_id} AND mr2.author_username = ?1)
|
||||||
OR EXISTS (SELECT 1 FROM mr_reviewers rv
|
OR EXISTS (SELECT 1 FROM mr_reviewers rv
|
||||||
JOIN merge_requests mr3 ON rv.merge_request_id = mr3.id
|
WHERE rv.merge_request_id = {entity_mr_id} AND rv.username = ?1)
|
||||||
WHERE rv.merge_request_id = {entity_mr_id} AND rv.username = ?1 AND mr3.state = 'opened')
|
|
||||||
)";
|
)";
|
||||||
|
|
||||||
// Source 1: Human comments on my items
|
// Source 1: Human comments on my items
|
||||||
@@ -574,7 +573,7 @@ struct RawSinceCheckRow {
|
|||||||
|
|
||||||
/// Query actionable events from others since `cursor_ms`.
|
/// Query actionable events from others since `cursor_ms`.
|
||||||
/// Returns events from three sources:
|
/// Returns events from three sources:
|
||||||
/// 1. Others' comments on my open items
|
/// 1. Others' comments on my items (any state)
|
||||||
/// 2. @mentions on any item (not restricted to my items)
|
/// 2. @mentions on any item (not restricted to my items)
|
||||||
/// 3. Assignment/review-request system notes mentioning me
|
/// 3. Assignment/review-request system notes mentioning me
|
||||||
pub fn query_since_last_check(
|
pub fn query_since_last_check(
|
||||||
@@ -583,19 +582,18 @@ pub fn query_since_last_check(
|
|||||||
cursor_ms: i64,
|
cursor_ms: i64,
|
||||||
) -> Result<Vec<SinceCheckGroup>> {
|
) -> Result<Vec<SinceCheckGroup>> {
|
||||||
// Build the "my items" subquery fragments (reused from activity).
|
// Build the "my items" subquery fragments (reused from activity).
|
||||||
|
// No state filter: comments on closed/merged items are still actionable.
|
||||||
let my_issue_check = "EXISTS (
|
let my_issue_check = "EXISTS (
|
||||||
SELECT 1 FROM issue_assignees ia
|
SELECT 1 FROM issue_assignees ia
|
||||||
JOIN issues i2 ON ia.issue_id = i2.id
|
WHERE ia.issue_id = {entity_issue_id} AND ia.username = ?1
|
||||||
WHERE ia.issue_id = {entity_issue_id} AND ia.username = ?1 AND i2.state = 'opened'
|
|
||||||
)";
|
)";
|
||||||
let my_mr_check = "(
|
let my_mr_check = "(
|
||||||
EXISTS (SELECT 1 FROM merge_requests mr2 WHERE mr2.id = {entity_mr_id} AND mr2.author_username = ?1 AND mr2.state = 'opened')
|
EXISTS (SELECT 1 FROM merge_requests mr2 WHERE mr2.id = {entity_mr_id} AND mr2.author_username = ?1)
|
||||||
OR EXISTS (SELECT 1 FROM mr_reviewers rv
|
OR EXISTS (SELECT 1 FROM mr_reviewers rv
|
||||||
JOIN merge_requests mr3 ON rv.merge_request_id = mr3.id
|
WHERE rv.merge_request_id = {entity_mr_id} AND rv.username = ?1)
|
||||||
WHERE rv.merge_request_id = {entity_mr_id} AND rv.username = ?1 AND mr3.state = 'opened')
|
|
||||||
)";
|
)";
|
||||||
|
|
||||||
// Source 1: Others' comments on my open items
|
// Source 1: Others' comments on my items (any state)
|
||||||
let source1 = format!(
|
let source1 = format!(
|
||||||
"SELECT n.created_at, 'note',
|
"SELECT n.created_at, 'note',
|
||||||
CASE WHEN d.issue_id IS NOT NULL THEN 'issue' ELSE 'mr' END,
|
CASE WHEN d.issue_id IS NOT NULL THEN 'issue' ELSE 'mr' END,
|
||||||
@@ -789,10 +787,94 @@ struct RawMentionRow {
|
|||||||
mention_body: String,
|
mention_body: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn build_mentioned_in_sql(project_clause: &str) -> String {
|
||||||
|
format!(
|
||||||
|
"WITH candidate_issues AS MATERIALIZED (
|
||||||
|
SELECT i.id, i.iid, i.title, p.path_with_namespace, i.state,
|
||||||
|
i.updated_at, i.web_url
|
||||||
|
FROM issues i
|
||||||
|
JOIN projects p ON i.project_id = p.id
|
||||||
|
WHERE (i.state = 'opened' OR (i.state = 'closed' AND i.updated_at > ?2))
|
||||||
|
AND (i.author_username IS NULL OR i.author_username != ?1)
|
||||||
|
AND NOT EXISTS (
|
||||||
|
SELECT 1 FROM issue_assignees ia
|
||||||
|
WHERE ia.issue_id = i.id AND ia.username = ?1
|
||||||
|
)
|
||||||
|
{project_clause}
|
||||||
|
),
|
||||||
|
candidate_mrs AS MATERIALIZED (
|
||||||
|
SELECT m.id, m.iid, m.title, p.path_with_namespace, m.state,
|
||||||
|
m.updated_at, m.web_url
|
||||||
|
FROM merge_requests m
|
||||||
|
JOIN projects p ON m.project_id = p.id
|
||||||
|
WHERE (m.state = 'opened'
|
||||||
|
OR (m.state IN ('merged', 'closed') AND m.updated_at > ?2))
|
||||||
|
AND m.author_username != ?1
|
||||||
|
AND NOT EXISTS (
|
||||||
|
SELECT 1 FROM mr_reviewers rv
|
||||||
|
WHERE rv.merge_request_id = m.id AND rv.username = ?1
|
||||||
|
)
|
||||||
|
{project_clause}
|
||||||
|
),
|
||||||
|
note_ts_issue AS MATERIALIZED (
|
||||||
|
SELECT d.issue_id,
|
||||||
|
MAX(CASE WHEN n.author_username = ?1 THEN n.created_at END) AS my_ts,
|
||||||
|
MAX(CASE WHEN n.author_username != ?1 THEN n.created_at END) AS others_ts,
|
||||||
|
MAX(n.created_at) AS any_ts
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN candidate_issues ci ON ci.id = d.issue_id
|
||||||
|
WHERE n.is_system = 0
|
||||||
|
GROUP BY d.issue_id
|
||||||
|
),
|
||||||
|
note_ts_mr AS MATERIALIZED (
|
||||||
|
SELECT d.merge_request_id,
|
||||||
|
MAX(CASE WHEN n.author_username = ?1 THEN n.created_at END) AS my_ts,
|
||||||
|
MAX(CASE WHEN n.author_username != ?1 THEN n.created_at END) AS others_ts,
|
||||||
|
MAX(n.created_at) AS any_ts
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN candidate_mrs cm ON cm.id = d.merge_request_id
|
||||||
|
WHERE n.is_system = 0
|
||||||
|
GROUP BY d.merge_request_id
|
||||||
|
)
|
||||||
|
-- Issue mentions (scoped to candidate entities only)
|
||||||
|
SELECT 'issue', ci.iid, ci.title, ci.path_with_namespace, ci.state,
|
||||||
|
ci.updated_at, ci.web_url,
|
||||||
|
nt.my_ts, nt.others_ts, nt.any_ts,
|
||||||
|
n.body
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN candidate_issues ci ON ci.id = d.issue_id
|
||||||
|
LEFT JOIN note_ts_issue nt ON nt.issue_id = ci.id
|
||||||
|
WHERE n.is_system = 0
|
||||||
|
AND n.author_username != ?1
|
||||||
|
AND n.created_at > ?3
|
||||||
|
AND LOWER(n.body) LIKE '%@' || LOWER(?1) || '%'
|
||||||
|
UNION ALL
|
||||||
|
-- MR mentions (scoped to candidate entities only)
|
||||||
|
SELECT 'mr', cm.iid, cm.title, cm.path_with_namespace, cm.state,
|
||||||
|
cm.updated_at, cm.web_url,
|
||||||
|
nt.my_ts, nt.others_ts, nt.any_ts,
|
||||||
|
n.body
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN candidate_mrs cm ON cm.id = d.merge_request_id
|
||||||
|
LEFT JOIN note_ts_mr nt ON nt.merge_request_id = cm.id
|
||||||
|
WHERE n.is_system = 0
|
||||||
|
AND n.author_username != ?1
|
||||||
|
AND n.created_at > ?3
|
||||||
|
AND LOWER(n.body) LIKE '%@' || LOWER(?1) || '%'
|
||||||
|
ORDER BY 6 DESC
|
||||||
|
LIMIT 500",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
/// Query issues and MRs where the user is @mentioned but not assigned/authored/reviewing.
|
/// Query issues and MRs where the user is @mentioned but not assigned/authored/reviewing.
|
||||||
///
|
///
|
||||||
/// Includes open items unconditionally, plus recently-closed/merged items
|
/// Includes open items unconditionally, plus recently-closed/merged items
|
||||||
/// (where `updated_at > recency_cutoff_ms`).
|
/// (where `updated_at > recency_cutoff_ms`). Only considers mentions in notes
|
||||||
|
/// created after `mention_cutoff_ms` (typically 30 days ago).
|
||||||
///
|
///
|
||||||
/// Returns deduplicated results sorted by attention priority then recency.
|
/// Returns deduplicated results sorted by attention priority then recency.
|
||||||
pub fn query_mentioned_in(
|
pub fn query_mentioned_in(
|
||||||
@@ -800,83 +882,16 @@ pub fn query_mentioned_in(
|
|||||||
username: &str,
|
username: &str,
|
||||||
project_ids: &[i64],
|
project_ids: &[i64],
|
||||||
recency_cutoff_ms: i64,
|
recency_cutoff_ms: i64,
|
||||||
|
mention_cutoff_ms: i64,
|
||||||
) -> Result<Vec<MeMention>> {
|
) -> Result<Vec<MeMention>> {
|
||||||
let project_clause = build_project_clause_at("p.id", project_ids, 3);
|
let project_clause = build_project_clause_at("p.id", project_ids, 4);
|
||||||
|
// Materialized CTEs avoid pathological query plans for project-scoped mentions.
|
||||||
// CTE: note timestamps per issue (for attention state computation)
|
let sql = build_mentioned_in_sql(&project_clause);
|
||||||
// CTE: note timestamps per MR
|
|
||||||
// Then UNION ALL of issue mentions + MR mentions
|
|
||||||
let sql = format!(
|
|
||||||
"WITH note_ts_issue AS (
|
|
||||||
SELECT d.issue_id,
|
|
||||||
MAX(CASE WHEN n.author_username = ?1 THEN n.created_at END) AS my_ts,
|
|
||||||
MAX(CASE WHEN n.author_username != ?1 THEN n.created_at END) AS others_ts,
|
|
||||||
MAX(n.created_at) AS any_ts
|
|
||||||
FROM notes n
|
|
||||||
JOIN discussions d ON n.discussion_id = d.id
|
|
||||||
WHERE n.is_system = 0 AND d.issue_id IS NOT NULL
|
|
||||||
GROUP BY d.issue_id
|
|
||||||
),
|
|
||||||
note_ts_mr AS (
|
|
||||||
SELECT d.merge_request_id,
|
|
||||||
MAX(CASE WHEN n.author_username = ?1 THEN n.created_at END) AS my_ts,
|
|
||||||
MAX(CASE WHEN n.author_username != ?1 THEN n.created_at END) AS others_ts,
|
|
||||||
MAX(n.created_at) AS any_ts
|
|
||||||
FROM notes n
|
|
||||||
JOIN discussions d ON n.discussion_id = d.id
|
|
||||||
WHERE n.is_system = 0 AND d.merge_request_id IS NOT NULL
|
|
||||||
GROUP BY d.merge_request_id
|
|
||||||
)
|
|
||||||
-- Issue mentions
|
|
||||||
SELECT 'issue', i.iid, i.title, p.path_with_namespace, i.state,
|
|
||||||
i.updated_at, i.web_url,
|
|
||||||
nt.my_ts, nt.others_ts, nt.any_ts,
|
|
||||||
n.body
|
|
||||||
FROM notes n
|
|
||||||
JOIN discussions d ON n.discussion_id = d.id
|
|
||||||
JOIN issues i ON d.issue_id = i.id
|
|
||||||
JOIN projects p ON i.project_id = p.id
|
|
||||||
LEFT JOIN note_ts_issue nt ON nt.issue_id = i.id
|
|
||||||
WHERE n.is_system = 0
|
|
||||||
AND n.author_username != ?1
|
|
||||||
AND d.issue_id IS NOT NULL
|
|
||||||
AND LOWER(n.body) LIKE '%@' || LOWER(?1) || '%'
|
|
||||||
AND NOT EXISTS (
|
|
||||||
SELECT 1 FROM issue_assignees ia
|
|
||||||
WHERE ia.issue_id = d.issue_id AND ia.username = ?1
|
|
||||||
)
|
|
||||||
AND (i.state = 'opened' OR (i.state = 'closed' AND i.updated_at > ?2))
|
|
||||||
{project_clause}
|
|
||||||
UNION ALL
|
|
||||||
-- MR mentions
|
|
||||||
SELECT 'mr', m.iid, m.title, p.path_with_namespace, m.state,
|
|
||||||
m.updated_at, m.web_url,
|
|
||||||
nt.my_ts, nt.others_ts, nt.any_ts,
|
|
||||||
n.body
|
|
||||||
FROM notes n
|
|
||||||
JOIN discussions d ON n.discussion_id = d.id
|
|
||||||
JOIN merge_requests m ON d.merge_request_id = m.id
|
|
||||||
JOIN projects p ON m.project_id = p.id
|
|
||||||
LEFT JOIN note_ts_mr nt ON nt.merge_request_id = m.id
|
|
||||||
WHERE n.is_system = 0
|
|
||||||
AND n.author_username != ?1
|
|
||||||
AND d.merge_request_id IS NOT NULL
|
|
||||||
AND LOWER(n.body) LIKE '%@' || LOWER(?1) || '%'
|
|
||||||
AND m.author_username != ?1
|
|
||||||
AND NOT EXISTS (
|
|
||||||
SELECT 1 FROM mr_reviewers rv
|
|
||||||
WHERE rv.merge_request_id = d.merge_request_id AND rv.username = ?1
|
|
||||||
)
|
|
||||||
AND (m.state = 'opened'
|
|
||||||
OR (m.state IN ('merged', 'closed') AND m.updated_at > ?2))
|
|
||||||
{project_clause}
|
|
||||||
ORDER BY 6 DESC
|
|
||||||
LIMIT 500",
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
|
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
|
||||||
params.push(Box::new(username.to_string()));
|
params.push(Box::new(username.to_string()));
|
||||||
params.push(Box::new(recency_cutoff_ms));
|
params.push(Box::new(recency_cutoff_ms));
|
||||||
|
params.push(Box::new(mention_cutoff_ms));
|
||||||
for &pid in project_ids {
|
for &pid in project_ids {
|
||||||
params.push(Box::new(pid));
|
params.push(Box::new(pid));
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,11 +15,12 @@ pub fn print_me_json(
|
|||||||
dashboard: &MeDashboard,
|
dashboard: &MeDashboard,
|
||||||
elapsed_ms: u64,
|
elapsed_ms: u64,
|
||||||
fields: Option<&[String]>,
|
fields: Option<&[String]>,
|
||||||
|
gitlab_base_url: &str,
|
||||||
) -> crate::core::error::Result<()> {
|
) -> crate::core::error::Result<()> {
|
||||||
let envelope = MeJsonEnvelope {
|
let envelope = MeJsonEnvelope {
|
||||||
ok: true,
|
ok: true,
|
||||||
data: MeDataJson::from_dashboard(dashboard),
|
data: MeDataJson::from_dashboard(dashboard),
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::with_base_url(elapsed_ms, gitlab_base_url),
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut value = serde_json::to_value(&envelope)
|
let mut value = serde_json::to_value(&envelope)
|
||||||
@@ -478,4 +479,107 @@ mod tests {
|
|||||||
assert_eq!(value["data"]["cursor_reset"], serde_json::json!(true));
|
assert_eq!(value["data"]["cursor_reset"], serde_json::json!(true));
|
||||||
assert_eq!(value["meta"]["elapsed_ms"], serde_json::json!(17));
|
assert_eq!(value["meta"]["elapsed_ms"], serde_json::json!(17));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Integration test: full envelope serialization includes gitlab_base_url in meta.
|
||||||
|
/// Guards against drift where the wiring from run_me -> print_me_json -> JSON
|
||||||
|
/// could silently lose the base URL field.
|
||||||
|
#[test]
|
||||||
|
fn me_envelope_includes_gitlab_base_url_in_meta() {
|
||||||
|
let dashboard = MeDashboard {
|
||||||
|
username: "testuser".to_string(),
|
||||||
|
since_ms: Some(1_700_000_000_000),
|
||||||
|
summary: MeSummary {
|
||||||
|
project_count: 1,
|
||||||
|
open_issue_count: 0,
|
||||||
|
authored_mr_count: 0,
|
||||||
|
reviewing_mr_count: 0,
|
||||||
|
mentioned_in_count: 0,
|
||||||
|
needs_attention_count: 0,
|
||||||
|
},
|
||||||
|
open_issues: vec![],
|
||||||
|
open_mrs_authored: vec![],
|
||||||
|
reviewing_mrs: vec![],
|
||||||
|
mentioned_in: vec![],
|
||||||
|
activity: vec![],
|
||||||
|
since_last_check: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let envelope = MeJsonEnvelope {
|
||||||
|
ok: true,
|
||||||
|
data: MeDataJson::from_dashboard(&dashboard),
|
||||||
|
meta: RobotMeta::with_base_url(42, "https://gitlab.example.com"),
|
||||||
|
};
|
||||||
|
|
||||||
|
let value = serde_json::to_value(&envelope).unwrap();
|
||||||
|
assert_eq!(value["ok"], serde_json::json!(true));
|
||||||
|
assert_eq!(value["meta"]["elapsed_ms"], serde_json::json!(42));
|
||||||
|
assert_eq!(
|
||||||
|
value["meta"]["gitlab_base_url"],
|
||||||
|
serde_json::json!("https://gitlab.example.com")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Verify activity events carry the fields needed for URL construction
|
||||||
|
/// (entity_type, entity_iid, project) so consumers can combine with
|
||||||
|
/// meta.gitlab_base_url to build links.
|
||||||
|
#[test]
|
||||||
|
fn activity_event_carries_url_construction_fields() {
|
||||||
|
let dashboard = MeDashboard {
|
||||||
|
username: "testuser".to_string(),
|
||||||
|
since_ms: Some(1_700_000_000_000),
|
||||||
|
summary: MeSummary {
|
||||||
|
project_count: 1,
|
||||||
|
open_issue_count: 0,
|
||||||
|
authored_mr_count: 0,
|
||||||
|
reviewing_mr_count: 0,
|
||||||
|
mentioned_in_count: 0,
|
||||||
|
needs_attention_count: 0,
|
||||||
|
},
|
||||||
|
open_issues: vec![],
|
||||||
|
open_mrs_authored: vec![],
|
||||||
|
reviewing_mrs: vec![],
|
||||||
|
mentioned_in: vec![],
|
||||||
|
activity: vec![MeActivityEvent {
|
||||||
|
timestamp: 1_700_000_000_000,
|
||||||
|
event_type: ActivityEventType::Note,
|
||||||
|
entity_type: "mr".to_string(),
|
||||||
|
entity_iid: 99,
|
||||||
|
project_path: "group/repo".to_string(),
|
||||||
|
actor: Some("alice".to_string()),
|
||||||
|
is_own: false,
|
||||||
|
summary: "Commented on MR".to_string(),
|
||||||
|
body_preview: None,
|
||||||
|
}],
|
||||||
|
since_last_check: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let envelope = MeJsonEnvelope {
|
||||||
|
ok: true,
|
||||||
|
data: MeDataJson::from_dashboard(&dashboard),
|
||||||
|
meta: RobotMeta::with_base_url(0, "https://gitlab.example.com"),
|
||||||
|
};
|
||||||
|
|
||||||
|
let value = serde_json::to_value(&envelope).unwrap();
|
||||||
|
let event = &value["data"]["activity"][0];
|
||||||
|
|
||||||
|
// These three fields + meta.gitlab_base_url = complete URL
|
||||||
|
assert_eq!(event["entity_type"], "mr");
|
||||||
|
assert_eq!(event["entity_iid"], 99);
|
||||||
|
assert_eq!(event["project"], "group/repo");
|
||||||
|
|
||||||
|
// Consumer constructs: https://gitlab.example.com/group/repo/-/merge_requests/99
|
||||||
|
let base = value["meta"]["gitlab_base_url"].as_str().unwrap();
|
||||||
|
let project = event["project"].as_str().unwrap();
|
||||||
|
let entity_path = match event["entity_type"].as_str().unwrap() {
|
||||||
|
"issue" => "issues",
|
||||||
|
"mr" => "merge_requests",
|
||||||
|
other => panic!("unexpected entity_type: {other}"),
|
||||||
|
};
|
||||||
|
let iid = event["entity_iid"].as_i64().unwrap();
|
||||||
|
let url = format!("{base}/{project}/-/{entity_path}/{iid}");
|
||||||
|
assert_eq!(
|
||||||
|
url,
|
||||||
|
"https://gitlab.example.com/group/repo/-/merge_requests/99"
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ pub mod cron;
|
|||||||
pub mod doctor;
|
pub mod doctor;
|
||||||
pub mod drift;
|
pub mod drift;
|
||||||
pub mod embed;
|
pub mod embed;
|
||||||
|
pub mod explain;
|
||||||
pub mod file_history;
|
pub mod file_history;
|
||||||
pub mod generate_docs;
|
pub mod generate_docs;
|
||||||
pub mod ingest;
|
pub mod ingest;
|
||||||
@@ -17,7 +18,6 @@ pub mod show;
|
|||||||
pub mod stats;
|
pub mod stats;
|
||||||
pub mod sync;
|
pub mod sync;
|
||||||
pub mod sync_status;
|
pub mod sync_status;
|
||||||
pub mod sync_surgical;
|
|
||||||
pub mod timeline;
|
pub mod timeline;
|
||||||
pub mod trace;
|
pub mod trace;
|
||||||
pub mod who;
|
pub mod who;
|
||||||
@@ -36,6 +36,7 @@ pub use cron::{
|
|||||||
pub use doctor::{DoctorChecks, print_doctor_results, run_doctor};
|
pub use doctor::{DoctorChecks, print_doctor_results, run_doctor};
|
||||||
pub use drift::{DriftResponse, print_drift_human, print_drift_json, run_drift};
|
pub use drift::{DriftResponse, print_drift_human, print_drift_json, run_drift};
|
||||||
pub use embed::{print_embed, print_embed_json, run_embed};
|
pub use embed::{print_embed, print_embed_json, run_embed};
|
||||||
|
pub use explain::{handle_explain, print_explain, print_explain_json, run_explain};
|
||||||
pub use file_history::{print_file_history, print_file_history_json, run_file_history};
|
pub use file_history::{print_file_history, print_file_history_json, run_file_history};
|
||||||
pub use generate_docs::{print_generate_docs, print_generate_docs_json, run_generate_docs};
|
pub use generate_docs::{print_generate_docs, print_generate_docs_json, run_generate_docs};
|
||||||
pub use ingest::{
|
pub use ingest::{
|
||||||
@@ -61,9 +62,8 @@ pub use show::{
|
|||||||
run_show_mr,
|
run_show_mr,
|
||||||
};
|
};
|
||||||
pub use stats::{print_stats, print_stats_json, run_stats};
|
pub use stats::{print_stats, print_stats_json, run_stats};
|
||||||
pub use sync::{SyncOptions, SyncResult, print_sync, print_sync_json, run_sync};
|
pub use sync::{SyncOptions, SyncResult, print_sync, print_sync_json, run_sync, run_sync_surgical};
|
||||||
pub use sync_status::{print_sync_status, print_sync_status_json, run_sync_status};
|
pub use sync_status::{print_sync_status, print_sync_status_json, run_sync_status};
|
||||||
pub use sync_surgical::run_sync_surgical;
|
|
||||||
pub use timeline::{TimelineParams, print_timeline, print_timeline_json_with_meta, run_timeline};
|
pub use timeline::{TimelineParams, print_timeline, print_timeline_json_with_meta, run_timeline};
|
||||||
pub use trace::{parse_trace_path, print_trace, print_trace_json};
|
pub use trace::{parse_trace_path, print_trace, print_trace_json};
|
||||||
pub use who::{WhoRun, print_who_human, print_who_json, run_who};
|
pub use who::{WhoRun, print_who_human, print_who_json, run_who};
|
||||||
|
|||||||
@@ -558,7 +558,7 @@ pub fn print_related_human(response: &RelatedResponse) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_related_json(response: &RelatedResponse, elapsed_ms: u64) {
|
pub fn print_related_json(response: &RelatedResponse, elapsed_ms: u64) {
|
||||||
let meta = RobotMeta { elapsed_ms };
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
let output = serde_json::json!({
|
let output = serde_json::json!({
|
||||||
"ok": true,
|
"ok": true,
|
||||||
"data": response,
|
"data": response,
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
use crate::cli::render::Theme;
|
use crate::cli::render::{self, Theme};
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
|
|
||||||
use crate::Config;
|
use crate::Config;
|
||||||
@@ -20,11 +20,16 @@ use crate::search::{
|
|||||||
pub struct SearchResultDisplay {
|
pub struct SearchResultDisplay {
|
||||||
pub document_id: i64,
|
pub document_id: i64,
|
||||||
pub source_type: String,
|
pub source_type: String,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub source_entity_iid: Option<i64>,
|
||||||
pub title: String,
|
pub title: String,
|
||||||
pub url: Option<String>,
|
pub url: Option<String>,
|
||||||
pub author: Option<String>,
|
pub author: Option<String>,
|
||||||
pub created_at: Option<String>,
|
pub created_at: Option<String>,
|
||||||
pub updated_at: Option<String>,
|
pub updated_at: Option<String>,
|
||||||
|
/// Raw epoch ms for human rendering; not serialized to JSON.
|
||||||
|
#[serde(skip)]
|
||||||
|
pub updated_at_ms: Option<i64>,
|
||||||
pub project_path: String,
|
pub project_path: String,
|
||||||
pub labels: Vec<String>,
|
pub labels: Vec<String>,
|
||||||
pub paths: Vec<String>,
|
pub paths: Vec<String>,
|
||||||
@@ -216,11 +221,13 @@ pub async fn run_search(
|
|||||||
results.push(SearchResultDisplay {
|
results.push(SearchResultDisplay {
|
||||||
document_id: row.document_id,
|
document_id: row.document_id,
|
||||||
source_type: row.source_type.clone(),
|
source_type: row.source_type.clone(),
|
||||||
|
source_entity_iid: row.source_entity_iid,
|
||||||
title: row.title.clone().unwrap_or_default(),
|
title: row.title.clone().unwrap_or_default(),
|
||||||
url: row.url.clone(),
|
url: row.url.clone(),
|
||||||
author: row.author.clone(),
|
author: row.author.clone(),
|
||||||
created_at: row.created_at.map(ms_to_iso),
|
created_at: row.created_at.map(ms_to_iso),
|
||||||
updated_at: row.updated_at.map(ms_to_iso),
|
updated_at: row.updated_at.map(ms_to_iso),
|
||||||
|
updated_at_ms: row.updated_at,
|
||||||
project_path: row.project_path.clone(),
|
project_path: row.project_path.clone(),
|
||||||
labels: row.labels.clone(),
|
labels: row.labels.clone(),
|
||||||
paths: row.paths.clone(),
|
paths: row.paths.clone(),
|
||||||
@@ -242,6 +249,7 @@ pub async fn run_search(
|
|||||||
struct HydratedRow {
|
struct HydratedRow {
|
||||||
document_id: i64,
|
document_id: i64,
|
||||||
source_type: String,
|
source_type: String,
|
||||||
|
source_entity_iid: Option<i64>,
|
||||||
title: Option<String>,
|
title: Option<String>,
|
||||||
url: Option<String>,
|
url: Option<String>,
|
||||||
author: Option<String>,
|
author: Option<String>,
|
||||||
@@ -268,7 +276,26 @@ fn hydrate_results(conn: &rusqlite::Connection, document_ids: &[i64]) -> Result<
|
|||||||
(SELECT json_group_array(dl.label_name)
|
(SELECT json_group_array(dl.label_name)
|
||||||
FROM document_labels dl WHERE dl.document_id = d.id) AS labels_json,
|
FROM document_labels dl WHERE dl.document_id = d.id) AS labels_json,
|
||||||
(SELECT json_group_array(dp.path)
|
(SELECT json_group_array(dp.path)
|
||||||
FROM document_paths dp WHERE dp.document_id = d.id) AS paths_json
|
FROM document_paths dp WHERE dp.document_id = d.id) AS paths_json,
|
||||||
|
CASE d.source_type
|
||||||
|
WHEN 'issue' THEN
|
||||||
|
(SELECT i.iid FROM issues i WHERE i.id = d.source_id)
|
||||||
|
WHEN 'merge_request' THEN
|
||||||
|
(SELECT m.iid FROM merge_requests m WHERE m.id = d.source_id)
|
||||||
|
WHEN 'discussion' THEN
|
||||||
|
(SELECT COALESCE(
|
||||||
|
(SELECT i.iid FROM issues i WHERE i.id = disc.issue_id),
|
||||||
|
(SELECT m.iid FROM merge_requests m WHERE m.id = disc.merge_request_id)
|
||||||
|
) FROM discussions disc WHERE disc.id = d.source_id)
|
||||||
|
WHEN 'note' THEN
|
||||||
|
(SELECT COALESCE(
|
||||||
|
(SELECT i.iid FROM issues i WHERE i.id = disc.issue_id),
|
||||||
|
(SELECT m.iid FROM merge_requests m WHERE m.id = disc.merge_request_id)
|
||||||
|
) FROM notes n
|
||||||
|
JOIN discussions disc ON disc.id = n.discussion_id
|
||||||
|
WHERE n.id = d.source_id)
|
||||||
|
ELSE NULL
|
||||||
|
END AS source_entity_iid
|
||||||
FROM json_each(?1) AS j
|
FROM json_each(?1) AS j
|
||||||
JOIN documents d ON d.id = j.value
|
JOIN documents d ON d.id = j.value
|
||||||
JOIN projects p ON p.id = d.project_id
|
JOIN projects p ON p.id = d.project_id
|
||||||
@@ -293,6 +320,7 @@ fn hydrate_results(conn: &rusqlite::Connection, document_ids: &[i64]) -> Result<
|
|||||||
project_path: row.get(8)?,
|
project_path: row.get(8)?,
|
||||||
labels: parse_json_array(&labels_json),
|
labels: parse_json_array(&labels_json),
|
||||||
paths: parse_json_array(&paths_json),
|
paths: parse_json_array(&paths_json),
|
||||||
|
source_entity_iid: row.get(11)?,
|
||||||
})
|
})
|
||||||
})?
|
})?
|
||||||
.collect::<std::result::Result<Vec<_>, _>>()?;
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
@@ -309,6 +337,96 @@ fn parse_json_array(json: &str) -> Vec<String> {
|
|||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Collapse newlines and runs of whitespace in a snippet into single spaces.
|
||||||
|
///
|
||||||
|
/// Document `content_text` includes multi-line metadata (Project:, URL:, Labels:, etc.).
|
||||||
|
/// FTS5 snippet() preserves these newlines, causing unindented lines when rendered.
|
||||||
|
fn collapse_newlines(s: &str) -> String {
|
||||||
|
let mut result = String::with_capacity(s.len());
|
||||||
|
let mut prev_was_space = false;
|
||||||
|
for c in s.chars() {
|
||||||
|
if c.is_whitespace() {
|
||||||
|
if !prev_was_space {
|
||||||
|
result.push(' ');
|
||||||
|
prev_was_space = true;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
result.push(c);
|
||||||
|
prev_was_space = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Truncate a snippet to `max_visible` visible characters, respecting `<mark>` tag boundaries.
|
||||||
|
///
|
||||||
|
/// Counts only visible text (not tags) toward the limit, and ensures we never cut
|
||||||
|
/// inside a `<mark>...</mark>` pair (which would break `render_snippet` highlighting).
|
||||||
|
fn truncate_snippet(snippet: &str, max_visible: usize) -> String {
|
||||||
|
if max_visible < 4 {
|
||||||
|
return snippet.to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut visible_count = 0;
|
||||||
|
let mut result = String::new();
|
||||||
|
let mut remaining = snippet;
|
||||||
|
|
||||||
|
while !remaining.is_empty() {
|
||||||
|
if let Some(start) = remaining.find("<mark>") {
|
||||||
|
// Count visible chars before the tag
|
||||||
|
let before = &remaining[..start];
|
||||||
|
let before_len = before.chars().count();
|
||||||
|
if visible_count + before_len >= max_visible.saturating_sub(3) {
|
||||||
|
// Truncate within the pre-tag text
|
||||||
|
let take = max_visible.saturating_sub(3).saturating_sub(visible_count);
|
||||||
|
let truncated: String = before.chars().take(take).collect();
|
||||||
|
result.push_str(&truncated);
|
||||||
|
result.push_str("...");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
result.push_str(before);
|
||||||
|
visible_count += before_len;
|
||||||
|
|
||||||
|
// Find matching </mark>
|
||||||
|
let after_open = &remaining[start + 6..];
|
||||||
|
if let Some(end) = after_open.find("</mark>") {
|
||||||
|
let highlighted = &after_open[..end];
|
||||||
|
let hl_len = highlighted.chars().count();
|
||||||
|
if visible_count + hl_len >= max_visible.saturating_sub(3) {
|
||||||
|
// Truncate within the highlighted text
|
||||||
|
let take = max_visible.saturating_sub(3).saturating_sub(visible_count);
|
||||||
|
let truncated: String = highlighted.chars().take(take).collect();
|
||||||
|
result.push_str("<mark>");
|
||||||
|
result.push_str(&truncated);
|
||||||
|
result.push_str("</mark>...");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
result.push_str(&remaining[start..start + 6 + end + 7]);
|
||||||
|
visible_count += hl_len;
|
||||||
|
remaining = &after_open[end + 7..];
|
||||||
|
} else {
|
||||||
|
// Unclosed <mark> — treat rest as plain text
|
||||||
|
result.push_str(&remaining[start..]);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// No more tags — handle remaining plain text
|
||||||
|
let rest_len = remaining.chars().count();
|
||||||
|
if visible_count + rest_len > max_visible && max_visible > 3 {
|
||||||
|
let take = max_visible.saturating_sub(3).saturating_sub(visible_count);
|
||||||
|
let truncated: String = remaining.chars().take(take).collect();
|
||||||
|
result.push_str(&truncated);
|
||||||
|
result.push_str("...");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
result.push_str(remaining);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
/// Render FTS snippet with `<mark>` tags as terminal highlight style.
|
/// Render FTS snippet with `<mark>` tags as terminal highlight style.
|
||||||
fn render_snippet(snippet: &str) -> String {
|
fn render_snippet(snippet: &str) -> String {
|
||||||
let mut result = String::new();
|
let mut result = String::new();
|
||||||
@@ -326,7 +444,7 @@ fn render_snippet(snippet: &str) -> String {
|
|||||||
result
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_search_results(response: &SearchResponse) {
|
pub fn print_search_results(response: &SearchResponse, explain: bool) {
|
||||||
if !response.warnings.is_empty() {
|
if !response.warnings.is_empty() {
|
||||||
for w in &response.warnings {
|
for w in &response.warnings {
|
||||||
eprintln!("{} {}", Theme::warning().render("Warning:"), w);
|
eprintln!("{} {}", Theme::warning().render("Warning:"), w);
|
||||||
@@ -341,11 +459,13 @@ pub fn print_search_results(response: &SearchResponse) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Phase 6: section divider header
|
||||||
println!(
|
println!(
|
||||||
"\n {} results for '{}' {}",
|
"{}",
|
||||||
Theme::bold().render(&response.total_results.to_string()),
|
render::section_divider(&format!(
|
||||||
Theme::bold().render(&response.query),
|
"{} results for '{}' {}",
|
||||||
Theme::muted().render(&response.mode)
|
response.total_results, response.query, response.mode
|
||||||
|
))
|
||||||
);
|
);
|
||||||
|
|
||||||
for (i, result) in response.results.iter().enumerate() {
|
for (i, result) in response.results.iter().enumerate() {
|
||||||
@@ -359,52 +479,104 @@ pub fn print_search_results(response: &SearchResponse) {
|
|||||||
_ => Theme::muted().render(&format!("{:>5}", &result.source_type)),
|
_ => Theme::muted().render(&format!("{:>5}", &result.source_type)),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Title line: rank, type badge, title
|
// Phase 1: entity ref (e.g. #42 or !99)
|
||||||
println!(
|
let entity_ref = result
|
||||||
" {:>3}. {} {}",
|
.source_entity_iid
|
||||||
Theme::muted().render(&(i + 1).to_string()),
|
.map(|iid| match result.source_type.as_str() {
|
||||||
type_badge,
|
"issue" | "discussion" | "note" => Theme::issue_ref().render(&format!("#{iid}")),
|
||||||
Theme::bold().render(&result.title)
|
"merge_request" => Theme::mr_ref().render(&format!("!{iid}")),
|
||||||
);
|
_ => String::new(),
|
||||||
|
});
|
||||||
|
|
||||||
// Metadata: project, author, labels — compact middle-dot line
|
// Phase 3: relative time
|
||||||
|
let time_str = result
|
||||||
|
.updated_at_ms
|
||||||
|
.map(|ms| Theme::dim().render(&render::format_relative_time_compact(ms)));
|
||||||
|
|
||||||
|
// Phase 2: build prefix, compute indent from its visible width
|
||||||
|
let prefix = format!(" {:>3}. {} ", i + 1, type_badge);
|
||||||
|
let indent = " ".repeat(render::visible_width(&prefix));
|
||||||
|
|
||||||
|
// Title line: rank, type badge, entity ref, title, relative time
|
||||||
|
let mut title_line = prefix;
|
||||||
|
if let Some(ref eref) = entity_ref {
|
||||||
|
title_line.push_str(eref);
|
||||||
|
title_line.push_str(" ");
|
||||||
|
}
|
||||||
|
title_line.push_str(&Theme::bold().render(&result.title));
|
||||||
|
if let Some(ref time) = time_str {
|
||||||
|
title_line.push_str(" ");
|
||||||
|
title_line.push_str(time);
|
||||||
|
}
|
||||||
|
println!("{title_line}");
|
||||||
|
|
||||||
|
// Metadata: project, author — compact middle-dot line
|
||||||
let sep = Theme::muted().render(" \u{b7} ");
|
let sep = Theme::muted().render(" \u{b7} ");
|
||||||
let mut meta_parts: Vec<String> = Vec::new();
|
let mut meta_parts: Vec<String> = Vec::new();
|
||||||
meta_parts.push(Theme::muted().render(&result.project_path));
|
meta_parts.push(Theme::muted().render(&result.project_path));
|
||||||
if let Some(ref author) = result.author {
|
if let Some(ref author) = result.author {
|
||||||
meta_parts.push(Theme::username().render(&format!("@{author}")));
|
meta_parts.push(Theme::username().render(&format!("@{author}")));
|
||||||
}
|
}
|
||||||
if !result.labels.is_empty() {
|
println!("{indent}{}", meta_parts.join(&sep));
|
||||||
let label_str = if result.labels.len() <= 3 {
|
|
||||||
result.labels.join(", ")
|
|
||||||
} else {
|
|
||||||
format!(
|
|
||||||
"{} +{}",
|
|
||||||
result.labels[..2].join(", "),
|
|
||||||
result.labels.len() - 2
|
|
||||||
)
|
|
||||||
};
|
|
||||||
meta_parts.push(Theme::muted().render(&label_str));
|
|
||||||
}
|
|
||||||
println!(" {}", meta_parts.join(&sep));
|
|
||||||
|
|
||||||
// Snippet with highlight styling
|
// Phase 5: limit snippet to ~2 terminal lines.
|
||||||
let rendered = render_snippet(&result.snippet);
|
// First collapse newlines — content_text includes multi-line metadata
|
||||||
println!(" {rendered}");
|
// (Project:, URL:, Labels:, etc.) that would print at column 0.
|
||||||
|
let collapsed = collapse_newlines(&result.snippet);
|
||||||
|
// Truncate based on visible text length (excluding <mark></mark> tags)
|
||||||
|
// to avoid cutting inside a highlight tag pair.
|
||||||
|
let max_snippet_width =
|
||||||
|
render::terminal_width().saturating_sub(render::visible_width(&indent));
|
||||||
|
let max_snippet_chars = max_snippet_width.saturating_mul(2);
|
||||||
|
let snippet = truncate_snippet(&collapsed, max_snippet_chars);
|
||||||
|
let rendered = render_snippet(&snippet);
|
||||||
|
println!("{indent}{rendered}");
|
||||||
|
|
||||||
if let Some(ref explain) = result.explain {
|
if let Some(ref explain_data) = result.explain {
|
||||||
println!(
|
let mut explain_line = format!(
|
||||||
" {} vec={} fts={} rrf={:.4}",
|
"{indent}{} vec={} fts={} rrf={:.4}",
|
||||||
Theme::accent().render("explain"),
|
Theme::accent().render("explain"),
|
||||||
explain
|
explain_data
|
||||||
.vector_rank
|
.vector_rank
|
||||||
.map(|r| r.to_string())
|
.map(|r| r.to_string())
|
||||||
.unwrap_or_else(|| "-".into()),
|
.unwrap_or_else(|| "-".into()),
|
||||||
explain
|
explain_data
|
||||||
.fts_rank
|
.fts_rank
|
||||||
.map(|r| r.to_string())
|
.map(|r| r.to_string())
|
||||||
.unwrap_or_else(|| "-".into()),
|
.unwrap_or_else(|| "-".into()),
|
||||||
explain.rrf_score
|
explain_data.rrf_score
|
||||||
|
);
|
||||||
|
// Phase 5: labels shown only in explain mode
|
||||||
|
if explain && !result.labels.is_empty() {
|
||||||
|
let label_str = if result.labels.len() <= 3 {
|
||||||
|
result.labels.join(", ")
|
||||||
|
} else {
|
||||||
|
format!(
|
||||||
|
"{} +{}",
|
||||||
|
result.labels[..2].join(", "),
|
||||||
|
result.labels.len() - 2
|
||||||
|
)
|
||||||
|
};
|
||||||
|
explain_line.push_str(&format!(" {}", Theme::muted().render(&label_str)));
|
||||||
|
}
|
||||||
|
println!("{explain_line}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 4: drill-down hint footer
|
||||||
|
if let Some(first) = response.results.first()
|
||||||
|
&& let Some(iid) = first.source_entity_iid
|
||||||
|
{
|
||||||
|
let cmd = match first.source_type.as_str() {
|
||||||
|
"issue" | "discussion" | "note" => Some(format!("lore issues {iid}")),
|
||||||
|
"merge_request" => Some(format!("lore mrs {iid}")),
|
||||||
|
_ => None,
|
||||||
|
};
|
||||||
|
if let Some(cmd) = cmd {
|
||||||
|
println!(
|
||||||
|
"\n {} {}",
|
||||||
|
Theme::dim().render("Tip:"),
|
||||||
|
Theme::dim().render(&format!("{cmd} for details"))
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -434,7 +606,13 @@ pub fn print_search_results_json(
|
|||||||
data: response,
|
data: response,
|
||||||
meta: SearchMeta { elapsed_ms },
|
meta: SearchMeta { elapsed_ms },
|
||||||
};
|
};
|
||||||
let mut value = serde_json::to_value(&output).unwrap();
|
let mut value = match serde_json::to_value(&output) {
|
||||||
|
Ok(v) => v,
|
||||||
|
Err(e) => {
|
||||||
|
eprintln!("Error serializing search response: {e}");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
};
|
||||||
if let Some(f) = fields {
|
if let Some(f) = fields {
|
||||||
let expanded = crate::cli::robot::expand_fields_preset(f, "search");
|
let expanded = crate::cli::robot::expand_fields_preset(f, "search");
|
||||||
crate::cli::robot::filter_fields(&mut value, "results", &expanded);
|
crate::cli::robot::filter_fields(&mut value, "results", &expanded);
|
||||||
@@ -444,3 +622,89 @@ pub fn print_search_results_json(
|
|||||||
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn truncate_snippet_short_text_unchanged() {
|
||||||
|
let s = "hello world";
|
||||||
|
assert_eq!(truncate_snippet(s, 100), "hello world");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn truncate_snippet_plain_text_truncated() {
|
||||||
|
let s = "this is a long string that exceeds the limit";
|
||||||
|
let result = truncate_snippet(s, 20);
|
||||||
|
assert!(result.ends_with("..."), "got: {result}");
|
||||||
|
// Visible chars should be <= 20
|
||||||
|
assert!(result.chars().count() <= 20, "got: {result}");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn truncate_snippet_preserves_mark_tags() {
|
||||||
|
let s = "some text <mark>keyword</mark> and more text here that is long";
|
||||||
|
let result = truncate_snippet(s, 30);
|
||||||
|
// Should not cut inside a <mark> pair
|
||||||
|
let open_count = result.matches("<mark>").count();
|
||||||
|
let close_count = result.matches("</mark>").count();
|
||||||
|
assert_eq!(open_count, close_count, "unbalanced tags in: {result}");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn truncate_snippet_cuts_before_mark_tag() {
|
||||||
|
let s = "a]very long prefix that exceeds the limit <mark>word</mark>";
|
||||||
|
let result = truncate_snippet(s, 15);
|
||||||
|
assert!(result.ends_with("..."), "got: {result}");
|
||||||
|
// The <mark> tag should not appear since we truncated before reaching it
|
||||||
|
assert!(
|
||||||
|
!result.contains("<mark>"),
|
||||||
|
"should not include tag: {result}"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn truncate_snippet_does_not_count_tags_as_visible() {
|
||||||
|
// With tags, raw length is 42 chars. Without tags, visible is 29.
|
||||||
|
let s = "prefix <mark>keyword</mark> suffix text";
|
||||||
|
// If max_visible = 35, the visible text (29 chars) fits — should NOT truncate
|
||||||
|
let result = truncate_snippet(s, 35);
|
||||||
|
assert_eq!(result, s, "should not truncate when visible text fits");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn truncate_snippet_small_limit_returns_as_is() {
|
||||||
|
let s = "text <mark>x</mark>";
|
||||||
|
// Very small limit should return as-is (guard clause)
|
||||||
|
assert_eq!(truncate_snippet(s, 3), s);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn collapse_newlines_flattens_multiline_metadata() {
|
||||||
|
let s = "[[Issue]] #4018: Remove math.js\nProject: vs/typescript-code\nURL: https://example.com\nLabels: []";
|
||||||
|
let result = collapse_newlines(s);
|
||||||
|
assert!(
|
||||||
|
!result.contains('\n'),
|
||||||
|
"should not contain newlines: {result}"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
result,
|
||||||
|
"[[Issue]] #4018: Remove math.js Project: vs/typescript-code URL: https://example.com Labels: []"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn collapse_newlines_preserves_mark_tags() {
|
||||||
|
let s = "first line\n<mark>keyword</mark>\nsecond line";
|
||||||
|
let result = collapse_newlines(s);
|
||||||
|
assert_eq!(result, "first line <mark>keyword</mark> second line");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn collapse_newlines_collapses_runs_of_whitespace() {
|
||||||
|
let s = "a \n\n b\t\tc";
|
||||||
|
let result = collapse_newlines(s);
|
||||||
|
assert_eq!(result, "a b c");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
312
src/cli/commands/show/issue.rs
Normal file
312
src/cli/commands/show/issue.rs
Normal file
@@ -0,0 +1,312 @@
|
|||||||
|
#[derive(Debug, Clone, Serialize)]
|
||||||
|
pub struct ClosingMrRef {
|
||||||
|
pub iid: i64,
|
||||||
|
pub title: String,
|
||||||
|
pub state: String,
|
||||||
|
pub web_url: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct IssueDetail {
|
||||||
|
pub id: i64,
|
||||||
|
pub iid: i64,
|
||||||
|
pub title: String,
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub state: String,
|
||||||
|
pub author_username: String,
|
||||||
|
pub created_at: i64,
|
||||||
|
pub updated_at: i64,
|
||||||
|
pub closed_at: Option<String>,
|
||||||
|
pub confidential: bool,
|
||||||
|
pub web_url: Option<String>,
|
||||||
|
pub project_path: String,
|
||||||
|
pub references_full: String,
|
||||||
|
pub labels: Vec<String>,
|
||||||
|
pub assignees: Vec<String>,
|
||||||
|
pub due_date: Option<String>,
|
||||||
|
pub milestone: Option<String>,
|
||||||
|
pub user_notes_count: i64,
|
||||||
|
pub merge_requests_count: usize,
|
||||||
|
pub closing_merge_requests: Vec<ClosingMrRef>,
|
||||||
|
pub discussions: Vec<DiscussionDetail>,
|
||||||
|
pub status_name: Option<String>,
|
||||||
|
pub status_category: Option<String>,
|
||||||
|
pub status_color: Option<String>,
|
||||||
|
pub status_icon_name: Option<String>,
|
||||||
|
pub status_synced_at: Option<i64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct DiscussionDetail {
|
||||||
|
pub notes: Vec<NoteDetail>,
|
||||||
|
pub individual_note: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct NoteDetail {
|
||||||
|
pub gitlab_id: i64,
|
||||||
|
pub author_username: String,
|
||||||
|
pub body: String,
|
||||||
|
pub created_at: i64,
|
||||||
|
pub is_system: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load the full detail view of issue `iid` from the local database.
///
/// `project_filter`, when given, scopes the lookup to one project; without
/// it the iid is searched across all synced projects and an ambiguous match
/// is an error (see `find_issue`).
///
/// # Errors
/// Propagates DB errors, plus `LoreError::NotFound` / `LoreError::Ambiguous`
/// from `find_issue`.
pub fn run_show_issue(
    config: &Config,
    iid: i64,
    project_filter: Option<&str>,
) -> Result<IssueDetail> {
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let conn = create_connection(&db_path)?;

    let issue = find_issue(&conn, iid, project_filter)?;

    let labels = get_issue_labels(&conn, issue.id)?;

    let assignees = get_issue_assignees(&conn, issue.id)?;

    let closing_mrs = get_closing_mrs(&conn, issue.id)?;

    let discussions = get_issue_discussions(&conn, issue.id)?;

    // GitLab-style full reference, e.g. "group/project#42".
    let references_full = format!("{}#{}", issue.project_path, issue.iid);
    // Only MRs with a 'closes' reference are counted (see get_closing_mrs).
    let merge_requests_count = closing_mrs.len();

    Ok(IssueDetail {
        id: issue.id,
        iid: issue.iid,
        title: issue.title,
        description: issue.description,
        state: issue.state,
        author_username: issue.author_username,
        created_at: issue.created_at,
        updated_at: issue.updated_at,
        closed_at: issue.closed_at,
        confidential: issue.confidential,
        web_url: issue.web_url,
        project_path: issue.project_path,
        references_full,
        labels,
        assignees,
        due_date: issue.due_date,
        milestone: issue.milestone_title,
        user_notes_count: issue.user_notes_count,
        merge_requests_count,
        closing_merge_requests: closing_mrs,
        discussions,
        status_name: issue.status_name,
        status_category: issue.status_category,
        status_color: issue.status_color,
        status_icon_name: issue.status_icon_name,
        status_synced_at: issue.status_synced_at,
    })
}
|
||||||
|
|
||||||
|
/// Raw issue row as read by `find_issue`; column order must match its SELECT.
#[derive(Debug)]
struct IssueRow {
    id: i64,
    iid: i64,
    title: String,
    description: Option<String>,
    state: String,
    author_username: String,
    created_at: i64,
    updated_at: i64,
    // Stored as a string in the DB, unlike created_at/updated_at (epoch ms).
    closed_at: Option<String>,
    confidential: bool,
    web_url: Option<String>,
    // projects.path_with_namespace from the JOIN.
    project_path: String,
    due_date: Option<String>,
    milestone_title: Option<String>,
    // Computed subquery: count of non-system notes on the issue.
    user_notes_count: i64,
    status_name: Option<String>,
    status_category: Option<String>,
    status_color: Option<String>,
    status_icon_name: Option<String>,
    status_synced_at: Option<i64>,
}
|
||||||
|
|
||||||
|
/// Look up one issue by iid, optionally scoped to a single project.
///
/// With a `project_filter` the path is resolved to a project id and the query
/// is restricted to it; otherwise the bare iid is searched across every
/// synced project. `user_notes_count` is computed inline as the number of
/// non-system notes attached to the issue's discussions.
///
/// # Errors
/// `LoreError::NotFound` when nothing matches; `LoreError::Ambiguous` when
/// the unscoped iid matches issues in more than one project.
fn find_issue(conn: &Connection, iid: i64, project_filter: Option<&str>) -> Result<IssueRow> {
    // SQL and its positional parameters are chosen together; params are
    // boxed so both match arms can share one Vec type.
    let (sql, params): (&str, Vec<Box<dyn rusqlite::ToSql>>) = match project_filter {
        Some(project) => {
            let project_id = resolve_project(conn, project)?;
            (
                "SELECT i.id, i.iid, i.title, i.description, i.state, i.author_username,
                        i.created_at, i.updated_at, i.closed_at, i.confidential,
                        i.web_url, p.path_with_namespace,
                        i.due_date, i.milestone_title,
                        (SELECT COUNT(*) FROM notes n
                         JOIN discussions d ON n.discussion_id = d.id
                         WHERE d.noteable_type = 'Issue' AND d.issue_id = i.id AND n.is_system = 0) AS user_notes_count,
                        i.status_name, i.status_category, i.status_color,
                        i.status_icon_name, i.status_synced_at
                 FROM issues i
                 JOIN projects p ON i.project_id = p.id
                 WHERE i.iid = ? AND i.project_id = ?",
                vec![Box::new(iid), Box::new(project_id)],
            )
        }
        None => (
            "SELECT i.id, i.iid, i.title, i.description, i.state, i.author_username,
                    i.created_at, i.updated_at, i.closed_at, i.confidential,
                    i.web_url, p.path_with_namespace,
                    i.due_date, i.milestone_title,
                    (SELECT COUNT(*) FROM notes n
                     JOIN discussions d ON n.discussion_id = d.id
                     WHERE d.noteable_type = 'Issue' AND d.issue_id = i.id AND n.is_system = 0) AS user_notes_count,
                    i.status_name, i.status_category, i.status_color,
                    i.status_icon_name, i.status_synced_at
             FROM issues i
             JOIN projects p ON i.project_id = p.id
             WHERE i.iid = ?",
            vec![Box::new(iid)],
        ),
    };

    // rusqlite wants a slice of trait-object refs, not the owned boxes.
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();

    let mut stmt = conn.prepare(sql)?;
    let issues: Vec<IssueRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            // SQLite has no boolean type; confidential is stored as an integer.
            let confidential_val: i64 = row.get(9)?;
            Ok(IssueRow {
                id: row.get(0)?,
                iid: row.get(1)?,
                title: row.get(2)?,
                description: row.get(3)?,
                state: row.get(4)?,
                author_username: row.get(5)?,
                created_at: row.get(6)?,
                updated_at: row.get(7)?,
                closed_at: row.get(8)?,
                confidential: confidential_val != 0,
                web_url: row.get(10)?,
                project_path: row.get(11)?,
                due_date: row.get(12)?,
                milestone_title: row.get(13)?,
                user_notes_count: row.get(14)?,
                status_name: row.get(15)?,
                status_category: row.get(16)?,
                status_color: row.get(17)?,
                status_icon_name: row.get(18)?,
                status_synced_at: row.get(19)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    match issues.len() {
        0 => Err(LoreError::NotFound(format!("Issue #{} not found", iid))),
        // unwrap is safe: len() == 1 guarantees a first element.
        1 => Ok(issues.into_iter().next().unwrap()),
        _ => {
            let projects: Vec<String> = issues.iter().map(|i| i.project_path.clone()).collect();
            Err(LoreError::Ambiguous(format!(
                "Issue #{} exists in multiple projects: {}. Use --project to specify.",
                iid,
                projects.join(", ")
            )))
        }
    }
}
|
||||||
|
|
||||||
|
/// Fetch the label names attached to an issue, sorted alphabetically.
fn get_issue_labels(conn: &Connection, issue_id: i64) -> Result<Vec<String>> {
    let mut query = conn.prepare(
        "SELECT l.name FROM labels l
         JOIN issue_labels il ON l.id = il.label_id
         WHERE il.issue_id = ?
         ORDER BY l.name",
    )?;

    let mut names = Vec::new();
    for name in query.query_map([issue_id], |row| row.get(0))? {
        names.push(name?);
    }
    Ok(names)
}
|
||||||
|
|
||||||
|
/// Fetch the usernames assigned to an issue, sorted alphabetically.
fn get_issue_assignees(conn: &Connection, issue_id: i64) -> Result<Vec<String>> {
    let mut query = conn.prepare(
        "SELECT username FROM issue_assignees
         WHERE issue_id = ?
         ORDER BY username",
    )?;

    let mut usernames = Vec::new();
    for username in query.query_map([issue_id], |row| row.get(0))? {
        usernames.push(username?);
    }
    Ok(usernames)
}
|
||||||
|
|
||||||
|
fn get_closing_mrs(conn: &Connection, issue_id: i64) -> Result<Vec<ClosingMrRef>> {
|
||||||
|
let mut stmt = conn.prepare(
|
||||||
|
"SELECT mr.iid, mr.title, mr.state, mr.web_url
|
||||||
|
FROM entity_references er
|
||||||
|
JOIN merge_requests mr ON mr.id = er.source_entity_id
|
||||||
|
WHERE er.target_entity_type = 'issue'
|
||||||
|
AND er.target_entity_id = ?
|
||||||
|
AND er.source_entity_type = 'merge_request'
|
||||||
|
AND er.reference_type = 'closes'
|
||||||
|
ORDER BY mr.iid",
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let mrs: Vec<ClosingMrRef> = stmt
|
||||||
|
.query_map([issue_id], |row| {
|
||||||
|
Ok(ClosingMrRef {
|
||||||
|
iid: row.get(0)?,
|
||||||
|
title: row.get(1)?,
|
||||||
|
state: row.get(2)?,
|
||||||
|
web_url: row.get(3)?,
|
||||||
|
})
|
||||||
|
})?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
Ok(mrs)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load all discussion threads for an issue, oldest first.
///
/// Threads that contain only system notes are dropped; empty threads are
/// kept (the `notes.is_empty()` branch below), presumably so a discussion
/// row with no synced notes still shows up — TODO confirm intent.
fn get_issue_discussions(conn: &Connection, issue_id: i64) -> Result<Vec<DiscussionDetail>> {
    let mut disc_stmt = conn.prepare(
        "SELECT id, individual_note FROM discussions
         WHERE issue_id = ?
         ORDER BY first_note_at",
    )?;

    let disc_rows: Vec<(i64, bool)> = disc_stmt
        .query_map([issue_id], |row| {
            // SQLite boolean stored as integer.
            let individual: i64 = row.get(1)?;
            Ok((row.get(0)?, individual == 1))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // One prepared statement, re-executed per discussion id.
    let mut note_stmt = conn.prepare(
        "SELECT gitlab_id, author_username, body, created_at, is_system
         FROM notes
         WHERE discussion_id = ?
         ORDER BY position",
    )?;

    let mut discussions = Vec::new();
    for (disc_id, individual_note) in disc_rows {
        let notes: Vec<NoteDetail> = note_stmt
            .query_map([disc_id], |row| {
                let is_system: i64 = row.get(4)?;
                Ok(NoteDetail {
                    gitlab_id: row.get(0)?,
                    author_username: row.get(1)?,
                    body: row.get(2)?,
                    created_at: row.get(3)?,
                    is_system: is_system == 1,
                })
            })?
            .collect::<std::result::Result<Vec<_>, _>>()?;

        // Keep threads with at least one user note, or with no notes at all;
        // drop system-only threads.
        let has_user_notes = notes.iter().any(|n| !n.is_system);
        if has_user_notes || notes.is_empty() {
            discussions.push(DiscussionDetail {
                notes,
                individual_note,
            });
        }
    }

    Ok(discussions)
}
|
||||||
19
src/cli/commands/show/mod.rs
Normal file
19
src/cli/commands/show/mod.rs
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
use crate::cli::render::{self, Icons, Theme};
use rusqlite::Connection;
use serde::Serialize;

use crate::Config;
use crate::cli::robot::RobotMeta;
use crate::core::db::create_connection;
use crate::core::error::{LoreError, Result};
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::time::ms_to_iso;

// The submodule files are textually included (rather than declared with
// `mod`) so they all share this module's imports and private items.
include!("issue.rs");
include!("mr.rs");
include!("render.rs");

#[cfg(test)]
#[path = "show_tests.rs"]
mod tests;
|
||||||
285
src/cli/commands/show/mr.rs
Normal file
285
src/cli/commands/show/mr.rs
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
/// Full detail view of a merge request, assembled by `run_show_mr`.
#[derive(Debug, Serialize)]
pub struct MrDetail {
    pub id: i64,
    pub iid: i64,
    pub title: String,
    pub description: Option<String>,
    pub state: String,
    /// True when the MR is marked as a draft/WIP.
    pub draft: bool,
    pub author_username: String,
    pub source_branch: String,
    pub target_branch: String,
    // Timestamps below are epoch milliseconds.
    pub created_at: i64,
    pub updated_at: i64,
    pub merged_at: Option<i64>,
    pub closed_at: Option<i64>,
    pub web_url: Option<String>,
    pub project_path: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub reviewers: Vec<String>,
    pub discussions: Vec<MrDiscussionDetail>,
}
|
||||||
|
|
||||||
|
/// One discussion thread on a merge request.
#[derive(Debug, Serialize)]
pub struct MrDiscussionDetail {
    /// Notes in thread order; may include system notes.
    pub notes: Vec<MrNoteDetail>,
    /// True when this is a standalone comment rather than a reply thread.
    pub individual_note: bool,
}
|
||||||
|
|
||||||
|
/// A single note (comment) inside an MR discussion.
#[derive(Debug, Serialize)]
pub struct MrNoteDetail {
    pub gitlab_id: i64,
    pub author_username: String,
    pub body: String,
    /// Creation time in epoch milliseconds.
    pub created_at: i64,
    /// True for GitLab-generated system notes.
    pub is_system: bool,
    /// Diff anchor when the note is attached to a line of code; `None` for
    /// plain comments.
    pub position: Option<DiffNotePosition>,
}
|
||||||
|
|
||||||
|
/// Location of a diff note within a merge request's changed files.
#[derive(Debug, Clone, Serialize)]
pub struct DiffNotePosition {
    /// Path on the old (target) side of the diff, if any.
    pub old_path: Option<String>,
    /// Path on the new (source) side of the diff, if any.
    pub new_path: Option<String>,
    pub old_line: Option<i64>,
    pub new_line: Option<i64>,
    /// GitLab position type (e.g. "text") — passed through as stored.
    pub position_type: Option<String>,
}
|
||||||
|
/// Load the full detail view of merge request `iid` from the local database.
///
/// Mirrors `run_show_issue`: `project_filter` scopes the lookup; without it
/// an iid that exists in several projects is an error (see `find_mr`).
///
/// # Errors
/// Propagates DB errors, plus `LoreError::NotFound` / `LoreError::Ambiguous`
/// from `find_mr`.
pub fn run_show_mr(config: &Config, iid: i64, project_filter: Option<&str>) -> Result<MrDetail> {
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let conn = create_connection(&db_path)?;

    let mr = find_mr(&conn, iid, project_filter)?;

    let labels = get_mr_labels(&conn, mr.id)?;

    let assignees = get_mr_assignees(&conn, mr.id)?;

    let reviewers = get_mr_reviewers(&conn, mr.id)?;

    let discussions = get_mr_discussions(&conn, mr.id)?;

    Ok(MrDetail {
        id: mr.id,
        iid: mr.iid,
        title: mr.title,
        description: mr.description,
        state: mr.state,
        draft: mr.draft,
        author_username: mr.author_username,
        source_branch: mr.source_branch,
        target_branch: mr.target_branch,
        created_at: mr.created_at,
        updated_at: mr.updated_at,
        merged_at: mr.merged_at,
        closed_at: mr.closed_at,
        web_url: mr.web_url,
        project_path: mr.project_path,
        labels,
        assignees,
        reviewers,
        discussions,
    })
}
|
||||||
|
|
||||||
|
/// Raw merge-request row as read by `find_mr`; column order must match its
/// SELECT.
struct MrRow {
    id: i64,
    iid: i64,
    title: String,
    description: Option<String>,
    state: String,
    draft: bool,
    author_username: String,
    source_branch: String,
    target_branch: String,
    // Timestamps in epoch milliseconds.
    created_at: i64,
    updated_at: i64,
    merged_at: Option<i64>,
    closed_at: Option<i64>,
    web_url: Option<String>,
    // projects.path_with_namespace from the JOIN.
    project_path: String,
}
|
||||||
|
|
||||||
|
/// Look up one merge request by iid, optionally scoped to a single project.
///
/// Mirrors `find_issue`: scoped query when `project_filter` is given,
/// cross-project search otherwise.
///
/// # Errors
/// `LoreError::NotFound` when nothing matches; `LoreError::Ambiguous` when
/// the unscoped iid matches MRs in more than one project.
fn find_mr(conn: &Connection, iid: i64, project_filter: Option<&str>) -> Result<MrRow> {
    // SQL and positional params chosen together; boxed so both arms share
    // one Vec type.
    let (sql, params): (&str, Vec<Box<dyn rusqlite::ToSql>>) = match project_filter {
        Some(project) => {
            let project_id = resolve_project(conn, project)?;
            (
                "SELECT m.id, m.iid, m.title, m.description, m.state, m.draft,
                        m.author_username, m.source_branch, m.target_branch,
                        m.created_at, m.updated_at, m.merged_at, m.closed_at,
                        m.web_url, p.path_with_namespace
                 FROM merge_requests m
                 JOIN projects p ON m.project_id = p.id
                 WHERE m.iid = ? AND m.project_id = ?",
                vec![Box::new(iid), Box::new(project_id)],
            )
        }
        None => (
            "SELECT m.id, m.iid, m.title, m.description, m.state, m.draft,
                    m.author_username, m.source_branch, m.target_branch,
                    m.created_at, m.updated_at, m.merged_at, m.closed_at,
                    m.web_url, p.path_with_namespace
             FROM merge_requests m
             JOIN projects p ON m.project_id = p.id
             WHERE m.iid = ?",
            vec![Box::new(iid)],
        ),
    };

    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();

    let mut stmt = conn.prepare(sql)?;
    let mrs: Vec<MrRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            // SQLite boolean stored as integer.
            // NOTE(review): decodes with `== 1` while find_issue uses `!= 0`
            // for `confidential` — confirm the draft column is strictly 0/1.
            let draft_val: i64 = row.get(5)?;
            Ok(MrRow {
                id: row.get(0)?,
                iid: row.get(1)?,
                title: row.get(2)?,
                description: row.get(3)?,
                state: row.get(4)?,
                draft: draft_val == 1,
                author_username: row.get(6)?,
                source_branch: row.get(7)?,
                target_branch: row.get(8)?,
                created_at: row.get(9)?,
                updated_at: row.get(10)?,
                merged_at: row.get(11)?,
                closed_at: row.get(12)?,
                web_url: row.get(13)?,
                project_path: row.get(14)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    match mrs.len() {
        0 => Err(LoreError::NotFound(format!("MR !{} not found", iid))),
        // unwrap is safe: len() == 1 guarantees a first element.
        1 => Ok(mrs.into_iter().next().unwrap()),
        _ => {
            let projects: Vec<String> = mrs.iter().map(|m| m.project_path.clone()).collect();
            Err(LoreError::Ambiguous(format!(
                "MR !{} exists in multiple projects: {}. Use --project to specify.",
                iid,
                projects.join(", ")
            )))
        }
    }
}
|
||||||
|
|
||||||
|
/// Fetch the label names attached to a merge request, sorted alphabetically.
fn get_mr_labels(conn: &Connection, mr_id: i64) -> Result<Vec<String>> {
    let mut query = conn.prepare(
        "SELECT l.name FROM labels l
         JOIN mr_labels ml ON l.id = ml.label_id
         WHERE ml.merge_request_id = ?
         ORDER BY l.name",
    )?;

    let mut names = Vec::new();
    for name in query.query_map([mr_id], |row| row.get(0))? {
        names.push(name?);
    }
    Ok(names)
}
|
||||||
|
|
||||||
|
/// Fetch the usernames assigned to a merge request, sorted alphabetically.
fn get_mr_assignees(conn: &Connection, mr_id: i64) -> Result<Vec<String>> {
    let mut query = conn.prepare(
        "SELECT username FROM mr_assignees
         WHERE merge_request_id = ?
         ORDER BY username",
    )?;

    let mut usernames = Vec::new();
    for username in query.query_map([mr_id], |row| row.get(0))? {
        usernames.push(username?);
    }
    Ok(usernames)
}
|
||||||
|
|
||||||
|
/// Fetch the reviewer usernames for a merge request, sorted alphabetically.
fn get_mr_reviewers(conn: &Connection, mr_id: i64) -> Result<Vec<String>> {
    let mut query = conn.prepare(
        "SELECT username FROM mr_reviewers
         WHERE merge_request_id = ?
         ORDER BY username",
    )?;

    let mut usernames = Vec::new();
    for username in query.query_map([mr_id], |row| row.get(0))? {
        usernames.push(username?);
    }
    Ok(usernames)
}
|
||||||
|
|
||||||
|
/// Load all discussion threads for a merge request, oldest first, including
/// diff-note positions where present.
///
/// Mirrors `get_issue_discussions`: system-only threads are dropped, empty
/// threads are kept.
fn get_mr_discussions(conn: &Connection, mr_id: i64) -> Result<Vec<MrDiscussionDetail>> {
    let mut disc_stmt = conn.prepare(
        "SELECT id, individual_note FROM discussions
         WHERE merge_request_id = ?
         ORDER BY first_note_at",
    )?;

    let disc_rows: Vec<(i64, bool)> = disc_stmt
        .query_map([mr_id], |row| {
            // SQLite boolean stored as integer.
            let individual: i64 = row.get(1)?;
            Ok((row.get(0)?, individual == 1))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // One prepared statement, re-executed per discussion id.
    let mut note_stmt = conn.prepare(
        "SELECT gitlab_id, author_username, body, created_at, is_system,
                position_old_path, position_new_path, position_old_line,
                position_new_line, position_type
         FROM notes
         WHERE discussion_id = ?
         ORDER BY position",
    )?;

    let mut discussions = Vec::new();
    for (disc_id, individual_note) in disc_rows {
        let notes: Vec<MrNoteDetail> = note_stmt
            .query_map([disc_id], |row| {
                let is_system: i64 = row.get(4)?;
                let old_path: Option<String> = row.get(5)?;
                let new_path: Option<String> = row.get(6)?;
                let old_line: Option<i64> = row.get(7)?;
                let new_line: Option<i64> = row.get(8)?;
                let position_type: Option<String> = row.get(9)?;

                // A diff position exists if any path/line column is set;
                // position_type alone is not treated as a position.
                let position = if old_path.is_some()
                    || new_path.is_some()
                    || old_line.is_some()
                    || new_line.is_some()
                {
                    Some(DiffNotePosition {
                        old_path,
                        new_path,
                        old_line,
                        new_line,
                        position_type,
                    })
                } else {
                    None
                };

                Ok(MrNoteDetail {
                    gitlab_id: row.get(0)?,
                    author_username: row.get(1)?,
                    body: row.get(2)?,
                    created_at: row.get(3)?,
                    is_system: is_system == 1,
                    position,
                })
            })?
            .collect::<std::result::Result<Vec<_>, _>>()?;

        // Keep threads with at least one user note, or with no notes at all;
        // drop system-only threads.
        let has_user_notes = notes.iter().any(|n| !n.is_system);
        if has_user_notes || notes.is_empty() {
            discussions.push(MrDiscussionDetail {
                notes,
                individual_note,
            });
        }
    }

    Ok(discussions)
}
|
||||||
|
|
||||||
584
src/cli/commands/show/render.rs
Normal file
584
src/cli/commands/show/render.rs
Normal file
@@ -0,0 +1,584 @@
|
|||||||
|
/// Format an epoch-millisecond timestamp as an absolute date; thin local
/// alias for the shared renderer helper.
fn format_date(ms: i64) -> String {
    render::format_date(ms)
}
|
||||||
|
|
||||||
|
/// Word-wrap `text` to `width` columns, prefixing continuation lines with
/// `indent`; thin local alias for the shared renderer helper.
fn wrap_text(text: &str, width: usize, indent: &str) -> String {
    render::wrap_indent(text, width, indent)
}
|
||||||
|
|
||||||
|
/// Render an `IssueDetail` to stdout as the human-readable `show issue` view:
/// title, Details, optional Development (closing MRs), Description, and
/// Discussions sections, in that order.
pub fn print_show_issue(issue: &IssueDetail) {
    // Title line
    println!(
        " Issue #{}: {}",
        issue.iid,
        Theme::bold().render(&issue.title),
    );

    // Details section
    println!("{}", render::section_divider("Details"));

    println!(
        " Ref {}",
        Theme::muted().render(&issue.references_full)
    );
    println!(
        " Project {}",
        Theme::info().render(&issue.project_path)
    );

    // Open issues get the success style; anything else renders dimmed.
    let (icon, state_style) = if issue.state == "opened" {
        (Icons::issue_opened(), Theme::success())
    } else {
        (Icons::issue_closed(), Theme::dim())
    };
    println!(
        " State {}",
        state_style.render(&format!("{icon} {}", issue.state))
    );

    if let Some(status) = &issue.status_name {
        // Status text is tinted with the project-defined hex color, if any.
        println!(
            " Status {}",
            render::style_with_hex(status, issue.status_color.as_deref())
        );
    }

    if issue.confidential {
        println!(" {}", Theme::error().bold().render("CONFIDENTIAL"));
    }

    println!(" Author @{}", issue.author_username);

    if !issue.assignees.is_empty() {
        let label = if issue.assignees.len() > 1 {
            "Assignees"
        } else {
            "Assignee"
        };
        // Pad the label out to a 12-column field.
        // NOTE(review): `12 - label.len()` underflows (panic) if a label ever
        // exceeds 12 chars; safe for "Assignee"/"Assignees" but brittle.
        println!(
            " {}{} {}",
            label,
            " ".repeat(12 - label.len()),
            issue
                .assignees
                .iter()
                .map(|a| format!("@{a}"))
                .collect::<Vec<_>>()
                .join(", ")
        );
    }

    println!(
        " Created {} ({})",
        format_date(issue.created_at),
        render::format_relative_time_compact(issue.created_at),
    );
    println!(
        " Updated {} ({})",
        format_date(issue.updated_at),
        render::format_relative_time_compact(issue.updated_at),
    );

    // closed_at is stored as a string, so it is printed verbatim rather than
    // through format_date like the epoch-ms fields above.
    if let Some(closed_at) = &issue.closed_at {
        println!(" Closed {closed_at}");
    }

    if let Some(due) = &issue.due_date {
        println!(" Due {due}");
    }

    if let Some(ms) = &issue.milestone {
        println!(" Milestone {ms}");
    }

    if !issue.labels.is_empty() {
        println!(
            " Labels {}",
            render::format_labels_bare(&issue.labels, issue.labels.len())
        );
    }

    if let Some(url) = &issue.web_url {
        println!(" URL {}", Theme::muted().render(url));
    }

    // Development section — only shown when closing MRs exist.
    if !issue.closing_merge_requests.is_empty() {
        println!("{}", render::section_divider("Development"));
        for mr in &issue.closing_merge_requests {
            let (mr_icon, mr_style) = match mr.state.as_str() {
                "merged" => (Icons::mr_merged(), Theme::accent()),
                "opened" => (Icons::mr_opened(), Theme::success()),
                "closed" => (Icons::mr_closed(), Theme::error()),
                _ => (Icons::mr_opened(), Theme::dim()),
            };
            println!(
                " {} !{} {} {}",
                mr_style.render(mr_icon),
                mr.iid,
                mr.title,
                mr_style.render(&mr.state),
            );
        }
    }

    // Description section
    println!("{}", render::section_divider("Description"));
    if let Some(desc) = &issue.description {
        let wrapped = wrap_text(desc, 72, " ");
        println!(" {wrapped}");
    } else {
        println!(" {}", Theme::muted().render("(no description)"));
    }

    // Discussions section — only threads containing at least one user note.
    let user_discussions: Vec<&DiscussionDetail> = issue
        .discussions
        .iter()
        .filter(|d| d.notes.iter().any(|n| !n.is_system))
        .collect();

    if user_discussions.is_empty() {
        println!("\n {}", Theme::muted().render("No discussions"));
    } else {
        println!(
            "{}",
            render::section_divider(&format!("Discussions ({})", user_discussions.len()))
        );

        for discussion in user_discussions {
            let user_notes: Vec<&NoteDetail> =
                discussion.notes.iter().filter(|n| !n.is_system).collect();

            // First note is the thread opener; replies are indented narrower.
            if let Some(first_note) = user_notes.first() {
                println!(
                    " {} {}",
                    Theme::info().render(&format!("@{}", first_note.author_username)),
                    format_date(first_note.created_at),
                );
                let wrapped = wrap_text(&first_note.body, 68, " ");
                println!(" {wrapped}");
                println!();

                for reply in user_notes.iter().skip(1) {
                    println!(
                        " {} {}",
                        Theme::info().render(&format!("@{}", reply.author_username)),
                        format_date(reply.created_at),
                    );
                    let wrapped = wrap_text(&reply.body, 66, " ");
                    println!(" {wrapped}");
                    println!();
                }
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// Render an `MrDetail` to stdout as the human-readable `show mr` view:
/// title (with draft marker), Details, Description, and Discussions
/// sections, in that order.
pub fn print_show_mr(mr: &MrDetail) {
    // Title line — draft MRs get an icon prefix before the title.
    let draft_prefix = if mr.draft {
        format!("{} ", Icons::mr_draft())
    } else {
        String::new()
    };
    println!(
        " MR !{}: {}{}",
        mr.iid,
        draft_prefix,
        Theme::bold().render(&mr.title),
    );

    // Details section
    println!("{}", render::section_divider("Details"));

    println!(" Project {}", Theme::info().render(&mr.project_path));

    let (icon, state_style) = match mr.state.as_str() {
        "opened" => (Icons::mr_opened(), Theme::success()),
        "merged" => (Icons::mr_merged(), Theme::accent()),
        "closed" => (Icons::mr_closed(), Theme::error()),
        _ => (Icons::mr_opened(), Theme::dim()),
    };
    println!(
        " State {}",
        state_style.render(&format!("{icon} {}", mr.state))
    );

    println!(
        " Branches {} -> {}",
        Theme::info().render(&mr.source_branch),
        Theme::warning().render(&mr.target_branch)
    );

    println!(" Author @{}", mr.author_username);

    if !mr.assignees.is_empty() {
        println!(
            " Assignees {}",
            mr.assignees
                .iter()
                .map(|a| format!("@{a}"))
                .collect::<Vec<_>>()
                .join(", ")
        );
    }

    if !mr.reviewers.is_empty() {
        println!(
            " Reviewers {}",
            mr.reviewers
                .iter()
                .map(|r| format!("@{r}"))
                .collect::<Vec<_>>()
                .join(", ")
        );
    }

    println!(
        " Created {} ({})",
        format_date(mr.created_at),
        render::format_relative_time_compact(mr.created_at),
    );
    println!(
        " Updated {} ({})",
        format_date(mr.updated_at),
        render::format_relative_time_compact(mr.updated_at),
    );

    if let Some(merged_at) = mr.merged_at {
        println!(
            " Merged {} ({})",
            format_date(merged_at),
            render::format_relative_time_compact(merged_at),
        );
    }

    if let Some(closed_at) = mr.closed_at {
        println!(
            " Closed {} ({})",
            format_date(closed_at),
            render::format_relative_time_compact(closed_at),
        );
    }

    if !mr.labels.is_empty() {
        println!(
            " Labels {}",
            render::format_labels_bare(&mr.labels, mr.labels.len())
        );
    }

    if let Some(url) = &mr.web_url {
        println!(" URL {}", Theme::muted().render(url));
    }

    // Description section
    println!("{}", render::section_divider("Description"));
    if let Some(desc) = &mr.description {
        let wrapped = wrap_text(desc, 72, " ");
        println!(" {wrapped}");
    } else {
        println!(" {}", Theme::muted().render("(no description)"));
    }

    // Discussions section — only threads containing at least one user note.
    let user_discussions: Vec<&MrDiscussionDetail> = mr
        .discussions
        .iter()
        .filter(|d| d.notes.iter().any(|n| !n.is_system))
        .collect();

    if user_discussions.is_empty() {
        println!("\n {}", Theme::muted().render("No discussions"));
    } else {
        println!(
            "{}",
            render::section_divider(&format!("Discussions ({})", user_discussions.len()))
        );

        for discussion in user_discussions {
            let user_notes: Vec<&MrNoteDetail> =
                discussion.notes.iter().filter(|n| !n.is_system).collect();

            if let Some(first_note) = user_notes.first() {
                // Diff notes show their file/line anchor above the comment.
                if let Some(pos) = &first_note.position {
                    print_diff_position(pos);
                }

                println!(
                    " {} {}",
                    Theme::info().render(&format!("@{}", first_note.author_username)),
                    format_date(first_note.created_at),
                );
                let wrapped = wrap_text(&first_note.body, 68, " ");
                println!(" {wrapped}");
                println!();

                for reply in user_notes.iter().skip(1) {
                    println!(
                        " {} {}",
                        Theme::info().render(&format!("@{}", reply.author_username)),
                        format_date(reply.created_at),
                    );
                    let wrapped = wrap_text(&reply.body, 66, " ");
                    println!(" {wrapped}");
                    println!();
                }
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// Print the file/line anchor of a diff note, e.g. "📍 src/main.rs:+42".
///
/// Prefers the new-side path; prints nothing when no path is recorded.
fn print_diff_position(pos: &DiffNotePosition) {
    let file = pos.new_path.as_ref().or(pos.old_path.as_ref());

    if let Some(file_path) = file {
        // Line suffix encodes how the note maps across the diff:
        // unchanged line ":N", moved ":old→new", added ":+N", removed ":-N".
        let line_str = match (pos.old_line, pos.new_line) {
            (Some(old), Some(new)) if old == new => format!(":{}", new),
            (Some(old), Some(new)) => format!(":{}→{}", old, new),
            (None, Some(new)) => format!(":+{}", new),
            (Some(old), None) => format!(":-{}", old),
            (None, None) => String::new(),
        };

        println!(
            " {} {}{}",
            // U+1F4CD round pushpin icon.
            Theme::dim().render("\u{1f4cd}"),
            Theme::warning().render(file_path),
            Theme::dim().render(&line_str)
        );
    }
}
|
||||||
|
|
||||||
|
/// Serializable mirror of `IssueDetail` for robot-mode (`--robot`) JSON
/// output of `lore show`. Timestamps are ISO-8601 strings, converted from
/// epoch milliseconds in the `From<&IssueDetail>` impl below.
#[derive(Serialize)]
pub struct IssueDetailJson {
    pub id: i64,
    pub iid: i64,
    pub title: String,
    pub description: Option<String>,
    pub state: String,
    pub author_username: String,
    pub created_at: String,
    pub updated_at: String,
    pub closed_at: Option<String>,
    pub confidential: bool,
    pub web_url: Option<String>,
    pub project_path: String,
    pub references_full: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub due_date: Option<String>,
    pub milestone: Option<String>,
    pub user_notes_count: i64,
    pub merge_requests_count: usize,
    pub closing_merge_requests: Vec<ClosingMrRefJson>,
    pub discussions: Vec<DiscussionDetailJson>,
    pub status_name: Option<String>,
    // Populated from the source detail but deliberately excluded from the
    // serialized payload.
    #[serde(skip_serializing)]
    pub status_category: Option<String>,
    pub status_color: Option<String>,
    pub status_icon_name: Option<String>,
    pub status_synced_at: Option<String>,
}

/// Minimal reference to a merge request recorded as closing the issue.
#[derive(Serialize)]
pub struct ClosingMrRefJson {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub web_url: Option<String>,
}

/// One discussion thread on the issue.
#[derive(Serialize)]
pub struct DiscussionDetailJson {
    pub notes: Vec<NoteDetailJson>,
    pub individual_note: bool,
}

/// A single note (comment) inside an issue discussion.
#[derive(Serialize)]
pub struct NoteDetailJson {
    pub gitlab_id: i64,
    pub author_username: String,
    pub body: String,
    pub created_at: String,
    // System notes are GitLab-generated events (e.g. label changes),
    // as opposed to user comments.
    pub is_system: bool,
}
|
||||||
|
|
||||||
|
/// Convert the internal issue detail into its JSON mirror, translating
/// epoch-millisecond timestamps to ISO-8601 via `ms_to_iso`.
impl From<&IssueDetail> for IssueDetailJson {
    fn from(issue: &IssueDetail) -> Self {
        Self {
            id: issue.id,
            iid: issue.iid,
            title: issue.title.clone(),
            description: issue.description.clone(),
            state: issue.state.clone(),
            author_username: issue.author_username.clone(),
            created_at: ms_to_iso(issue.created_at),
            updated_at: ms_to_iso(issue.updated_at),
            // NOTE(review): `closed_at` is cloned as-is (already a string on
            // IssueDetail), while MrDetail stores it as millis — confirm the
            // two sources really differ in representation.
            closed_at: issue.closed_at.clone(),
            confidential: issue.confidential,
            web_url: issue.web_url.clone(),
            project_path: issue.project_path.clone(),
            references_full: issue.references_full.clone(),
            labels: issue.labels.clone(),
            assignees: issue.assignees.clone(),
            due_date: issue.due_date.clone(),
            milestone: issue.milestone.clone(),
            user_notes_count: issue.user_notes_count,
            merge_requests_count: issue.merge_requests_count,
            closing_merge_requests: issue
                .closing_merge_requests
                .iter()
                .map(|mr| ClosingMrRefJson {
                    iid: mr.iid,
                    title: mr.title.clone(),
                    state: mr.state.clone(),
                    web_url: mr.web_url.clone(),
                })
                .collect(),
            discussions: issue.discussions.iter().map(|d| d.into()).collect(),
            status_name: issue.status_name.clone(),
            status_category: issue.status_category.clone(),
            status_color: issue.status_color.clone(),
            status_icon_name: issue.status_icon_name.clone(),
            status_synced_at: issue.status_synced_at.map(ms_to_iso),
        }
    }
}

/// Convert a discussion thread, delegating per-note conversion to
/// `From<&NoteDetail>`.
impl From<&DiscussionDetail> for DiscussionDetailJson {
    fn from(disc: &DiscussionDetail) -> Self {
        Self {
            notes: disc.notes.iter().map(|n| n.into()).collect(),
            individual_note: disc.individual_note,
        }
    }
}

/// Convert a single note; timestamps become ISO-8601.
impl From<&NoteDetail> for NoteDetailJson {
    fn from(note: &NoteDetail) -> Self {
        Self {
            gitlab_id: note.gitlab_id,
            author_username: note.author_username.clone(),
            body: note.body.clone(),
            created_at: ms_to_iso(note.created_at),
            is_system: note.is_system,
        }
    }
}
|
||||||
|
|
||||||
|
/// Serializable mirror of `MrDetail` for robot-mode JSON output of
/// `lore show` on a merge request. Timestamps are ISO-8601 strings.
#[derive(Serialize)]
pub struct MrDetailJson {
    pub id: i64,
    pub iid: i64,
    pub title: String,
    pub description: Option<String>,
    pub state: String,
    pub draft: bool,
    pub author_username: String,
    pub source_branch: String,
    pub target_branch: String,
    pub created_at: String,
    pub updated_at: String,
    pub merged_at: Option<String>,
    pub closed_at: Option<String>,
    pub web_url: Option<String>,
    pub project_path: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub reviewers: Vec<String>,
    pub discussions: Vec<MrDiscussionDetailJson>,
}

/// One discussion thread on the merge request.
#[derive(Serialize)]
pub struct MrDiscussionDetailJson {
    pub notes: Vec<MrNoteDetailJson>,
    pub individual_note: bool,
}

/// A single MR note; unlike issue notes it may carry a diff position
/// (file/line anchor) when the comment was left on a change.
#[derive(Serialize)]
pub struct MrNoteDetailJson {
    pub gitlab_id: i64,
    pub author_username: String,
    pub body: String,
    pub created_at: String,
    pub is_system: bool,
    pub position: Option<DiffNotePosition>,
}
|
||||||
|
|
||||||
|
/// Convert the internal MR detail into its JSON mirror; all millisecond
/// timestamps (including the optional merged/closed ones) go through
/// `ms_to_iso`.
impl From<&MrDetail> for MrDetailJson {
    fn from(mr: &MrDetail) -> Self {
        Self {
            id: mr.id,
            iid: mr.iid,
            title: mr.title.clone(),
            description: mr.description.clone(),
            state: mr.state.clone(),
            draft: mr.draft,
            author_username: mr.author_username.clone(),
            source_branch: mr.source_branch.clone(),
            target_branch: mr.target_branch.clone(),
            created_at: ms_to_iso(mr.created_at),
            updated_at: ms_to_iso(mr.updated_at),
            merged_at: mr.merged_at.map(ms_to_iso),
            closed_at: mr.closed_at.map(ms_to_iso),
            web_url: mr.web_url.clone(),
            project_path: mr.project_path.clone(),
            labels: mr.labels.clone(),
            assignees: mr.assignees.clone(),
            reviewers: mr.reviewers.clone(),
            discussions: mr.discussions.iter().map(|d| d.into()).collect(),
        }
    }
}

/// Convert an MR discussion thread, delegating note conversion.
impl From<&MrDiscussionDetail> for MrDiscussionDetailJson {
    fn from(disc: &MrDiscussionDetail) -> Self {
        Self {
            notes: disc.notes.iter().map(|n| n.into()).collect(),
            individual_note: disc.individual_note,
        }
    }
}

/// Convert a single MR note, carrying the optional diff position through.
impl From<&MrNoteDetail> for MrNoteDetailJson {
    fn from(note: &MrNoteDetail) -> Self {
        Self {
            gitlab_id: note.gitlab_id,
            author_username: note.author_username.clone(),
            body: note.body.clone(),
            created_at: ms_to_iso(note.created_at),
            is_system: note.is_system,
            position: note.position.clone(),
        }
    }
}
|
||||||
|
|
||||||
|
pub fn print_show_issue_json(issue: &IssueDetail, elapsed_ms: u64) {
|
||||||
|
let json_result = IssueDetailJson::from(issue);
|
||||||
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
|
let output = serde_json::json!({
|
||||||
|
"ok": true,
|
||||||
|
"data": json_result,
|
||||||
|
"meta": meta,
|
||||||
|
});
|
||||||
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_show_mr_json(mr: &MrDetail, elapsed_ms: u64) {
|
||||||
|
let json_result = MrDetailJson::from(mr);
|
||||||
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
|
let output = serde_json::json!({
|
||||||
|
"ok": true,
|
||||||
|
"data": json_result,
|
||||||
|
"meta": meta,
|
||||||
|
});
|
||||||
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
353
src/cli/commands/show/show_tests.rs
Normal file
353
src/cli/commands/show/show_tests.rs
Normal file
@@ -0,0 +1,353 @@
|
|||||||
|
use super::*;
use crate::core::db::run_migrations;
use std::path::Path;

/// Open an in-memory SQLite database and apply the full schema, giving each
/// test an isolated, fully-migrated database.
fn setup_test_db() -> Connection {
    let conn = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    conn
}
|
||||||
|
|
||||||
|
/// Insert the baseline project (id=1, path `group/repo`) that most tests
/// attach their issues and MRs to.
fn seed_project(conn: &Connection) {
    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url, created_at, updated_at)
         VALUES (1, 100, 'group/repo', 'https://gitlab.example.com', 1000, 2000)",
        [],
    )
    .unwrap();
}
|
||||||
|
|
||||||
|
/// Seed the baseline project plus one open issue (id=1, iid=10) in it.
fn seed_issue(conn: &Connection) {
    seed_project(conn);
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, author_username,
         created_at, updated_at, last_seen_at)
         VALUES (1, 200, 10, 1, 'Test issue', 'opened', 'author', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
}
|
||||||
|
|
||||||
|
/// Insert a second project (id=2, path `other/repo`) — used by the
/// ambiguity and project-filter tests.
fn seed_second_project(conn: &Connection) {
    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url, created_at, updated_at)
         VALUES (2, 101, 'other/repo', 'https://gitlab.example.com/other', 1000, 2000)",
        [],
    )
    .unwrap();
}
|
||||||
|
|
||||||
|
/// Create one discussion on `issue_id` containing `user_notes` user notes
/// followed by `system_notes` system notes.
///
/// The discussion id is allocated as MAX(id)+1 so the helper can be called
/// repeatedly to build multiple discussions. Note gitlab_ids are derived
/// from the discussion id to stay unique across calls.
fn seed_discussion_with_notes(
    conn: &Connection,
    issue_id: i64,
    project_id: i64,
    user_notes: usize,
    system_notes: usize,
) {
    let disc_id: i64 = conn
        .query_row(
            "SELECT COALESCE(MAX(id), 0) + 1 FROM discussions",
            [],
            |r| r.get(0),
        )
        .unwrap();
    conn.execute(
        "INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, first_note_at, last_note_at, last_seen_at)
         VALUES (?1, ?2, ?3, ?4, 'Issue', 1000, 2000, 2000)",
        rusqlite::params![disc_id, format!("disc-{}", disc_id), project_id, issue_id],
    )
    .unwrap();
    // NOTE(review): the `position` column is filled with a running index
    // here — confirm that matches the column's intended semantics.
    for i in 0..user_notes {
        conn.execute(
            "INSERT INTO notes (gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system, position)
             VALUES (?1, ?2, ?3, 'user1', 'comment', 1000, 2000, 2000, 0, ?4)",
            rusqlite::params![1000 + disc_id * 100 + i as i64, disc_id, project_id, i as i64],
        )
        .unwrap();
    }
    for i in 0..system_notes {
        conn.execute(
            "INSERT INTO notes (gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system, position)
             VALUES (?1, ?2, ?3, 'system', 'status changed', 1000, 2000, 2000, 1, ?4)",
            rusqlite::params![2000 + disc_id * 100 + i as i64, disc_id, project_id, (user_notes + i) as i64],
        )
        .unwrap();
    }
}
|
||||||
|
|
||||||
|
// --- find_issue tests ---
// Cover lookup by iid, optional project-path filtering, NotFound vs
// Ambiguous errors, and the user_notes_count aggregation.

#[test]
fn test_find_issue_basic() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let row = find_issue(&conn, 10, None).unwrap();
    assert_eq!(row.iid, 10);
    assert_eq!(row.title, "Test issue");
    assert_eq!(row.state, "opened");
    assert_eq!(row.author_username, "author");
    assert_eq!(row.project_path, "group/repo");
}

#[test]
fn test_find_issue_with_project_filter() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let row = find_issue(&conn, 10, Some("group/repo")).unwrap();
    assert_eq!(row.iid, 10);
    assert_eq!(row.project_path, "group/repo");
}

#[test]
fn test_find_issue_not_found() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let err = find_issue(&conn, 999, None).unwrap_err();
    assert!(matches!(err, LoreError::NotFound(_)));
}

#[test]
fn test_find_issue_wrong_project_filter() {
    let conn = setup_test_db();
    seed_issue(&conn);
    seed_second_project(&conn);
    // Issue 10 only exists in project 1, not project 2
    let err = find_issue(&conn, 10, Some("other/repo")).unwrap_err();
    assert!(matches!(err, LoreError::NotFound(_)));
}

#[test]
fn test_find_issue_ambiguous_without_project() {
    let conn = setup_test_db();
    seed_issue(&conn); // issue iid=10 in project 1
    seed_second_project(&conn);
    // Same iid in a second project makes the bare-iid lookup ambiguous.
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, author_username,
         created_at, updated_at, last_seen_at)
         VALUES (2, 201, 10, 2, 'Same iid different project', 'opened', 'author', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    let err = find_issue(&conn, 10, None).unwrap_err();
    assert!(matches!(err, LoreError::Ambiguous(_)));
}

#[test]
fn test_find_issue_ambiguous_resolved_with_project() {
    let conn = setup_test_db();
    seed_issue(&conn);
    seed_second_project(&conn);
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, author_username,
         created_at, updated_at, last_seen_at)
         VALUES (2, 201, 10, 2, 'Same iid different project', 'opened', 'author', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    // Supplying the project path disambiguates the duplicate iid.
    let row = find_issue(&conn, 10, Some("other/repo")).unwrap();
    assert_eq!(row.title, "Same iid different project");
}

#[test]
fn test_find_issue_user_notes_count_zero() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let row = find_issue(&conn, 10, None).unwrap();
    assert_eq!(row.user_notes_count, 0);
}

#[test]
fn test_find_issue_user_notes_count_excludes_system() {
    let conn = setup_test_db();
    seed_issue(&conn);
    // 2 user notes + 3 system notes = should count only 2
    seed_discussion_with_notes(&conn, 1, 1, 2, 3);
    let row = find_issue(&conn, 10, None).unwrap();
    assert_eq!(row.user_notes_count, 2);
}

#[test]
fn test_find_issue_user_notes_count_across_discussions() {
    let conn = setup_test_db();
    seed_issue(&conn);
    seed_discussion_with_notes(&conn, 1, 1, 3, 0); // 3 user notes
    seed_discussion_with_notes(&conn, 1, 1, 1, 2); // 1 user note + 2 system
    let row = find_issue(&conn, 10, None).unwrap();
    assert_eq!(row.user_notes_count, 4);
}

#[test]
fn test_find_issue_notes_count_ignores_other_issues() {
    let conn = setup_test_db();
    seed_issue(&conn);
    // Add a second issue
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, author_username,
         created_at, updated_at, last_seen_at)
         VALUES (2, 201, 20, 1, 'Other issue', 'opened', 'author', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    // Notes on issue 2, not issue 1
    seed_discussion_with_notes(&conn, 2, 1, 5, 0);
    let row = find_issue(&conn, 10, None).unwrap();
    assert_eq!(row.user_notes_count, 0); // Issue 10 has no notes
}
|
||||||
|
|
||||||
|
// --- helper tests: hex styling, assignees, closing MRs, text utilities ---

#[test]
fn test_ansi256_from_rgb() {
    // Moved to render.rs — keeping basic hex sanity check
    let result = render::style_with_hex("test", Some("#ff0000"));
    assert!(!result.is_empty());
}

#[test]
fn test_get_issue_assignees_empty() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let result = get_issue_assignees(&conn, 1).unwrap();
    assert!(result.is_empty());
}

#[test]
fn test_get_issue_assignees_single() {
    let conn = setup_test_db();
    seed_issue(&conn);
    conn.execute(
        "INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'charlie')",
        [],
    )
    .unwrap();
    let result = get_issue_assignees(&conn, 1).unwrap();
    assert_eq!(result, vec!["charlie"]);
}

#[test]
fn test_get_issue_assignees_multiple_sorted() {
    let conn = setup_test_db();
    seed_issue(&conn);
    // Inserted out of order on purpose; the query is expected to sort.
    conn.execute(
        "INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'bob')",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'alice')",
        [],
    )
    .unwrap();
    let result = get_issue_assignees(&conn, 1).unwrap();
    assert_eq!(result, vec!["alice", "bob"]); // alphabetical
}

#[test]
fn test_get_closing_mrs_empty() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let result = get_closing_mrs(&conn, 1).unwrap();
    assert!(result.is_empty());
}

#[test]
fn test_get_closing_mrs_single() {
    let conn = setup_test_db();
    seed_issue(&conn);
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, author_username,
         source_branch, target_branch, created_at, updated_at, last_seen_at)
         VALUES (1, 300, 5, 1, 'Fix the bug', 'merged', 'dev', 'fix', 'main', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id,
         target_entity_type, target_entity_id, reference_type, source_method, created_at)
         VALUES (1, 'merge_request', 1, 'issue', 1, 'closes', 'api', 3000)",
        [],
    )
    .unwrap();
    let result = get_closing_mrs(&conn, 1).unwrap();
    assert_eq!(result.len(), 1);
    assert_eq!(result[0].iid, 5);
    assert_eq!(result[0].title, "Fix the bug");
    assert_eq!(result[0].state, "merged");
}

#[test]
fn test_get_closing_mrs_ignores_mentioned() {
    let conn = setup_test_db();
    seed_issue(&conn);
    // Add a 'mentioned' reference that should be ignored
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, author_username,
         source_branch, target_branch, created_at, updated_at, last_seen_at)
         VALUES (1, 300, 5, 1, 'Some MR', 'opened', 'dev', 'feat', 'main', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id,
         target_entity_type, target_entity_id, reference_type, source_method, created_at)
         VALUES (1, 'merge_request', 1, 'issue', 1, 'mentioned', 'note_parse', 3000)",
        [],
    )
    .unwrap();
    let result = get_closing_mrs(&conn, 1).unwrap();
    assert!(result.is_empty()); // 'mentioned' refs not included
}

#[test]
fn test_get_closing_mrs_multiple_sorted() {
    let conn = setup_test_db();
    seed_issue(&conn);
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, author_username,
         source_branch, target_branch, created_at, updated_at, last_seen_at)
         VALUES (1, 300, 8, 1, 'Second fix', 'opened', 'dev', 'fix2', 'main', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, author_username,
         source_branch, target_branch, created_at, updated_at, last_seen_at)
         VALUES (2, 301, 5, 1, 'First fix', 'merged', 'dev', 'fix1', 'main', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id,
         target_entity_type, target_entity_id, reference_type, source_method, created_at)
         VALUES (1, 'merge_request', 1, 'issue', 1, 'closes', 'api', 3000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id,
         target_entity_type, target_entity_id, reference_type, source_method, created_at)
         VALUES (1, 'merge_request', 2, 'issue', 1, 'closes', 'api', 3000)",
        [],
    )
    .unwrap();
    let result = get_closing_mrs(&conn, 1).unwrap();
    assert_eq!(result.len(), 2);
    assert_eq!(result[0].iid, 5); // Lower iid first
    assert_eq!(result[1].iid, 8);
}

#[test]
fn wrap_text_single_line() {
    assert_eq!(wrap_text("hello world", 80, " "), "hello world");
}

#[test]
fn wrap_text_multiple_lines() {
    let result = wrap_text("one two three four five", 10, " ");
    assert!(result.contains('\n'));
}

#[test]
fn format_date_extracts_date_part() {
    // 2024-01-15T00:00:00Z in epoch milliseconds.
    let ms = 1705276800000;
    let date = format_date(ms);
    assert!(date.starts_with("2024-01-15"));
}
|
||||||
@@ -583,7 +583,7 @@ pub fn print_stats_json(result: &StatsResult, elapsed_ms: u64) {
|
|||||||
}),
|
}),
|
||||||
}),
|
}),
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
Ok(json) => println!("{json}"),
|
Ok(json) => println!("{json}"),
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
24
src/cli/commands/sync/mod.rs
Normal file
24
src/cli/commands/sync/mod.rs
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
pub mod surgical;
|
||||||
|
pub use surgical::run_sync_surgical;
|
||||||
|
|
||||||
|
use crate::cli::render::{self, Icons, Theme, format_number};
|
||||||
|
use serde::Serialize;
|
||||||
|
use std::time::Instant;
|
||||||
|
use tracing::Instrument;
|
||||||
|
use tracing::{debug, warn};
|
||||||
|
|
||||||
|
use crate::Config;
|
||||||
|
use crate::cli::progress::{format_stage_line, nested_progress, stage_spinner_v2};
|
||||||
|
use crate::core::error::Result;
|
||||||
|
use crate::core::metrics::{MetricsLayer, StageTiming};
|
||||||
|
use crate::core::shutdown::ShutdownSignal;
|
||||||
|
|
||||||
|
use super::embed::run_embed;
|
||||||
|
use super::generate_docs::run_generate_docs;
|
||||||
|
use super::ingest::{
|
||||||
|
DryRunPreview, IngestDisplay, ProjectStatusEnrichment, ProjectSummary, run_ingest,
|
||||||
|
run_ingest_dry_run,
|
||||||
|
};
|
||||||
|
|
||||||
|
include!("run.rs");
|
||||||
|
include!("render.rs");
|
||||||
533
src/cli/commands/sync/render.rs
Normal file
533
src/cli/commands/sync/render.rs
Normal file
@@ -0,0 +1,533 @@
|
|||||||
|
/// Print the human-readable summary for a completed `lore sync`.
///
/// Output layout: a headline line ("Synced" / "Sync completed with issues" /
/// "Already up to date"), then zero-suppressed detail lines joined by a
/// middle dot, then an error line, then an optional stage-timing summary.
pub fn print_sync(
    result: &SyncResult,
    elapsed: std::time::Duration,
    metrics: Option<&MetricsLayer>,
    show_timings: bool,
) {
    // Did this run fetch or update anything at all?
    let has_data = result.issues_updated > 0
        || result.mrs_updated > 0
        || result.discussions_fetched > 0
        || result.resource_events_fetched > 0
        || result.mr_diffs_fetched > 0
        || result.documents_regenerated > 0
        || result.documents_embedded > 0
        || result.statuses_enriched > 0;
    let has_failures = result.resource_events_failed > 0
        || result.mr_diffs_failed > 0
        || result.status_enrichment_errors > 0
        || result.documents_errored > 0
        || result.embedding_failed > 0;

    if !has_data && !has_failures {
        println!(
            "\n {} ({})\n",
            Theme::dim().render("Already up to date"),
            Theme::timing().render(&format!("{:.1}s", elapsed.as_secs_f64()))
        );
    } else {
        let headline = if has_failures {
            Theme::warning().bold().render("Sync completed with issues")
        } else {
            Theme::success().bold().render("Synced")
        };
        println!(
            "\n {} {} issues and {} MRs in {}",
            headline,
            Theme::info()
                .bold()
                .render(&result.issues_updated.to_string()),
            Theme::info().bold().render(&result.mrs_updated.to_string()),
            Theme::timing().render(&format!("{:.1}s", elapsed.as_secs_f64()))
        );

        // Detail: supporting counts, compact middle-dot format, zero-suppressed
        let mut details: Vec<String> = Vec::new();
        if result.discussions_fetched > 0 {
            details.push(format!(
                "{} {}",
                Theme::info().render(&result.discussions_fetched.to_string()),
                Theme::dim().render("discussions")
            ));
        }
        if result.resource_events_fetched > 0 {
            details.push(format!(
                "{} {}",
                Theme::info().render(&result.resource_events_fetched.to_string()),
                Theme::dim().render("events")
            ));
        }
        if result.mr_diffs_fetched > 0 {
            details.push(format!(
                "{} {}",
                Theme::info().render(&result.mr_diffs_fetched.to_string()),
                Theme::dim().render("diffs")
            ));
        }
        if result.statuses_enriched > 0 {
            details.push(format!(
                "{} {}",
                Theme::info().render(&result.statuses_enriched.to_string()),
                Theme::dim().render("statuses updated")
            ));
        }
        if !details.is_empty() {
            // U+00B7 is the middle-dot separator used throughout sync output.
            let sep = Theme::dim().render(" \u{b7} ");
            println!(" {}", details.join(&sep));
        }

        // Documents: regeneration + embedding as a second detail line
        let mut doc_parts: Vec<String> = Vec::new();
        if result.documents_regenerated > 0 {
            doc_parts.push(format!(
                "{} {}",
                Theme::info().render(&result.documents_regenerated.to_string()),
                Theme::dim().render("docs regenerated")
            ));
        }
        if result.documents_embedded > 0 {
            doc_parts.push(format!(
                "{} {}",
                Theme::info().render(&result.documents_embedded.to_string()),
                Theme::dim().render("embedded")
            ));
        }
        if result.documents_errored > 0 {
            doc_parts
                .push(Theme::error().render(&format!("{} doc errors", result.documents_errored)));
        }
        if !doc_parts.is_empty() {
            let sep = Theme::dim().render(" \u{b7} ");
            println!(" {}", doc_parts.join(&sep));
        }

        // Errors: visually prominent, only if non-zero
        let mut errors: Vec<String> = Vec::new();
        if result.resource_events_failed > 0 {
            errors.push(format!("{} event failures", result.resource_events_failed));
        }
        if result.mr_diffs_failed > 0 {
            errors.push(format!("{} diff failures", result.mr_diffs_failed));
        }
        if result.status_enrichment_errors > 0 {
            errors.push(format!("{} status errors", result.status_enrichment_errors));
        }
        if result.embedding_failed > 0 {
            errors.push(format!("{} embedding failures", result.embedding_failed));
        }
        if !errors.is_empty() {
            println!(" {}", Theme::error().render(&errors.join(" \u{b7} ")));
        }

        println!();
    }

    // Stage timing table, printed independently of the summary above.
    if let Some(metrics) = metrics {
        let stages = metrics.extract_timings();
        if should_print_timings(show_timings, &stages) {
            print_timing_summary(&stages);
        }
    }
}
|
||||||
|
|
||||||
|
/// Build one summary row per project for the issue-ingest stage:
/// a left-padded project path followed by middle-dot-separated counts.
/// Zero counts are suppressed; failure counts render as warnings.
fn issue_sub_rows(projects: &[ProjectSummary]) -> Vec<String> {
    projects
        .iter()
        .map(|p| {
            let mut parts: Vec<String> = Vec::new();
            parts.push(format!(
                "{} {}",
                p.items_upserted,
                if p.items_upserted == 1 {
                    "issue"
                } else {
                    "issues"
                }
            ));
            if p.discussions_synced > 0 {
                parts.push(format!("{} discussions", p.discussions_synced));
            }
            // NOTE(review): triggered when statuses were merely *seen*, so
            // this can print "0 statuses updated" — confirm that's intended.
            if p.statuses_seen > 0 || p.statuses_enriched > 0 {
                parts.push(format!("{} statuses updated", p.statuses_enriched));
            }
            if p.events_fetched > 0 {
                parts.push(format!("{} events", p.events_fetched));
            }
            if p.status_errors > 0 {
                parts.push(Theme::warning().render(&format!("{} status errors", p.status_errors)));
            }
            if p.events_failed > 0 {
                parts.push(Theme::warning().render(&format!("{} event failures", p.events_failed)));
            }
            let sep = Theme::dim().render(" \u{b7} ");
            let detail = parts.join(&sep);
            // Pad the path to 30 columns so the detail columns line up.
            let path = Theme::muted().render(&format!("{:<30}", p.path));
            format!(" {path} {detail}")
        })
        .collect()
}
|
||||||
|
|
||||||
|
/// Build one summary row per project for the status-enrichment stage.
/// Errors take precedence over the "skipped" annotation; the skipped reason
/// is shown when available.
fn status_sub_rows(projects: &[ProjectStatusEnrichment]) -> Vec<String> {
    projects
        .iter()
        .map(|p| {
            // A whole-project error counts as one more error on top of the
            // per-item partial errors.
            let total_errors = p.partial_errors + usize::from(p.error.is_some());
            let mut parts: Vec<String> = vec![format!("{} statuses updated", p.enriched)];
            if p.cleared > 0 {
                parts.push(format!("{} cleared", p.cleared));
            }
            if p.seen > 0 {
                parts.push(format!("{} seen", p.seen));
            }
            if total_errors > 0 {
                parts.push(Theme::warning().render(&format!("{} errors", total_errors)));
            } else if p.mode == "skipped" {
                if let Some(reason) = &p.reason {
                    parts.push(Theme::dim().render(&format!("skipped ({reason})")));
                } else {
                    parts.push(Theme::dim().render("skipped"));
                }
            }
            let sep = Theme::dim().render(" \u{b7} ");
            let detail = parts.join(&sep);
            let path = Theme::muted().render(&format!("{:<30}", p.path));
            format!(" {path} {detail}")
        })
        .collect()
}
|
||||||
|
|
||||||
|
fn mr_sub_rows(projects: &[ProjectSummary]) -> Vec<String> {
|
||||||
|
projects
|
||||||
|
.iter()
|
||||||
|
.map(|p| {
|
||||||
|
let mut parts: Vec<String> = Vec::new();
|
||||||
|
parts.push(format!(
|
||||||
|
"{} {}",
|
||||||
|
p.items_upserted,
|
||||||
|
if p.items_upserted == 1 { "MR" } else { "MRs" }
|
||||||
|
));
|
||||||
|
if p.discussions_synced > 0 {
|
||||||
|
parts.push(format!("{} discussions", p.discussions_synced));
|
||||||
|
}
|
||||||
|
if p.mr_diffs_fetched > 0 {
|
||||||
|
parts.push(format!("{} diffs", p.mr_diffs_fetched));
|
||||||
|
}
|
||||||
|
if p.events_fetched > 0 {
|
||||||
|
parts.push(format!("{} events", p.events_fetched));
|
||||||
|
}
|
||||||
|
if p.mr_diffs_failed > 0 {
|
||||||
|
parts
|
||||||
|
.push(Theme::warning().render(&format!("{} diff failures", p.mr_diffs_failed)));
|
||||||
|
}
|
||||||
|
if p.events_failed > 0 {
|
||||||
|
parts.push(Theme::warning().render(&format!("{} event failures", p.events_failed)));
|
||||||
|
}
|
||||||
|
let sep = Theme::dim().render(" \u{b7} ");
|
||||||
|
let detail = parts.join(&sep);
|
||||||
|
let path = Theme::muted().render(&format!("{:<30}", p.path));
|
||||||
|
format!(" {path} {detail}")
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn emit_stage_line(
|
||||||
|
pb: &indicatif::ProgressBar,
|
||||||
|
icon: &str,
|
||||||
|
label: &str,
|
||||||
|
summary: &str,
|
||||||
|
elapsed: std::time::Duration,
|
||||||
|
) {
|
||||||
|
pb.finish_and_clear();
|
||||||
|
print_static_lines(&[format_stage_line(icon, label, summary, elapsed)]);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn emit_stage_block(
|
||||||
|
pb: &indicatif::ProgressBar,
|
||||||
|
icon: &str,
|
||||||
|
label: &str,
|
||||||
|
summary: &str,
|
||||||
|
elapsed: std::time::Duration,
|
||||||
|
sub_rows: &[String],
|
||||||
|
) {
|
||||||
|
pb.finish_and_clear();
|
||||||
|
let mut lines = Vec::with_capacity(1 + sub_rows.len());
|
||||||
|
lines.push(format_stage_line(icon, label, summary, elapsed));
|
||||||
|
lines.extend(sub_rows.iter().cloned());
|
||||||
|
print_static_lines(&lines);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn print_static_lines(lines: &[String]) {
|
||||||
|
crate::cli::progress::multi().suspend(|| {
|
||||||
|
for line in lines {
|
||||||
|
println!("{line}");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn should_print_timings(show_timings: bool, stages: &[StageTiming]) -> bool {
|
||||||
|
show_timings && !stages.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn append_failures(summary: &mut String, failures: &[(&str, usize)]) {
|
||||||
|
let rendered: Vec<String> = failures
|
||||||
|
.iter()
|
||||||
|
.filter_map(|(label, count)| {
|
||||||
|
(*count > 0).then_some(Theme::warning().render(&format!("{count} {label}")))
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
if !rendered.is_empty() {
|
||||||
|
summary.push_str(&format!(" ({})", rendered.join(", ")));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn summarize_status_enrichment(projects: &[ProjectStatusEnrichment]) -> (String, bool) {
|
||||||
|
let statuses_enriched: usize = projects.iter().map(|p| p.enriched).sum();
|
||||||
|
let statuses_seen: usize = projects.iter().map(|p| p.seen).sum();
|
||||||
|
let statuses_cleared: usize = projects.iter().map(|p| p.cleared).sum();
|
||||||
|
let status_errors: usize = projects
|
||||||
|
.iter()
|
||||||
|
.map(|p| p.partial_errors + usize::from(p.error.is_some()))
|
||||||
|
.sum();
|
||||||
|
let skipped = projects.iter().filter(|p| p.mode == "skipped").count();
|
||||||
|
|
||||||
|
let mut parts = vec![format!(
|
||||||
|
"{} statuses updated",
|
||||||
|
format_number(statuses_enriched as i64)
|
||||||
|
)];
|
||||||
|
if statuses_cleared > 0 {
|
||||||
|
parts.push(format!(
|
||||||
|
"{} cleared",
|
||||||
|
format_number(statuses_cleared as i64)
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if statuses_seen > 0 {
|
||||||
|
parts.push(format!("{} seen", format_number(statuses_seen as i64)));
|
||||||
|
}
|
||||||
|
if status_errors > 0 {
|
||||||
|
parts.push(format!("{} errors", format_number(status_errors as i64)));
|
||||||
|
} else if projects.is_empty() || skipped == projects.len() {
|
||||||
|
parts.push("skipped".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
(parts.join(" \u{b7} "), status_errors > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn section(title: &str) {
|
||||||
|
println!("{}", render::section_divider(title));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print the "Timing" section of the sync report.
///
/// NOTE(review): only the sub-stages of each top-level stage are printed
/// here (at depth 1); the top-level stage rows themselves are never emitted.
/// Presumably the stage totals were already shown live during the run —
/// confirm this is intentional if totals appear to be missing.
fn print_timing_summary(stages: &[StageTiming]) {
    section("Timing");
    for stage in stages {
        for sub in &stage.sub_stages {
            print_stage_line(sub, 1);
        }
    }
}
|
||||||
|
|
||||||
|
fn print_stage_line(stage: &StageTiming, depth: usize) {
|
||||||
|
let indent = " ".repeat(depth);
|
||||||
|
let name = if let Some(ref project) = stage.project {
|
||||||
|
format!("{} ({})", stage.name, project)
|
||||||
|
} else {
|
||||||
|
stage.name.clone()
|
||||||
|
};
|
||||||
|
let pad_width = 30_usize.saturating_sub(indent.len() + name.len());
|
||||||
|
let dots = Theme::dim().render(&".".repeat(pad_width.max(2)));
|
||||||
|
|
||||||
|
let time_str = Theme::bold().render(&format!("{:.1}s", stage.elapsed_ms as f64 / 1000.0));
|
||||||
|
|
||||||
|
let mut parts: Vec<String> = Vec::new();
|
||||||
|
if stage.items_processed > 0 {
|
||||||
|
parts.push(format!("{} items", stage.items_processed));
|
||||||
|
}
|
||||||
|
if stage.errors > 0 {
|
||||||
|
parts.push(Theme::error().render(&format!("{} errors", stage.errors)));
|
||||||
|
}
|
||||||
|
if stage.rate_limit_hits > 0 {
|
||||||
|
parts.push(Theme::warning().render(&format!("{} rate limits", stage.rate_limit_hits)));
|
||||||
|
}
|
||||||
|
|
||||||
|
if parts.is_empty() {
|
||||||
|
println!("{indent}{name} {dots} {time_str}");
|
||||||
|
} else {
|
||||||
|
let suffix = parts.join(" \u{b7} ");
|
||||||
|
println!("{indent}{name} {dots} {time_str} ({suffix})");
|
||||||
|
}
|
||||||
|
|
||||||
|
for sub in &stage.sub_stages {
|
||||||
|
print_stage_line(sub, depth + 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// JSON envelope for robot-mode sync output: `{ ok, data, meta }`.
#[derive(Serialize)]
struct SyncJsonOutput<'a> {
    // Always true on this success path; failures are reported elsewhere.
    ok: bool,
    // Borrowed sync result, serialized inline as the payload.
    data: &'a SyncResult,
    meta: SyncMeta,
}
|
||||||
|
|
||||||
|
/// Run-level metadata attached to the robot-mode JSON envelope.
#[derive(Serialize)]
struct SyncMeta {
    // Short identifier correlating this output with log lines.
    run_id: String,
    elapsed_ms: u64,
    // Per-stage timing breakdown; omitted when no metrics were captured.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    stages: Vec<StageTiming>,
}
|
||||||
|
|
||||||
|
pub fn print_sync_json(result: &SyncResult, elapsed_ms: u64, metrics: Option<&MetricsLayer>) {
|
||||||
|
let stages = metrics.map_or_else(Vec::new, MetricsLayer::extract_timings);
|
||||||
|
let output = SyncJsonOutput {
|
||||||
|
ok: true,
|
||||||
|
data: result,
|
||||||
|
meta: SyncMeta {
|
||||||
|
run_id: result.run_id.clone(),
|
||||||
|
elapsed_ms,
|
||||||
|
stages,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Aggregated preview produced by `sync --dry-run`: what each ingest stage
/// would touch, plus which optional pipeline stages would run.
#[derive(Debug, Default, Serialize)]
pub struct SyncDryRunResult {
    pub issues_preview: DryRunPreview,
    pub mrs_preview: DryRunPreview,
    // False when --no-docs / --no-embed were passed.
    pub would_generate_docs: bool,
    pub would_embed: bool,
}
|
||||||
|
|
||||||
|
async fn run_sync_dry_run(config: &Config, options: &SyncOptions) -> Result<SyncResult> {
|
||||||
|
// Get dry run previews for both issues and MRs
|
||||||
|
let issues_preview = run_ingest_dry_run(config, "issues", None, options.full)?;
|
||||||
|
let mrs_preview = run_ingest_dry_run(config, "mrs", None, options.full)?;
|
||||||
|
|
||||||
|
let dry_result = SyncDryRunResult {
|
||||||
|
issues_preview,
|
||||||
|
mrs_preview,
|
||||||
|
would_generate_docs: !options.no_docs,
|
||||||
|
would_embed: !options.no_embed,
|
||||||
|
};
|
||||||
|
|
||||||
|
if options.robot_mode {
|
||||||
|
print_sync_dry_run_json(&dry_result);
|
||||||
|
} else {
|
||||||
|
print_sync_dry_run(&dry_result);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return an empty SyncResult since this is just a preview
|
||||||
|
Ok(SyncResult::default())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_sync_dry_run(result: &SyncDryRunResult) {
|
||||||
|
println!(
|
||||||
|
"\n {} {}",
|
||||||
|
Theme::info().bold().render("Dry run"),
|
||||||
|
Theme::dim().render("(no changes will be made)")
|
||||||
|
);
|
||||||
|
|
||||||
|
print_dry_run_entity("Issues", &result.issues_preview);
|
||||||
|
print_dry_run_entity("Merge Requests", &result.mrs_preview);
|
||||||
|
|
||||||
|
// Pipeline stages
|
||||||
|
section("Pipeline");
|
||||||
|
let mut stages: Vec<String> = Vec::new();
|
||||||
|
if result.would_generate_docs {
|
||||||
|
stages.push("generate-docs".to_string());
|
||||||
|
} else {
|
||||||
|
stages.push(Theme::dim().render("generate-docs (skip)"));
|
||||||
|
}
|
||||||
|
if result.would_embed {
|
||||||
|
stages.push("embed".to_string());
|
||||||
|
} else {
|
||||||
|
stages.push(Theme::dim().render("embed (skip)"));
|
||||||
|
}
|
||||||
|
println!(" {}", stages.join(" \u{b7} "));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn print_dry_run_entity(label: &str, preview: &DryRunPreview) {
|
||||||
|
section(label);
|
||||||
|
let mode = if preview.sync_mode == "full" {
|
||||||
|
Theme::warning().render("full")
|
||||||
|
} else {
|
||||||
|
Theme::success().render("incremental")
|
||||||
|
};
|
||||||
|
println!(" {} \u{b7} {} projects", mode, preview.projects.len());
|
||||||
|
for project in &preview.projects {
|
||||||
|
let sync_status = if !project.has_cursor {
|
||||||
|
Theme::warning().render("initial sync")
|
||||||
|
} else {
|
||||||
|
Theme::success().render("incremental")
|
||||||
|
};
|
||||||
|
if project.existing_count > 0 {
|
||||||
|
println!(
|
||||||
|
" {} \u{b7} {} \u{b7} {} existing",
|
||||||
|
&project.path, sync_status, project.existing_count
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
println!(" {} \u{b7} {}", &project.path, sync_status);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// JSON envelope for robot-mode dry-run output.
#[derive(Serialize)]
struct SyncDryRunJsonOutput {
    ok: bool,
    // Always true here; lets consumers distinguish previews from real runs.
    dry_run: bool,
    data: SyncDryRunJsonData,
}
|
||||||
|
|
||||||
|
/// Payload of the dry-run JSON envelope: the ordered list of pipeline stages.
#[derive(Serialize)]
struct SyncDryRunJsonData {
    stages: Vec<SyncDryRunStage>,
}
|
||||||
|
|
||||||
|
/// One pipeline stage in the dry-run JSON report.
#[derive(Serialize)]
struct SyncDryRunStage {
    name: String,
    would_run: bool,
    // Ingest stages carry a per-project preview; docs/embed stages do not.
    #[serde(skip_serializing_if = "Option::is_none")]
    preview: Option<DryRunPreview>,
}
|
||||||
|
|
||||||
|
pub fn print_sync_dry_run_json(result: &SyncDryRunResult) {
|
||||||
|
let output = SyncDryRunJsonOutput {
|
||||||
|
ok: true,
|
||||||
|
dry_run: true,
|
||||||
|
data: SyncDryRunJsonData {
|
||||||
|
stages: vec![
|
||||||
|
SyncDryRunStage {
|
||||||
|
name: "ingest_issues".to_string(),
|
||||||
|
would_run: true,
|
||||||
|
preview: Some(result.issues_preview.clone()),
|
||||||
|
},
|
||||||
|
SyncDryRunStage {
|
||||||
|
name: "ingest_mrs".to_string(),
|
||||||
|
would_run: true,
|
||||||
|
preview: Some(result.mrs_preview.clone()),
|
||||||
|
},
|
||||||
|
SyncDryRunStage {
|
||||||
|
name: "generate_docs".to_string(),
|
||||||
|
would_run: result.would_generate_docs,
|
||||||
|
preview: None,
|
||||||
|
},
|
||||||
|
SyncDryRunStage {
|
||||||
|
name: "embed".to_string(),
|
||||||
|
would_run: result.would_embed,
|
||||||
|
preview: None,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
#[path = "sync_tests.rs"]
|
||||||
|
mod tests;
|
||||||
380
src/cli/commands/sync/run.rs
Normal file
380
src/cli/commands/sync/run.rs
Normal file
@@ -0,0 +1,380 @@
|
|||||||
|
/// Flags controlling a `lore sync` run, mirrored from the CLI arguments.
#[derive(Debug, Default)]
pub struct SyncOptions {
    // Re-sync everything instead of resuming from stored cursors.
    pub full: bool,
    pub force: bool,
    // Stage opt-outs (--no-embed / --no-docs / --no-events).
    pub no_embed: bool,
    pub no_docs: bool,
    pub no_events: bool,
    // Machine-readable JSON output instead of progress bars.
    pub robot_mode: bool,
    // Preview only; no writes (see run_sync_dry_run).
    pub dry_run: bool,
    // Surgical targeting: sync only these issue/MR IIDs (see is_surgical).
    pub issue_iids: Vec<u64>,
    pub mr_iids: Vec<u64>,
    // Optional project scope for surgical runs.
    pub project: Option<String>,
    pub preflight_only: bool,
}
|
||||||
|
|
||||||
|
impl SyncOptions {
|
||||||
|
pub const MAX_SURGICAL_TARGETS: usize = 100;
|
||||||
|
|
||||||
|
pub fn is_surgical(&self) -> bool {
|
||||||
|
!self.issue_iids.is_empty() || !self.mr_iids.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The explicit issue/MR IIDs targeted by a surgical sync, echoed back in
/// the JSON result.
#[derive(Debug, Default, Serialize)]
pub struct SurgicalIids {
    pub issues: Vec<u64>,
    pub merge_requests: Vec<u64>,
}
|
||||||
|
|
||||||
|
/// Per-entity outcome of a surgical sync.
#[derive(Debug, Serialize)]
pub struct EntitySyncResult {
    // Entity kind, e.g. issue vs merge request — exact vocabulary is set by
    // the surgical pipeline (not visible here); TODO(review): confirm.
    pub entity_type: String,
    pub iid: u64,
    pub outcome: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
    // Name suggests a time-of-check/time-of-use re-check explanation;
    // confirm against the surgical pipeline before relying on semantics.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub toctou_reason: Option<String>,
}
|
||||||
|
|
||||||
|
/// Aggregated counters for a completed (or partially completed) sync run.
///
/// Serialized as the `data` payload of robot-mode output; `#[serde(skip)]`
/// fields feed only the human-readable terminal report.
#[derive(Debug, Default, Serialize)]
pub struct SyncResult {
    // Correlates with log lines; carried in the JSON meta, not the data.
    #[serde(skip)]
    pub run_id: String,
    pub issues_updated: usize,
    pub mrs_updated: usize,
    pub discussions_fetched: usize,
    pub resource_events_fetched: usize,
    pub resource_events_failed: usize,
    pub mr_diffs_fetched: usize,
    pub mr_diffs_failed: usize,
    pub documents_regenerated: usize,
    pub documents_errored: usize,
    pub documents_embedded: usize,
    pub embedding_failed: usize,
    pub status_enrichment_errors: usize,
    pub statuses_enriched: usize,
    // The Option fields below stay None on a normal run; presumably they
    // are filled in by the surgical pipeline — confirm in run_sync_surgical.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub surgical_mode: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub surgical_iids: Option<SurgicalIids>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub entity_results: Option<Vec<EntitySyncResult>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub preflight_only: Option<bool>,
    // Per-project breakdowns used for the terminal sub-rows; not serialized.
    #[serde(skip)]
    pub issue_projects: Vec<ProjectSummary>,
    #[serde(skip)]
    pub mr_projects: Vec<ProjectSummary>,
}
|
||||||
|
|
||||||
|
/// Alias for [`Theme::color_icon`] to keep call sites concise.
/// `has_errors` selects how the icon is colored (exact styling lives in `Theme`).
fn color_icon(icon: &str, has_errors: bool) -> String {
    Theme::color_icon(icon, has_errors)
}
|
||||||
|
|
||||||
|
/// Run the full sync pipeline: ingest issues, ingest merge requests,
/// regenerate documents, then embed — printing a summary block per stage.
///
/// Dispatches to the surgical pipeline when specific IIDs were requested
/// and to the dry-run preview when `options.dry_run` is set. Checks
/// `signal` between major stages and returns partial results on shutdown.
/// `run_id` is used for log/span correlation; a short random id is
/// generated when the caller does not supply one.
pub async fn run_sync(
    config: &Config,
    options: SyncOptions,
    run_id: Option<&str>,
    signal: &ShutdownSignal,
) -> Result<SyncResult> {
    // Surgical dispatch: if any IIDs specified, route to surgical pipeline
    if options.is_surgical() {
        return run_sync_surgical(config, options, run_id, signal).await;
    }

    // Generate a short run id when none was provided (first 8 hex chars of a UUID).
    let generated_id;
    let run_id = match run_id {
        Some(id) => id,
        None => {
            generated_id = uuid::Uuid::new_v4().simple().to_string();
            &generated_id[..8]
        }
    };
    let span = tracing::info_span!("sync", %run_id);

    async move {
        let mut result = SyncResult {
            run_id: run_id.to_string(),
            ..SyncResult::default()
        };

        // Handle dry_run mode - show preview without making any changes
        if options.dry_run {
            return run_sync_dry_run(config, &options).await;
        }

        // Robot mode suppresses interactive ingest output entirely.
        let ingest_display = if options.robot_mode {
            IngestDisplay::silent()
        } else {
            IngestDisplay::progress_only()
        };

        // ── Stage: Issues ──
        let stage_start = Instant::now();
        let spinner = stage_spinner_v2(Icons::sync(), "Issues", "fetching...", options.robot_mode);
        debug!("Sync: ingesting issues");
        let issues_result = run_ingest(
            config,
            "issues",
            None,
            options.force,
            options.full,
            false, // dry_run - sync has its own dry_run handling
            ingest_display,
            Some(spinner.clone()),
            signal,
        )
        .await?;
        // Fold the ingest stage's counters into the run-level result.
        result.issues_updated = issues_result.issues_upserted;
        result.discussions_fetched += issues_result.discussions_fetched;
        result.resource_events_fetched += issues_result.resource_events_fetched;
        result.resource_events_failed += issues_result.resource_events_failed;
        result.status_enrichment_errors += issues_result.status_enrichment_errors;
        for sep in &issues_result.status_enrichment_projects {
            result.statuses_enriched += sep.enriched;
        }
        result.issue_projects = issues_result.project_summaries;
        let issues_elapsed = stage_start.elapsed();
        // Human mode: print a separate "Status" block for status enrichment
        // before the Issues summary block.
        if !options.robot_mode {
            let (status_summary, status_has_errors) =
                summarize_status_enrichment(&issues_result.status_enrichment_projects);
            let status_icon = color_icon(
                if status_has_errors {
                    Icons::warning()
                } else {
                    Icons::success()
                },
                status_has_errors,
            );
            let mut status_lines = vec![format_stage_line(
                &status_icon,
                "Status",
                &status_summary,
                issues_elapsed,
            )];
            status_lines.extend(status_sub_rows(&issues_result.status_enrichment_projects));
            print_static_lines(&status_lines);
        }
        let mut issues_summary = format!(
            "{} issues from {} {}",
            format_number(result.issues_updated as i64),
            issues_result.projects_synced,
            if issues_result.projects_synced == 1 { "project" } else { "projects" }
        );
        append_failures(
            &mut issues_summary,
            &[
                ("event failures", issues_result.resource_events_failed),
                ("status errors", issues_result.status_enrichment_errors),
            ],
        );
        let issues_icon = color_icon(
            if issues_result.resource_events_failed > 0 || issues_result.status_enrichment_errors > 0
            {
                Icons::warning()
            } else {
                Icons::success()
            },
            issues_result.resource_events_failed > 0 || issues_result.status_enrichment_errors > 0,
        );
        if options.robot_mode {
            emit_stage_line(&spinner, &issues_icon, "Issues", &issues_summary, issues_elapsed);
        } else {
            let sub_rows = issue_sub_rows(&result.issue_projects);
            emit_stage_block(
                &spinner,
                &issues_icon,
                "Issues",
                &issues_summary,
                issues_elapsed,
                &sub_rows,
            );
        }

        if signal.is_cancelled() {
            debug!("Shutdown requested after issues stage, returning partial sync results");
            return Ok(result);
        }

        // ── Stage: MRs ──
        let stage_start = Instant::now();
        let spinner = stage_spinner_v2(Icons::sync(), "MRs", "fetching...", options.robot_mode);
        debug!("Sync: ingesting merge requests");
        let mrs_result = run_ingest(
            config,
            "mrs",
            None,
            options.force,
            options.full,
            false, // dry_run - sync has its own dry_run handling
            ingest_display,
            Some(spinner.clone()),
            signal,
        )
        .await?;
        result.mrs_updated = mrs_result.mrs_upserted;
        result.discussions_fetched += mrs_result.discussions_fetched;
        result.resource_events_fetched += mrs_result.resource_events_fetched;
        result.resource_events_failed += mrs_result.resource_events_failed;
        result.mr_diffs_fetched += mrs_result.mr_diffs_fetched;
        result.mr_diffs_failed += mrs_result.mr_diffs_failed;
        result.mr_projects = mrs_result.project_summaries;
        let mrs_elapsed = stage_start.elapsed();
        let mut mrs_summary = format!(
            "{} merge requests from {} {}",
            format_number(result.mrs_updated as i64),
            mrs_result.projects_synced,
            if mrs_result.projects_synced == 1 { "project" } else { "projects" }
        );
        append_failures(
            &mut mrs_summary,
            &[
                ("event failures", mrs_result.resource_events_failed),
                ("diff failures", mrs_result.mr_diffs_failed),
            ],
        );
        let mrs_icon = color_icon(
            if mrs_result.resource_events_failed > 0 || mrs_result.mr_diffs_failed > 0 {
                Icons::warning()
            } else {
                Icons::success()
            },
            mrs_result.resource_events_failed > 0 || mrs_result.mr_diffs_failed > 0,
        );
        if options.robot_mode {
            emit_stage_line(&spinner, &mrs_icon, "MRs", &mrs_summary, mrs_elapsed);
        } else {
            let sub_rows = mr_sub_rows(&result.mr_projects);
            emit_stage_block(&spinner, &mrs_icon, "MRs", &mrs_summary, mrs_elapsed, &sub_rows);
        }

        if signal.is_cancelled() {
            debug!("Shutdown requested after MRs stage, returning partial sync results");
            return Ok(result);
        }

        // ── Stage: Docs ──
        if !options.no_docs {
            let stage_start = Instant::now();
            let spinner = stage_spinner_v2(Icons::sync(), "Docs", "generating...", options.robot_mode);
            debug!("Sync: generating documents");

            // Nested bar driven by the generator's progress callback; the
            // total is only known once the callback first reports it.
            let docs_bar = nested_progress("Docs", 0, options.robot_mode);
            let docs_bar_clone = docs_bar.clone();
            let docs_cb: Box<dyn Fn(usize, usize)> = Box::new(move |processed, total| {
                if total > 0 {
                    docs_bar_clone.set_length(total as u64);
                    docs_bar_clone.set_position(processed as u64);
                }
            });
            let docs_result = run_generate_docs(config, options.full, None, Some(docs_cb))?;
            result.documents_regenerated = docs_result.regenerated;
            result.documents_errored = docs_result.errored;
            docs_bar.finish_and_clear();
            let mut docs_summary = format!(
                "{} documents generated",
                format_number(result.documents_regenerated as i64),
            );
            append_failures(&mut docs_summary, &[("errors", docs_result.errored)]);
            let docs_icon = color_icon(
                if docs_result.errored > 0 {
                    Icons::warning()
                } else {
                    Icons::success()
                },
                docs_result.errored > 0,
            );
            emit_stage_line(&spinner, &docs_icon, "Docs", &docs_summary, stage_start.elapsed());
        } else {
            debug!("Sync: skipping document generation (--no-docs)");
        }

        // ── Stage: Embed ──
        if !options.no_embed {
            let stage_start = Instant::now();
            let spinner = stage_spinner_v2(Icons::sync(), "Embed", "preparing...", options.robot_mode);
            debug!("Sync: embedding documents");

            let embed_bar = nested_progress("Embed", 0, options.robot_mode);
            let embed_bar_clone = embed_bar.clone();
            let embed_cb: Box<dyn Fn(usize, usize)> = Box::new(move |processed, total| {
                if total > 0 {
                    embed_bar_clone.set_length(total as u64);
                    embed_bar_clone.set_position(processed as u64);
                }
            });
            // Embedding failure is non-fatal: the sync is still useful
            // without vectors, so errors degrade to a warning line.
            match run_embed(config, options.full, false, Some(embed_cb), signal).await {
                Ok(embed_result) => {
                    result.documents_embedded = embed_result.docs_embedded;
                    result.embedding_failed = embed_result.failed;
                    embed_bar.finish_and_clear();
                    let mut embed_summary = format!(
                        "{} chunks embedded",
                        format_number(embed_result.chunks_embedded as i64),
                    );
                    let mut tail_parts = Vec::new();
                    if embed_result.failed > 0 {
                        tail_parts.push(format!("{} failed", embed_result.failed));
                    }
                    if embed_result.skipped > 0 {
                        tail_parts.push(format!("{} skipped", embed_result.skipped));
                    }
                    if !tail_parts.is_empty() {
                        embed_summary.push_str(&format!(" ({})", tail_parts.join(", ")));
                    }
                    let embed_icon = color_icon(
                        if embed_result.failed > 0 {
                            Icons::warning()
                        } else {
                            Icons::success()
                        },
                        embed_result.failed > 0,
                    );
                    emit_stage_line(
                        &spinner,
                        &embed_icon,
                        "Embed",
                        &embed_summary,
                        stage_start.elapsed(),
                    );
                }
                Err(e) => {
                    embed_bar.finish_and_clear();
                    let warn_summary = format!("skipped ({})", e);
                    let warn_icon = color_icon(Icons::warning(), true);
                    emit_stage_line(
                        &spinner,
                        &warn_icon,
                        "Embed",
                        &warn_summary,
                        stage_start.elapsed(),
                    );
                    warn!(error = %e, "Embedding stage failed (Ollama may be unavailable), continuing");
                }
            }
        } else {
            debug!("Sync: skipping embedding (--no-embed)");
        }

        debug!(
            issues = result.issues_updated,
            mrs = result.mrs_updated,
            discussions = result.discussions_fetched,
            resource_events = result.resource_events_fetched,
            resource_events_failed = result.resource_events_failed,
            mr_diffs = result.mr_diffs_fetched,
            mr_diffs_failed = result.mr_diffs_failed,
            docs = result.documents_regenerated,
            embedded = result.documents_embedded,
            "Sync pipeline complete"
        );

        Ok(result)
    }
    .instrument(span)
    .await
}
|
||||||
|
|
||||||
@@ -12,11 +12,11 @@ use crate::core::lock::{AppLock, LockOptions};
|
|||||||
use crate::core::paths::get_db_path;
|
use crate::core::paths::get_db_path;
|
||||||
use crate::core::project::resolve_project;
|
use crate::core::project::resolve_project;
|
||||||
use crate::core::shutdown::ShutdownSignal;
|
use crate::core::shutdown::ShutdownSignal;
|
||||||
use crate::core::sync_run::SyncRunRecorder;
|
|
||||||
use crate::documents::{SourceType, regenerate_dirty_documents_for_sources};
|
use crate::documents::{SourceType, regenerate_dirty_documents_for_sources};
|
||||||
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
|
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
|
||||||
use crate::embedding::pipeline::{DEFAULT_EMBED_CONCURRENCY, embed_documents_by_ids};
|
use crate::embedding::pipeline::{DEFAULT_EMBED_CONCURRENCY, embed_documents_by_ids};
|
||||||
use crate::gitlab::GitLabClient;
|
use crate::gitlab::GitLabClient;
|
||||||
|
use crate::ingestion::storage::sync_run::SyncRunRecorder;
|
||||||
use crate::ingestion::surgical::{
|
use crate::ingestion::surgical::{
|
||||||
fetch_dependents_for_issue, fetch_dependents_for_mr, ingest_issue_by_iid, ingest_mr_by_iid,
|
fetch_dependents_for_issue, fetch_dependents_for_mr, ingest_issue_by_iid, ingest_mr_by_iid,
|
||||||
preflight_fetch,
|
preflight_fetch,
|
||||||
268
src/cli/commands/sync/sync_tests.rs
Normal file
268
src/cli/commands/sync/sync_tests.rs
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
use super::*;
|
||||||
|
|
||||||
|
fn default_options() -> SyncOptions {
|
||||||
|
SyncOptions {
|
||||||
|
full: false,
|
||||||
|
force: false,
|
||||||
|
no_embed: false,
|
||||||
|
no_docs: false,
|
||||||
|
no_events: false,
|
||||||
|
robot_mode: false,
|
||||||
|
dry_run: false,
|
||||||
|
issue_iids: vec![],
|
||||||
|
mr_iids: vec![],
|
||||||
|
project: None,
|
||||||
|
preflight_only: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
fn append_failures_skips_zeroes() {
    // All-zero counts must leave the summary untouched.
    let mut text = String::from("base");
    append_failures(&mut text, &[("errors", 0), ("failures", 0)]);
    assert_eq!(text, "base");
}
|
||||||
|
|
||||||
|
#[test]
fn append_failures_renders_non_zero_counts() {
    let mut text = String::from("base");
    append_failures(&mut text, &[("errors", 2), ("failures", 1)]);
    // Original prefix is preserved and each non-zero count is rendered.
    for needle in ["base", "2 errors", "1 failures"] {
        assert!(text.contains(needle));
    }
}
|
||||||
|
|
||||||
|
#[test]
fn summarize_status_enrichment_reports_skipped_when_all_skipped() {
    // A single project that skipped enrichment entirely.
    let input = vec![ProjectStatusEnrichment {
        path: "vs/typescript-code".to_string(),
        mode: "skipped".to_string(),
        reason: None,
        seen: 0,
        enriched: 0,
        cleared: 0,
        without_widget: 0,
        partial_errors: 0,
        first_partial_error: None,
        error: None,
    }];

    let (summary, has_errors) = summarize_status_enrichment(&input);
    assert!(!has_errors);
    assert!(summary.contains("0 statuses updated"));
    assert!(summary.contains("skipped"));
}
|
||||||
|
|
||||||
|
#[test]
fn summarize_status_enrichment_reports_errors() {
    // 2 partial errors + 1 project-level error => 3 total errors.
    let input = vec![ProjectStatusEnrichment {
        path: "vs/typescript-code".to_string(),
        mode: "fetched".to_string(),
        reason: None,
        seen: 3,
        enriched: 1,
        cleared: 1,
        without_widget: 0,
        partial_errors: 2,
        first_partial_error: None,
        error: Some("boom".to_string()),
    }];

    let (summary, has_errors) = summarize_status_enrichment(&input);
    assert!(has_errors);
    for needle in ["1 statuses updated", "1 cleared", "3 seen", "3 errors"] {
        assert!(summary.contains(needle));
    }
}
|
||||||
|
|
||||||
|
#[test]
fn should_print_timings_only_when_enabled_and_non_empty() {
    let one_stage = vec![StageTiming {
        name: "x".to_string(),
        elapsed_ms: 10,
        items_processed: 0,
        items_skipped: 0,
        errors: 0,
        rate_limit_hits: 0,
        retries: 0,
        project: None,
        sub_stages: vec![],
    }];

    // Only the (enabled, non-empty) combination prints.
    assert!(should_print_timings(true, &one_stage));
    assert!(!should_print_timings(false, &one_stage));
    assert!(!should_print_timings(true, &[]));
}
|
||||||
|
|
||||||
|
#[test]
fn issue_sub_rows_include_project_and_statuses() {
    let summary = ProjectSummary {
        path: "vs/typescript-code".to_string(),
        items_upserted: 2,
        discussions_synced: 0,
        events_fetched: 0,
        events_failed: 0,
        statuses_enriched: 1,
        statuses_seen: 5,
        status_errors: 0,
        mr_diffs_fetched: 0,
        mr_diffs_failed: 0,
    };

    let rows = issue_sub_rows(&[summary]);
    assert_eq!(rows.len(), 1);
    // One row carrying the project path plus its key counters.
    for needle in ["vs/typescript-code", "2 issues", "1 statuses updated"] {
        assert!(rows[0].contains(needle));
    }
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn mr_sub_rows_include_project_and_diff_failures() {
|
||||||
|
let rows = mr_sub_rows(&[ProjectSummary {
|
||||||
|
path: "vs/python-code".to_string(),
|
||||||
|
items_upserted: 3,
|
||||||
|
discussions_synced: 0,
|
||||||
|
events_fetched: 0,
|
||||||
|
events_failed: 0,
|
||||||
|
statuses_enriched: 0,
|
||||||
|
statuses_seen: 0,
|
||||||
|
status_errors: 0,
|
||||||
|
mr_diffs_fetched: 4,
|
||||||
|
mr_diffs_failed: 1,
|
||||||
|
}]);
|
||||||
|
|
||||||
|
assert_eq!(rows.len(), 1);
|
||||||
|
assert!(rows[0].contains("vs/python-code"));
|
||||||
|
assert!(rows[0].contains("3 MRs"));
|
||||||
|
assert!(rows[0].contains("4 diffs"));
|
||||||
|
assert!(rows[0].contains("1 diff failures"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn status_sub_rows_include_project_and_skip_reason() {
|
||||||
|
let rows = status_sub_rows(&[ProjectStatusEnrichment {
|
||||||
|
path: "vs/python-code".to_string(),
|
||||||
|
mode: "skipped".to_string(),
|
||||||
|
reason: Some("disabled".to_string()),
|
||||||
|
seen: 0,
|
||||||
|
enriched: 0,
|
||||||
|
cleared: 0,
|
||||||
|
without_widget: 0,
|
||||||
|
partial_errors: 0,
|
||||||
|
first_partial_error: None,
|
||||||
|
error: None,
|
||||||
|
}]);
|
||||||
|
|
||||||
|
assert_eq!(rows.len(), 1);
|
||||||
|
assert!(rows[0].contains("vs/python-code"));
|
||||||
|
assert!(rows[0].contains("0 statuses updated"));
|
||||||
|
assert!(rows[0].contains("skipped (disabled)"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn is_surgical_with_issues() {
|
||||||
|
let opts = SyncOptions {
|
||||||
|
issue_iids: vec![1],
|
||||||
|
..default_options()
|
||||||
|
};
|
||||||
|
assert!(opts.is_surgical());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn is_surgical_with_mrs() {
|
||||||
|
let opts = SyncOptions {
|
||||||
|
mr_iids: vec![10],
|
||||||
|
..default_options()
|
||||||
|
};
|
||||||
|
assert!(opts.is_surgical());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn is_surgical_empty() {
|
||||||
|
let opts = default_options();
|
||||||
|
assert!(!opts.is_surgical());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn max_surgical_targets_is_100() {
|
||||||
|
assert_eq!(SyncOptions::MAX_SURGICAL_TARGETS, 100);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn sync_result_default_omits_surgical_fields() {
|
||||||
|
let result = SyncResult::default();
|
||||||
|
let json = serde_json::to_value(&result).unwrap();
|
||||||
|
assert!(json.get("surgical_mode").is_none());
|
||||||
|
assert!(json.get("surgical_iids").is_none());
|
||||||
|
assert!(json.get("entity_results").is_none());
|
||||||
|
assert!(json.get("preflight_only").is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn sync_result_with_surgical_fields_serializes_correctly() {
|
||||||
|
let result = SyncResult {
|
||||||
|
surgical_mode: Some(true),
|
||||||
|
surgical_iids: Some(SurgicalIids {
|
||||||
|
issues: vec![7, 42],
|
||||||
|
merge_requests: vec![10],
|
||||||
|
}),
|
||||||
|
entity_results: Some(vec![
|
||||||
|
EntitySyncResult {
|
||||||
|
entity_type: "issue".to_string(),
|
||||||
|
iid: 7,
|
||||||
|
outcome: "synced".to_string(),
|
||||||
|
error: None,
|
||||||
|
toctou_reason: None,
|
||||||
|
},
|
||||||
|
EntitySyncResult {
|
||||||
|
entity_type: "issue".to_string(),
|
||||||
|
iid: 42,
|
||||||
|
outcome: "skipped_toctou".to_string(),
|
||||||
|
error: None,
|
||||||
|
toctou_reason: Some("updated_at changed".to_string()),
|
||||||
|
},
|
||||||
|
]),
|
||||||
|
preflight_only: Some(false),
|
||||||
|
..SyncResult::default()
|
||||||
|
};
|
||||||
|
let json = serde_json::to_value(&result).unwrap();
|
||||||
|
assert_eq!(json["surgical_mode"], true);
|
||||||
|
assert_eq!(json["surgical_iids"]["issues"], serde_json::json!([7, 42]));
|
||||||
|
assert_eq!(json["entity_results"].as_array().unwrap().len(), 2);
|
||||||
|
assert_eq!(json["entity_results"][1]["outcome"], "skipped_toctou");
|
||||||
|
assert_eq!(json["preflight_only"], false);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn entity_sync_result_omits_none_fields() {
|
||||||
|
let entity = EntitySyncResult {
|
||||||
|
entity_type: "merge_request".to_string(),
|
||||||
|
iid: 10,
|
||||||
|
outcome: "synced".to_string(),
|
||||||
|
error: None,
|
||||||
|
toctou_reason: None,
|
||||||
|
};
|
||||||
|
let json = serde_json::to_value(&entity).unwrap();
|
||||||
|
assert!(json.get("error").is_none());
|
||||||
|
assert!(json.get("toctou_reason").is_none());
|
||||||
|
assert!(json.get("entity_type").is_some());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn is_surgical_with_both_issues_and_mrs() {
|
||||||
|
let opts = SyncOptions {
|
||||||
|
issue_iids: vec![1, 2],
|
||||||
|
mr_iids: vec![10],
|
||||||
|
..default_options()
|
||||||
|
};
|
||||||
|
assert!(opts.is_surgical());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn is_not_surgical_with_only_project() {
|
||||||
|
let opts = SyncOptions {
|
||||||
|
project: Some("group/repo".to_string()),
|
||||||
|
..default_options()
|
||||||
|
};
|
||||||
|
assert!(!opts.is_surgical());
|
||||||
|
}
|
||||||
@@ -313,7 +313,7 @@ pub fn print_sync_status_json(result: &SyncStatusResult, elapsed_ms: u64) {
|
|||||||
system_notes: result.summary.system_note_count,
|
system_notes: result.summary.system_note_count,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
|
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
|
|||||||
@@ -8,13 +8,13 @@ use crate::core::error::{LoreError, Result};
|
|||||||
use crate::core::paths::get_db_path;
|
use crate::core::paths::get_db_path;
|
||||||
use crate::core::project::resolve_project;
|
use crate::core::project::resolve_project;
|
||||||
use crate::core::time::{ms_to_iso, parse_since};
|
use crate::core::time::{ms_to_iso, parse_since};
|
||||||
use crate::core::timeline::{
|
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
|
||||||
|
use crate::timeline::collect::collect_events;
|
||||||
|
use crate::timeline::expand::expand_timeline;
|
||||||
|
use crate::timeline::seed::{seed_timeline, seed_timeline_direct};
|
||||||
|
use crate::timeline::{
|
||||||
EntityRef, ExpandedEntityRef, TimelineEvent, TimelineEventType, TimelineResult, UnresolvedRef,
|
EntityRef, ExpandedEntityRef, TimelineEvent, TimelineEventType, TimelineResult, UnresolvedRef,
|
||||||
};
|
};
|
||||||
use crate::core::timeline_collect::collect_events;
|
|
||||||
use crate::core::timeline_expand::expand_timeline;
|
|
||||||
use crate::core::timeline_seed::{seed_timeline, seed_timeline_direct};
|
|
||||||
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
|
|
||||||
|
|
||||||
/// Parameters for running the timeline pipeline.
|
/// Parameters for running the timeline pipeline.
|
||||||
pub struct TimelineParams {
|
pub struct TimelineParams {
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
use crate::cli::render::{Icons, Theme};
|
use crate::cli::render::{Icons, Theme};
|
||||||
|
use crate::core::error::{LoreError, Result};
|
||||||
use crate::core::trace::{TraceChain, TraceResult};
|
use crate::core::trace::{TraceChain, TraceResult};
|
||||||
|
|
||||||
/// Parse a path with optional `:line` suffix.
|
/// Parse a path with optional `:line` suffix.
|
||||||
@@ -152,7 +153,11 @@ fn truncate_body(body: &str, max: usize) -> String {
|
|||||||
format!("{}...", &body[..boundary])
|
format!("{}...", &body[..boundary])
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_trace_json(result: &TraceResult, elapsed_ms: u64, line_requested: Option<u32>) {
|
pub fn print_trace_json(
|
||||||
|
result: &TraceResult,
|
||||||
|
elapsed_ms: u64,
|
||||||
|
line_requested: Option<u32>,
|
||||||
|
) -> Result<()> {
|
||||||
// Truncate discussion bodies for token efficiency in robot mode
|
// Truncate discussion bodies for token efficiency in robot mode
|
||||||
let chains: Vec<serde_json::Value> = result
|
let chains: Vec<serde_json::Value> = result
|
||||||
.trace_chains
|
.trace_chains
|
||||||
@@ -205,7 +210,12 @@ pub fn print_trace_json(result: &TraceResult, elapsed_ms: u64, line_requested: O
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
println!("{}", serde_json::to_string(&output).unwrap_or_default());
|
println!(
|
||||||
|
"{}",
|
||||||
|
serde_json::to_string(&output)
|
||||||
|
.map_err(|e| LoreError::Other(format!("JSON serialization failed: {e}")))?
|
||||||
|
);
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|||||||
@@ -376,7 +376,7 @@ pub fn print_who_json(run: &WhoRun, args: &WhoArgs, elapsed_ms: u64) {
|
|||||||
resolved_input,
|
resolved_input,
|
||||||
result: data,
|
result: data,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut value = serde_json::to_value(&output).unwrap_or_else(|e| {
|
let mut value = serde_json::to_value(&output).unwrap_or_else(|e| {
|
||||||
|
|||||||
@@ -1,12 +1,5 @@
|
|||||||
use super::*;
|
use super::*;
|
||||||
use crate::core::db::{create_connection, run_migrations};
|
use crate::test_support::{insert_project, setup_test_db};
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
fn setup_test_db() -> Connection {
|
|
||||||
let conn = create_connection(Path::new(":memory:")).unwrap();
|
|
||||||
run_migrations(&conn).unwrap();
|
|
||||||
conn
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_scoring() -> ScoringConfig {
|
fn default_scoring() -> ScoringConfig {
|
||||||
ScoringConfig::default()
|
ScoringConfig::default()
|
||||||
@@ -17,20 +10,6 @@ fn test_as_of_ms() -> i64 {
|
|||||||
now_ms() + 1000
|
now_ms() + 1000
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert_project(conn: &Connection, id: i64, path: &str) {
|
|
||||||
conn.execute(
|
|
||||||
"INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
|
|
||||||
VALUES (?1, ?2, ?3, ?4)",
|
|
||||||
rusqlite::params![
|
|
||||||
id,
|
|
||||||
id * 100,
|
|
||||||
path,
|
|
||||||
format!("https://git.example.com/{}", path)
|
|
||||||
],
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn insert_mr(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str, state: &str) {
|
fn insert_mr(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str, state: &str) {
|
||||||
let ts = now_ms();
|
let ts = now_ms();
|
||||||
conn.execute(
|
conn.execute(
|
||||||
|
|||||||
925
src/cli/mod.rs
925
src/cli/mod.rs
@@ -1,10 +1,11 @@
|
|||||||
|
pub mod args;
|
||||||
pub mod autocorrect;
|
pub mod autocorrect;
|
||||||
pub mod commands;
|
pub mod commands;
|
||||||
pub mod progress;
|
pub mod progress;
|
||||||
pub mod render;
|
pub mod render;
|
||||||
pub mod robot;
|
pub mod robot;
|
||||||
|
|
||||||
use clap::{Args, Parser, Subcommand};
|
use clap::{Parser, Subcommand};
|
||||||
use std::io::IsTerminal;
|
use std::io::IsTerminal;
|
||||||
|
|
||||||
#[derive(Parser)]
|
#[derive(Parser)]
|
||||||
@@ -276,6 +277,44 @@ pub enum Commands {
|
|||||||
/// Trace why code was introduced: file -> MR -> issue -> discussion
|
/// Trace why code was introduced: file -> MR -> issue -> discussion
|
||||||
Trace(TraceArgs),
|
Trace(TraceArgs),
|
||||||
|
|
||||||
|
/// Auto-generate a structured narrative of an issue or MR
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore explain issues 42 # Narrative for issue #42
|
||||||
|
lore explain mrs 99 -p group/repo # Narrative for MR !99 in specific project
|
||||||
|
lore -J explain issues 42 # JSON output for automation
|
||||||
|
lore explain issues 42 --sections key_decisions,open_threads # Specific sections only
|
||||||
|
lore explain issues 42 --since 30d # Narrative scoped to last 30 days
|
||||||
|
lore explain issues 42 --no-timeline # Skip timeline (faster)")]
|
||||||
|
Explain {
|
||||||
|
/// Entity type: "issues" or "mrs" (singular forms also accepted)
|
||||||
|
#[arg(value_parser = ["issues", "mrs", "issue", "mr"])]
|
||||||
|
entity_type: String,
|
||||||
|
|
||||||
|
/// Entity IID
|
||||||
|
iid: i64,
|
||||||
|
|
||||||
|
/// Scope to project (fuzzy match)
|
||||||
|
#[arg(short, long)]
|
||||||
|
project: Option<String>,
|
||||||
|
|
||||||
|
/// Select specific sections (comma-separated)
|
||||||
|
/// Valid: entity, description, key_decisions, activity, open_threads, related, timeline
|
||||||
|
#[arg(long, value_delimiter = ',', help_heading = "Output")]
|
||||||
|
sections: Option<Vec<String>>,
|
||||||
|
|
||||||
|
/// Skip timeline excerpt (faster execution)
|
||||||
|
#[arg(long, help_heading = "Output")]
|
||||||
|
no_timeline: bool,
|
||||||
|
|
||||||
|
/// Maximum key decisions to include
|
||||||
|
#[arg(long, default_value = "10", help_heading = "Output")]
|
||||||
|
max_decisions: usize,
|
||||||
|
|
||||||
|
/// Time scope for events/notes (e.g. 7d, 2w, 1m, or YYYY-MM-DD)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
since: Option<String>,
|
||||||
|
},
|
||||||
|
|
||||||
/// Detect discussion divergence from original intent
|
/// Detect discussion divergence from original intent
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
lore drift issues 42 # Check drift on issue #42
|
lore drift issues 42 # Check drift on issue #42
|
||||||
@@ -380,17 +419,6 @@ pub enum Commands {
|
|||||||
source_branch: Option<String>,
|
source_branch: Option<String>,
|
||||||
},
|
},
|
||||||
|
|
||||||
#[command(hide = true)]
|
|
||||||
Show {
|
|
||||||
#[arg(value_parser = ["issue", "mr"])]
|
|
||||||
entity: String,
|
|
||||||
|
|
||||||
iid: i64,
|
|
||||||
|
|
||||||
#[arg(long)]
|
|
||||||
project: Option<String>,
|
|
||||||
},
|
|
||||||
|
|
||||||
#[command(hide = true, name = "auth-test")]
|
#[command(hide = true, name = "auth-test")]
|
||||||
AuthTest,
|
AuthTest,
|
||||||
|
|
||||||
@@ -398,871 +426,8 @@ pub enum Commands {
|
|||||||
SyncStatus,
|
SyncStatus,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Parser)]
|
pub use args::{
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
CountArgs, CronAction, CronArgs, EmbedArgs, FileHistoryArgs, GenerateDocsArgs, IngestArgs,
|
||||||
lore issues -n 10 # List 10 most recently updated issues
|
IssuesArgs, MeArgs, MrsArgs, NotesArgs, SearchArgs, StatsArgs, SyncArgs, TimelineArgs,
|
||||||
lore issues -s opened -l bug # Open issues labeled 'bug'
|
TokenAction, TokenArgs, TraceArgs, WhoArgs,
|
||||||
lore issues 42 -p group/repo # Show issue #42 in a specific project
|
};
|
||||||
lore issues --since 7d -a jsmith # Issues updated in last 7 days by jsmith")]
|
|
||||||
pub struct IssuesArgs {
|
|
||||||
/// Issue IID (omit to list, provide to show details)
|
|
||||||
pub iid: Option<i64>,
|
|
||||||
|
|
||||||
/// Maximum results
|
|
||||||
#[arg(
|
|
||||||
short = 'n',
|
|
||||||
long = "limit",
|
|
||||||
default_value = "50",
|
|
||||||
help_heading = "Output"
|
|
||||||
)]
|
|
||||||
pub limit: usize,
|
|
||||||
|
|
||||||
/// Select output fields (comma-separated, or 'minimal' preset: iid,title,state,updated_at_iso)
|
|
||||||
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
|
||||||
pub fields: Option<Vec<String>>,
|
|
||||||
|
|
||||||
/// Filter by state (opened, closed, all)
|
|
||||||
#[arg(short = 's', long, help_heading = "Filters", value_parser = ["opened", "closed", "all"])]
|
|
||||||
pub state: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by project path
|
|
||||||
#[arg(short = 'p', long, help_heading = "Filters")]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by author username
|
|
||||||
#[arg(short = 'a', long, help_heading = "Filters")]
|
|
||||||
pub author: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by assignee username
|
|
||||||
#[arg(short = 'A', long, help_heading = "Filters")]
|
|
||||||
pub assignee: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by label (repeatable, AND logic)
|
|
||||||
#[arg(short = 'l', long, help_heading = "Filters")]
|
|
||||||
pub label: Option<Vec<String>>,
|
|
||||||
|
|
||||||
/// Filter by milestone title
|
|
||||||
#[arg(short = 'm', long, help_heading = "Filters")]
|
|
||||||
pub milestone: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by work-item status name (repeatable, OR logic)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub status: Vec<String>,
|
|
||||||
|
|
||||||
/// Filter by time (7d, 2w, 1m, or YYYY-MM-DD)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub since: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by due date (before this date, YYYY-MM-DD)
|
|
||||||
#[arg(long = "due-before", help_heading = "Filters")]
|
|
||||||
pub due_before: Option<String>,
|
|
||||||
|
|
||||||
/// Show only issues with a due date
|
|
||||||
#[arg(
|
|
||||||
long = "has-due",
|
|
||||||
help_heading = "Filters",
|
|
||||||
overrides_with = "no_has_due"
|
|
||||||
)]
|
|
||||||
pub has_due: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-has-due", hide = true, overrides_with = "has_due")]
|
|
||||||
pub no_has_due: bool,
|
|
||||||
|
|
||||||
/// Sort field (updated, created, iid)
|
|
||||||
#[arg(long, value_parser = ["updated", "created", "iid"], default_value = "updated", help_heading = "Sorting")]
|
|
||||||
pub sort: String,
|
|
||||||
|
|
||||||
/// Sort ascending (default: descending)
|
|
||||||
#[arg(long, help_heading = "Sorting", overrides_with = "no_asc")]
|
|
||||||
pub asc: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-asc", hide = true, overrides_with = "asc")]
|
|
||||||
pub no_asc: bool,
|
|
||||||
|
|
||||||
/// Open first matching item in browser
|
|
||||||
#[arg(
|
|
||||||
short = 'o',
|
|
||||||
long,
|
|
||||||
help_heading = "Actions",
|
|
||||||
overrides_with = "no_open"
|
|
||||||
)]
|
|
||||||
pub open: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-open", hide = true, overrides_with = "open")]
|
|
||||||
pub no_open: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore mrs -s opened # List open merge requests
|
|
||||||
lore mrs -s merged --since 2w # MRs merged in the last 2 weeks
|
|
||||||
lore mrs 99 -p group/repo # Show MR !99 in a specific project
|
|
||||||
lore mrs -D --reviewer jsmith # Non-draft MRs reviewed by jsmith")]
|
|
||||||
pub struct MrsArgs {
|
|
||||||
/// MR IID (omit to list, provide to show details)
|
|
||||||
pub iid: Option<i64>,
|
|
||||||
|
|
||||||
/// Maximum results
|
|
||||||
#[arg(
|
|
||||||
short = 'n',
|
|
||||||
long = "limit",
|
|
||||||
default_value = "50",
|
|
||||||
help_heading = "Output"
|
|
||||||
)]
|
|
||||||
pub limit: usize,
|
|
||||||
|
|
||||||
/// Select output fields (comma-separated, or 'minimal' preset: iid,title,state,updated_at_iso)
|
|
||||||
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
|
||||||
pub fields: Option<Vec<String>>,
|
|
||||||
|
|
||||||
/// Filter by state (opened, merged, closed, locked, all)
|
|
||||||
#[arg(short = 's', long, help_heading = "Filters", value_parser = ["opened", "merged", "closed", "locked", "all"])]
|
|
||||||
pub state: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by project path
|
|
||||||
#[arg(short = 'p', long, help_heading = "Filters")]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by author username
|
|
||||||
#[arg(short = 'a', long, help_heading = "Filters")]
|
|
||||||
pub author: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by assignee username
|
|
||||||
#[arg(short = 'A', long, help_heading = "Filters")]
|
|
||||||
pub assignee: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by reviewer username
|
|
||||||
#[arg(short = 'r', long, help_heading = "Filters")]
|
|
||||||
pub reviewer: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by label (repeatable, AND logic)
|
|
||||||
#[arg(short = 'l', long, help_heading = "Filters")]
|
|
||||||
pub label: Option<Vec<String>>,
|
|
||||||
|
|
||||||
/// Filter by time (7d, 2w, 1m, or YYYY-MM-DD)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub since: Option<String>,
|
|
||||||
|
|
||||||
/// Show only draft MRs
|
|
||||||
#[arg(
|
|
||||||
short = 'd',
|
|
||||||
long,
|
|
||||||
conflicts_with = "no_draft",
|
|
||||||
help_heading = "Filters"
|
|
||||||
)]
|
|
||||||
pub draft: bool,
|
|
||||||
|
|
||||||
/// Exclude draft MRs
|
|
||||||
#[arg(
|
|
||||||
short = 'D',
|
|
||||||
long = "no-draft",
|
|
||||||
conflicts_with = "draft",
|
|
||||||
help_heading = "Filters"
|
|
||||||
)]
|
|
||||||
pub no_draft: bool,
|
|
||||||
|
|
||||||
/// Filter by target branch
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub target: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by source branch
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub source: Option<String>,
|
|
||||||
|
|
||||||
/// Sort field (updated, created, iid)
|
|
||||||
#[arg(long, value_parser = ["updated", "created", "iid"], default_value = "updated", help_heading = "Sorting")]
|
|
||||||
pub sort: String,
|
|
||||||
|
|
||||||
/// Sort ascending (default: descending)
|
|
||||||
#[arg(long, help_heading = "Sorting", overrides_with = "no_asc")]
|
|
||||||
pub asc: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-asc", hide = true, overrides_with = "asc")]
|
|
||||||
pub no_asc: bool,
|
|
||||||
|
|
||||||
/// Open first matching item in browser
|
|
||||||
#[arg(
|
|
||||||
short = 'o',
|
|
||||||
long,
|
|
||||||
help_heading = "Actions",
|
|
||||||
overrides_with = "no_open"
|
|
||||||
)]
|
|
||||||
pub open: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-open", hide = true, overrides_with = "open")]
|
|
||||||
pub no_open: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore notes # List 50 most recent notes
|
|
||||||
lore notes --author alice --since 7d # Notes by alice in last 7 days
|
|
||||||
lore notes --for-issue 42 -p group/repo # Notes on issue #42
|
|
||||||
lore notes --path src/ --resolution unresolved # Unresolved diff notes in src/")]
|
|
||||||
pub struct NotesArgs {
|
|
||||||
/// Maximum results
|
|
||||||
#[arg(
|
|
||||||
short = 'n',
|
|
||||||
long = "limit",
|
|
||||||
default_value = "50",
|
|
||||||
help_heading = "Output"
|
|
||||||
)]
|
|
||||||
pub limit: usize,
|
|
||||||
|
|
||||||
/// Select output fields (comma-separated, or 'minimal' preset: id,author_username,body,created_at_iso)
|
|
||||||
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
|
||||||
pub fields: Option<Vec<String>>,
|
|
||||||
|
|
||||||
/// Filter by author username
|
|
||||||
#[arg(short = 'a', long, help_heading = "Filters")]
|
|
||||||
pub author: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by note type (DiffNote, DiscussionNote)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub note_type: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by body text (substring match)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub contains: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by internal note ID
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub note_id: Option<i64>,
|
|
||||||
|
|
||||||
/// Filter by GitLab note ID
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub gitlab_note_id: Option<i64>,
|
|
||||||
|
|
||||||
/// Filter by discussion ID
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub discussion_id: Option<String>,
|
|
||||||
|
|
||||||
/// Include system notes (excluded by default)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub include_system: bool,
|
|
||||||
|
|
||||||
/// Filter to notes on a specific issue IID (requires --project or default_project)
|
|
||||||
#[arg(long, conflicts_with = "for_mr", help_heading = "Filters")]
|
|
||||||
pub for_issue: Option<i64>,
|
|
||||||
|
|
||||||
/// Filter to notes on a specific MR IID (requires --project or default_project)
|
|
||||||
#[arg(long, conflicts_with = "for_issue", help_heading = "Filters")]
|
|
||||||
pub for_mr: Option<i64>,
|
|
||||||
|
|
||||||
/// Filter by project path
|
|
||||||
#[arg(short = 'p', long, help_heading = "Filters")]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by time (7d, 2w, 1m, or YYYY-MM-DD)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub since: Option<String>,
|
|
||||||
|
|
||||||
/// Filter until date (YYYY-MM-DD, inclusive end-of-day)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub until: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by file path (exact match or prefix with trailing /)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub path: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by resolution status (any, unresolved, resolved)
|
|
||||||
#[arg(
|
|
||||||
long,
|
|
||||||
value_parser = ["any", "unresolved", "resolved"],
|
|
||||||
help_heading = "Filters"
|
|
||||||
)]
|
|
||||||
pub resolution: Option<String>,
|
|
||||||
|
|
||||||
/// Sort field (created, updated)
|
|
||||||
#[arg(
|
|
||||||
long,
|
|
||||||
value_parser = ["created", "updated"],
|
|
||||||
default_value = "created",
|
|
||||||
help_heading = "Sorting"
|
|
||||||
)]
|
|
||||||
pub sort: String,
|
|
||||||
|
|
||||||
/// Sort ascending (default: descending)
|
|
||||||
#[arg(long, help_heading = "Sorting")]
|
|
||||||
pub asc: bool,
|
|
||||||
|
|
||||||
/// Open first matching item in browser
|
|
||||||
#[arg(long, help_heading = "Actions")]
|
|
||||||
pub open: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
pub struct IngestArgs {
|
|
||||||
/// Entity to ingest (issues, mrs). Omit to ingest everything
|
|
||||||
#[arg(value_parser = ["issues", "mrs"])]
|
|
||||||
pub entity: Option<String>,
|
|
||||||
|
|
||||||
/// Filter to single project
|
|
||||||
#[arg(short = 'p', long)]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Override stale sync lock
|
|
||||||
#[arg(short = 'f', long, overrides_with = "no_force")]
|
|
||||||
pub force: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-force", hide = true, overrides_with = "force")]
|
|
||||||
pub no_force: bool,
|
|
||||||
|
|
||||||
/// Full re-sync: reset cursors and fetch all data from scratch
|
|
||||||
#[arg(long, overrides_with = "no_full")]
|
|
||||||
pub full: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-full", hide = true, overrides_with = "full")]
|
|
||||||
pub no_full: bool,
|
|
||||||
|
|
||||||
/// Preview what would be synced without making changes
|
|
||||||
#[arg(long, overrides_with = "no_dry_run")]
|
|
||||||
pub dry_run: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-dry-run", hide = true, overrides_with = "dry_run")]
|
|
||||||
pub no_dry_run: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore stats # Show document and index statistics
|
|
||||||
lore stats --check # Run integrity checks
|
|
||||||
lore stats --repair --dry-run # Preview what repair would fix
|
|
||||||
lore --robot stats # JSON output for automation")]
|
|
||||||
pub struct StatsArgs {
|
|
||||||
/// Run integrity checks
|
|
||||||
#[arg(long, overrides_with = "no_check")]
|
|
||||||
pub check: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-check", hide = true, overrides_with = "check")]
|
|
||||||
pub no_check: bool,
|
|
||||||
|
|
||||||
/// Repair integrity issues (auto-enables --check)
|
|
||||||
#[arg(long)]
|
|
||||||
pub repair: bool,
|
|
||||||
|
|
||||||
/// Preview what would be repaired without making changes (requires --repair)
|
|
||||||
#[arg(long, overrides_with = "no_dry_run")]
|
|
||||||
pub dry_run: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-dry-run", hide = true, overrides_with = "dry_run")]
|
|
||||||
pub no_dry_run: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore search 'authentication bug' # Hybrid search (default)
|
|
||||||
lore search 'deploy' --mode lexical --type mr # Lexical search, MRs only
|
|
||||||
lore search 'API rate limit' --since 30d # Recent results only
|
|
||||||
lore search 'config' -p group/repo --explain # With ranking explanation")]
|
|
||||||
pub struct SearchArgs {
|
|
||||||
/// Search query string
|
|
||||||
pub query: String,
|
|
||||||
|
|
||||||
/// Search mode (lexical, hybrid, semantic)
|
|
||||||
#[arg(long, default_value = "hybrid", value_parser = ["lexical", "hybrid", "semantic"], help_heading = "Mode")]
|
|
||||||
pub mode: String,
|
|
||||||
|
|
||||||
/// Filter by source type (issue, mr, discussion, note)
|
|
||||||
#[arg(long = "type", value_name = "TYPE", value_parser = ["issue", "mr", "discussion", "note"], help_heading = "Filters")]
|
|
||||||
pub source_type: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by author username
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub author: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by project path
|
|
||||||
#[arg(short = 'p', long, help_heading = "Filters")]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by label (repeatable, AND logic)
|
|
||||||
#[arg(long, action = clap::ArgAction::Append, help_heading = "Filters")]
|
|
||||||
pub label: Vec<String>,
|
|
||||||
|
|
||||||
/// Filter by file path (trailing / for prefix match)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub path: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by created since (7d, 2w, or YYYY-MM-DD)
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub since: Option<String>,
|
|
||||||
|
|
||||||
/// Filter by updated since (7d, 2w, or YYYY-MM-DD)
|
|
||||||
#[arg(long = "updated-since", help_heading = "Filters")]
|
|
||||||
pub updated_since: Option<String>,
|
|
||||||
|
|
||||||
/// Maximum results (default 20, max 100)
|
|
||||||
#[arg(
|
|
||||||
short = 'n',
|
|
||||||
long = "limit",
|
|
||||||
default_value = "20",
|
|
||||||
help_heading = "Output"
|
|
||||||
)]
|
|
||||||
pub limit: usize,
|
|
||||||
|
|
||||||
/// Select output fields (comma-separated, or 'minimal' preset: document_id,title,source_type,score)
|
|
||||||
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
|
||||||
pub fields: Option<Vec<String>>,
|
|
||||||
|
|
||||||
/// Show ranking explanation per result
|
|
||||||
#[arg(long, help_heading = "Output", overrides_with = "no_explain")]
|
|
||||||
pub explain: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-explain", hide = true, overrides_with = "explain")]
|
|
||||||
pub no_explain: bool,
|
|
||||||
|
|
||||||
/// FTS query mode: safe (default) or raw
|
|
||||||
#[arg(long = "fts-mode", default_value = "safe", value_parser = ["safe", "raw"], help_heading = "Mode")]
|
|
||||||
pub fts_mode: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore generate-docs # Generate docs for dirty entities
|
|
||||||
lore generate-docs --full # Full rebuild of all documents
|
|
||||||
lore generate-docs --full -p group/repo # Full rebuild for one project")]
|
|
||||||
pub struct GenerateDocsArgs {
|
|
||||||
/// Full rebuild: seed all entities into dirty queue, then drain
|
|
||||||
#[arg(long)]
|
|
||||||
pub full: bool,
|
|
||||||
|
|
||||||
/// Filter to single project
|
|
||||||
#[arg(short = 'p', long)]
|
|
||||||
pub project: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore sync # Full pipeline: ingest + docs + embed
|
|
||||||
lore sync --no-embed # Skip embedding step
|
|
||||||
lore sync --no-status # Skip work-item status enrichment
|
|
||||||
lore sync --full --force # Full re-sync, override stale lock
|
|
||||||
lore sync --dry-run # Preview what would change
|
|
||||||
lore sync --issue 42 -p group/repo # Surgically sync one issue
|
|
||||||
lore sync --mr 10 --mr 20 -p g/r # Surgically sync two MRs")]
|
|
||||||
pub struct SyncArgs {
|
|
||||||
/// Reset cursors, fetch everything
|
|
||||||
#[arg(long, overrides_with = "no_full")]
|
|
||||||
pub full: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-full", hide = true, overrides_with = "full")]
|
|
||||||
pub no_full: bool,
|
|
||||||
|
|
||||||
/// Override stale lock
|
|
||||||
#[arg(long, overrides_with = "no_force")]
|
|
||||||
pub force: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-force", hide = true, overrides_with = "force")]
|
|
||||||
pub no_force: bool,
|
|
||||||
|
|
||||||
/// Skip embedding step
|
|
||||||
#[arg(long)]
|
|
||||||
pub no_embed: bool,
|
|
||||||
|
|
||||||
/// Skip document regeneration
|
|
||||||
#[arg(long)]
|
|
||||||
pub no_docs: bool,
|
|
||||||
|
|
||||||
/// Skip resource event fetching (overrides config)
|
|
||||||
#[arg(long = "no-events")]
|
|
||||||
pub no_events: bool,
|
|
||||||
|
|
||||||
/// Skip MR file change fetching (overrides config)
|
|
||||||
#[arg(long = "no-file-changes")]
|
|
||||||
pub no_file_changes: bool,
|
|
||||||
|
|
||||||
/// Skip work-item status enrichment via GraphQL (overrides config)
|
|
||||||
#[arg(long = "no-status")]
|
|
||||||
pub no_status: bool,
|
|
||||||
|
|
||||||
/// Preview what would be synced without making changes
|
|
||||||
#[arg(long, overrides_with = "no_dry_run")]
|
|
||||||
pub dry_run: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-dry-run", hide = true, overrides_with = "dry_run")]
|
|
||||||
pub no_dry_run: bool,
|
|
||||||
|
|
||||||
/// Show detailed timing breakdown for sync stages
|
|
||||||
#[arg(short = 't', long = "timings")]
|
|
||||||
pub timings: bool,
|
|
||||||
|
|
||||||
/// Acquire file lock before syncing (skip if another sync is running)
|
|
||||||
#[arg(long)]
|
|
||||||
pub lock: bool,
|
|
||||||
|
|
||||||
/// Surgically sync specific issues by IID (repeatable, must be positive)
|
|
||||||
#[arg(long, value_parser = clap::value_parser!(u64).range(1..), action = clap::ArgAction::Append)]
|
|
||||||
pub issue: Vec<u64>,
|
|
||||||
|
|
||||||
/// Surgically sync specific merge requests by IID (repeatable, must be positive)
|
|
||||||
#[arg(long, value_parser = clap::value_parser!(u64).range(1..), action = clap::ArgAction::Append)]
|
|
||||||
pub mr: Vec<u64>,
|
|
||||||
|
|
||||||
/// Scope to a single project (required when --issue or --mr is used)
|
|
||||||
#[arg(short = 'p', long)]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Validate remote entities exist without DB writes (preflight only)
|
|
||||||
#[arg(long)]
|
|
||||||
pub preflight_only: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore embed # Embed new/changed documents
|
|
||||||
lore embed --full # Re-embed all documents from scratch
|
|
||||||
lore embed --retry-failed # Retry previously failed embeddings")]
|
|
||||||
pub struct EmbedArgs {
|
|
||||||
/// Re-embed all documents (clears existing embeddings first)
|
|
||||||
#[arg(long, overrides_with = "no_full")]
|
|
||||||
pub full: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-full", hide = true, overrides_with = "full")]
|
|
||||||
pub no_full: bool,
|
|
||||||
|
|
||||||
/// Retry previously failed embeddings
|
|
||||||
#[arg(long, overrides_with = "no_retry_failed")]
|
|
||||||
pub retry_failed: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-retry-failed", hide = true, overrides_with = "retry_failed")]
|
|
||||||
pub no_retry_failed: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore timeline 'deployment' # Search-based seeding
|
|
||||||
lore timeline issue:42 # Direct: issue #42 and related entities
|
|
||||||
lore timeline i:42 # Shorthand for issue:42
|
|
||||||
lore timeline mr:99 # Direct: MR !99 and related entities
|
|
||||||
lore timeline 'auth' --since 30d -p group/repo # Scoped to project and time
|
|
||||||
lore timeline 'migration' --depth 2 # Deep cross-reference expansion
|
|
||||||
lore timeline 'auth' --no-mentions # Only 'closes' and 'related' edges")]
|
|
||||||
pub struct TimelineArgs {
|
|
||||||
/// Search text or entity reference (issue:N, i:N, mr:N, m:N)
|
|
||||||
pub query: String,
|
|
||||||
|
|
||||||
/// Scope to a specific project (fuzzy match)
|
|
||||||
#[arg(short = 'p', long, help_heading = "Filters")]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Only show events after this date (e.g. "6m", "2w", "2024-01-01")
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub since: Option<String>,
|
|
||||||
|
|
||||||
/// Cross-reference expansion depth (0 = no expansion)
|
|
||||||
#[arg(long, default_value = "1", help_heading = "Expansion")]
|
|
||||||
pub depth: u32,
|
|
||||||
|
|
||||||
/// Skip 'mentioned' edges during expansion (only follow 'closes' and 'related')
|
|
||||||
#[arg(long = "no-mentions", help_heading = "Expansion")]
|
|
||||||
pub no_mentions: bool,
|
|
||||||
|
|
||||||
/// Maximum number of events to display
|
|
||||||
#[arg(
|
|
||||||
short = 'n',
|
|
||||||
long = "limit",
|
|
||||||
default_value = "100",
|
|
||||||
help_heading = "Output"
|
|
||||||
)]
|
|
||||||
pub limit: usize,
|
|
||||||
|
|
||||||
/// Select output fields (comma-separated, or 'minimal' preset: timestamp,type,entity_iid,detail)
|
|
||||||
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
|
||||||
pub fields: Option<Vec<String>>,
|
|
||||||
|
|
||||||
/// Maximum seed entities from search
|
|
||||||
#[arg(long = "max-seeds", default_value = "10", help_heading = "Expansion")]
|
|
||||||
pub max_seeds: usize,
|
|
||||||
|
|
||||||
/// Maximum expanded entities via cross-references
|
|
||||||
#[arg(
|
|
||||||
long = "max-entities",
|
|
||||||
default_value = "50",
|
|
||||||
help_heading = "Expansion"
|
|
||||||
)]
|
|
||||||
pub max_entities: usize,
|
|
||||||
|
|
||||||
/// Maximum evidence notes included
|
|
||||||
#[arg(
|
|
||||||
long = "max-evidence",
|
|
||||||
default_value = "10",
|
|
||||||
help_heading = "Expansion"
|
|
||||||
)]
|
|
||||||
pub max_evidence: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore who src/features/auth/ # Who knows about this area?
|
|
||||||
lore who @asmith # What is asmith working on?
|
|
||||||
lore who @asmith --reviews # What review patterns does asmith have?
|
|
||||||
lore who --active # What discussions need attention?
|
|
||||||
lore who --overlap src/features/auth/ # Who else is touching these files?
|
|
||||||
lore who --path README.md # Expert lookup for a root file
|
|
||||||
lore who --path Makefile # Expert lookup for a dotless root file")]
|
|
||||||
pub struct WhoArgs {
|
|
||||||
/// Username or file path (path if contains /)
|
|
||||||
pub target: Option<String>,
|
|
||||||
|
|
||||||
/// Force expert mode for a file/directory path.
|
|
||||||
/// Root files (README.md, LICENSE, Makefile) are treated as exact matches.
|
|
||||||
/// Use a trailing `/` to force directory-prefix matching.
|
|
||||||
#[arg(long, help_heading = "Mode", conflicts_with_all = ["active", "overlap", "reviews"])]
|
|
||||||
pub path: Option<String>,
|
|
||||||
|
|
||||||
/// Show active unresolved discussions
|
|
||||||
#[arg(long, help_heading = "Mode", conflicts_with_all = ["target", "overlap", "reviews", "path"])]
|
|
||||||
pub active: bool,
|
|
||||||
|
|
||||||
/// Find users with MRs/notes touching this file path
|
|
||||||
#[arg(long, help_heading = "Mode", conflicts_with_all = ["target", "active", "reviews", "path"])]
|
|
||||||
pub overlap: Option<String>,
|
|
||||||
|
|
||||||
/// Show review pattern analysis (requires username target)
|
|
||||||
#[arg(long, help_heading = "Mode", requires = "target", conflicts_with_all = ["active", "overlap", "path"])]
|
|
||||||
pub reviews: bool,
|
|
||||||
|
|
||||||
/// Time window (7d, 2w, 6m, YYYY-MM-DD). Default varies by mode.
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub since: Option<String>,
|
|
||||||
|
|
||||||
/// Scope to a project (supports fuzzy matching)
|
|
||||||
#[arg(short = 'p', long, help_heading = "Filters")]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Maximum results per section (1..=500); omit for unlimited
|
|
||||||
#[arg(
|
|
||||||
short = 'n',
|
|
||||||
long = "limit",
|
|
||||||
value_parser = clap::value_parser!(u16).range(1..=500),
|
|
||||||
help_heading = "Output"
|
|
||||||
)]
|
|
||||||
pub limit: Option<u16>,
|
|
||||||
|
|
||||||
/// Select output fields (comma-separated, or 'minimal' preset; varies by mode)
|
|
||||||
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
|
||||||
pub fields: Option<Vec<String>>,
|
|
||||||
|
|
||||||
/// Show per-MR detail breakdown (expert mode only)
|
|
||||||
#[arg(
|
|
||||||
long,
|
|
||||||
help_heading = "Output",
|
|
||||||
overrides_with = "no_detail",
|
|
||||||
conflicts_with = "explain_score"
|
|
||||||
)]
|
|
||||||
pub detail: bool,
|
|
||||||
|
|
||||||
#[arg(long = "no-detail", hide = true, overrides_with = "detail")]
|
|
||||||
pub no_detail: bool,
|
|
||||||
|
|
||||||
/// Score as if "now" is this date (ISO 8601 or duration like 30d). Expert mode only.
|
|
||||||
#[arg(long = "as-of", help_heading = "Scoring")]
|
|
||||||
pub as_of: Option<String>,
|
|
||||||
|
|
||||||
/// Show per-component score breakdown in output. Expert mode only.
|
|
||||||
#[arg(long = "explain-score", help_heading = "Scoring")]
|
|
||||||
pub explain_score: bool,
|
|
||||||
|
|
||||||
/// Include bot users in results (normally excluded via scoring.excluded_usernames).
|
|
||||||
#[arg(long = "include-bots", help_heading = "Scoring")]
|
|
||||||
pub include_bots: bool,
|
|
||||||
|
|
||||||
/// Include discussions on closed issues and merged/closed MRs
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub include_closed: bool,
|
|
||||||
|
|
||||||
/// Remove the default time window (query all history). Conflicts with --since.
|
|
||||||
#[arg(
|
|
||||||
long = "all-history",
|
|
||||||
help_heading = "Filters",
|
|
||||||
conflicts_with = "since"
|
|
||||||
)]
|
|
||||||
pub all_history: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore me # Full dashboard (default project or all)
|
|
||||||
lore me --issues # Issues section only
|
|
||||||
lore me --mrs # MRs section only
|
|
||||||
lore me --activity # Activity feed only
|
|
||||||
lore me --all # All synced projects
|
|
||||||
lore me --since 2d # Activity window (default: 30d)
|
|
||||||
lore me --project group/repo # Scope to one project
|
|
||||||
lore me --user jdoe # Override configured username")]
|
|
||||||
pub struct MeArgs {
|
|
||||||
/// Show open issues section
|
|
||||||
#[arg(long, help_heading = "Sections")]
|
|
||||||
pub issues: bool,
|
|
||||||
|
|
||||||
/// Show authored + reviewing MRs section
|
|
||||||
#[arg(long, help_heading = "Sections")]
|
|
||||||
pub mrs: bool,
|
|
||||||
|
|
||||||
/// Show activity feed section
|
|
||||||
#[arg(long, help_heading = "Sections")]
|
|
||||||
pub activity: bool,
|
|
||||||
|
|
||||||
/// Show items you're @mentioned in (not assigned/authored/reviewing)
|
|
||||||
#[arg(long, help_heading = "Sections")]
|
|
||||||
pub mentions: bool,
|
|
||||||
|
|
||||||
/// Activity window (e.g. 7d, 2w, 30d). Default: 30d. Only affects activity section.
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub since: Option<String>,
|
|
||||||
|
|
||||||
/// Scope to a project (supports fuzzy matching)
|
|
||||||
#[arg(short = 'p', long, help_heading = "Filters", conflicts_with = "all")]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Show all synced projects (overrides default_project)
|
|
||||||
#[arg(long, help_heading = "Filters", conflicts_with = "project")]
|
|
||||||
pub all: bool,
|
|
||||||
|
|
||||||
/// Override configured username
|
|
||||||
#[arg(long = "user", help_heading = "Filters")]
|
|
||||||
pub user: Option<String>,
|
|
||||||
|
|
||||||
/// Select output fields (comma-separated, or 'minimal' preset)
|
|
||||||
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
|
||||||
pub fields: Option<Vec<String>>,
|
|
||||||
|
|
||||||
/// Reset the since-last-check cursor (next run shows no new events)
|
|
||||||
#[arg(long, help_heading = "Output")]
|
|
||||||
pub reset_cursor: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl MeArgs {
|
|
||||||
/// Returns true if no section flags were passed (show all sections).
|
|
||||||
pub fn show_all_sections(&self) -> bool {
|
|
||||||
!self.issues && !self.mrs && !self.activity && !self.mentions
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore file-history src/main.rs # MRs that touched this file
|
|
||||||
lore file-history src/auth/ -p group/repo # Scoped to project
|
|
||||||
lore file-history src/foo.rs --discussions # Include DiffNote snippets
|
|
||||||
lore file-history src/bar.rs --no-follow-renames # Skip rename chain")]
|
|
||||||
pub struct FileHistoryArgs {
|
|
||||||
/// File path to trace history for
|
|
||||||
pub path: String,
|
|
||||||
|
|
||||||
/// Scope to a specific project (fuzzy match)
|
|
||||||
#[arg(short = 'p', long, help_heading = "Filters")]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Include discussion snippets from DiffNotes on this file
|
|
||||||
#[arg(long, help_heading = "Output")]
|
|
||||||
pub discussions: bool,
|
|
||||||
|
|
||||||
/// Disable rename chain resolution
|
|
||||||
#[arg(long = "no-follow-renames", help_heading = "Filters")]
|
|
||||||
pub no_follow_renames: bool,
|
|
||||||
|
|
||||||
/// Only show merged MRs
|
|
||||||
#[arg(long, help_heading = "Filters")]
|
|
||||||
pub merged: bool,
|
|
||||||
|
|
||||||
/// Maximum results
|
|
||||||
#[arg(
|
|
||||||
short = 'n',
|
|
||||||
long = "limit",
|
|
||||||
default_value = "50",
|
|
||||||
help_heading = "Output"
|
|
||||||
)]
|
|
||||||
pub limit: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore trace src/main.rs # Why was this file changed?
|
|
||||||
lore trace src/auth/ -p group/repo # Scoped to project
|
|
||||||
lore trace src/foo.rs --discussions # Include DiffNote context
|
|
||||||
lore trace src/bar.rs:42 # Line hint (Tier 2 warning)")]
|
|
||||||
pub struct TraceArgs {
|
|
||||||
/// File path to trace (supports :line suffix for future Tier 2)
|
|
||||||
pub path: String,
|
|
||||||
|
|
||||||
/// Scope to a specific project (fuzzy match)
|
|
||||||
#[arg(short = 'p', long, help_heading = "Filters")]
|
|
||||||
pub project: Option<String>,
|
|
||||||
|
|
||||||
/// Include DiffNote discussion snippets
|
|
||||||
#[arg(long, help_heading = "Output")]
|
|
||||||
pub discussions: bool,
|
|
||||||
|
|
||||||
/// Disable rename chain resolution
|
|
||||||
#[arg(long = "no-follow-renames", help_heading = "Filters")]
|
|
||||||
pub no_follow_renames: bool,
|
|
||||||
|
|
||||||
/// Maximum trace chains to display
|
|
||||||
#[arg(
|
|
||||||
short = 'n',
|
|
||||||
long = "limit",
|
|
||||||
default_value = "20",
|
|
||||||
help_heading = "Output"
|
|
||||||
)]
|
|
||||||
pub limit: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
|
||||||
lore count issues # Total issues in local database
|
|
||||||
lore count notes --for mr # Notes on merge requests only
|
|
||||||
lore count discussions --for issue # Discussions on issues only")]
|
|
||||||
pub struct CountArgs {
|
|
||||||
/// Entity type to count (issues, mrs, discussions, notes, events)
|
|
||||||
#[arg(value_parser = ["issues", "mrs", "discussions", "notes", "events"])]
|
|
||||||
pub entity: String,
|
|
||||||
|
|
||||||
/// Parent type filter: issue or mr (for discussions/notes)
|
|
||||||
#[arg(short = 'f', long = "for", value_parser = ["issue", "mr"])]
|
|
||||||
pub for_entity: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
|
||||||
pub struct CronArgs {
|
|
||||||
#[command(subcommand)]
|
|
||||||
pub action: CronAction,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Subcommand)]
|
|
||||||
pub enum CronAction {
|
|
||||||
/// Install cron job for automatic syncing
|
|
||||||
Install {
|
|
||||||
/// Sync interval in minutes (default: 8)
|
|
||||||
#[arg(long, default_value = "8")]
|
|
||||||
interval: u32,
|
|
||||||
},
|
|
||||||
|
|
||||||
/// Remove cron job
|
|
||||||
Uninstall,
|
|
||||||
|
|
||||||
/// Show current cron configuration
|
|
||||||
Status,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Args)]
|
|
||||||
pub struct TokenArgs {
|
|
||||||
#[command(subcommand)]
|
|
||||||
pub action: TokenAction,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Subcommand)]
|
|
||||||
pub enum TokenAction {
|
|
||||||
/// Store a GitLab token in the config file
|
|
||||||
Set {
|
|
||||||
/// Token value (reads from stdin if omitted in non-interactive mode)
|
|
||||||
#[arg(long)]
|
|
||||||
token: Option<String>,
|
|
||||||
},
|
|
||||||
|
|
||||||
/// Show the current token (masked by default)
|
|
||||||
Show {
|
|
||||||
/// Show the full unmasked token
|
|
||||||
#[arg(long)]
|
|
||||||
unmask: bool,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -569,6 +569,32 @@ pub fn terminal_width() -> usize {
|
|||||||
80
|
80
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Strip ANSI escape codes (SGR sequences) from a string.
|
||||||
|
pub fn strip_ansi(s: &str) -> String {
|
||||||
|
let mut out = String::with_capacity(s.len());
|
||||||
|
let mut chars = s.chars();
|
||||||
|
while let Some(c) = chars.next() {
|
||||||
|
if c == '\x1b' {
|
||||||
|
// Consume `[`, then digits/semicolons, then the final letter
|
||||||
|
if chars.next() == Some('[') {
|
||||||
|
for c in chars.by_ref() {
|
||||||
|
if c.is_ascii_alphabetic() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
out.push(c);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Compute the visible width of a string that may contain ANSI escape sequences.
|
||||||
|
pub fn visible_width(s: &str) -> usize {
|
||||||
|
strip_ansi(s).chars().count()
|
||||||
|
}
|
||||||
|
|
||||||
/// Truncate a string to `max` characters, appending "..." if truncated.
|
/// Truncate a string to `max` characters, appending "..." if truncated.
|
||||||
pub fn truncate(s: &str, max: usize) -> String {
|
pub fn truncate(s: &str, max: usize) -> String {
|
||||||
if max < 4 {
|
if max < 4 {
|
||||||
@@ -1459,24 +1485,19 @@ mod tests {
|
|||||||
|
|
||||||
// ── helpers ──
|
// ── helpers ──
|
||||||
|
|
||||||
/// Strip ANSI escape codes (SGR sequences) for content assertions.
|
/// Delegate to the public `strip_ansi` for test assertions.
|
||||||
fn strip_ansi(s: &str) -> String {
|
fn strip_ansi(s: &str) -> String {
|
||||||
let mut out = String::with_capacity(s.len());
|
super::strip_ansi(s)
|
||||||
let mut chars = s.chars();
|
}
|
||||||
while let Some(c) = chars.next() {
|
|
||||||
if c == '\x1b' {
|
#[test]
|
||||||
// Consume `[`, then digits/semicolons, then the final letter
|
fn visible_width_strips_ansi() {
|
||||||
if chars.next() == Some('[') {
|
let styled = "\x1b[1mhello\x1b[0m".to_string();
|
||||||
for c in chars.by_ref() {
|
assert_eq!(super::visible_width(&styled), 5);
|
||||||
if c.is_ascii_alphabetic() {
|
}
|
||||||
break;
|
|
||||||
}
|
#[test]
|
||||||
}
|
fn visible_width_plain_string() {
|
||||||
}
|
assert_eq!(super::visible_width("hello"), 5);
|
||||||
} else {
|
|
||||||
out.push(c);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
out
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,6 +3,26 @@ use serde::Serialize;
|
|||||||
#[derive(Debug, Serialize)]
|
#[derive(Debug, Serialize)]
|
||||||
pub struct RobotMeta {
|
pub struct RobotMeta {
|
||||||
pub elapsed_ms: u64,
|
pub elapsed_ms: u64,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub gitlab_base_url: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RobotMeta {
|
||||||
|
/// Standard meta with timing only.
|
||||||
|
pub fn new(elapsed_ms: u64) -> Self {
|
||||||
|
Self {
|
||||||
|
elapsed_ms,
|
||||||
|
gitlab_base_url: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Meta with GitLab base URL for URL construction by consumers.
|
||||||
|
pub fn with_base_url(elapsed_ms: u64, base_url: &str) -> Self {
|
||||||
|
Self {
|
||||||
|
elapsed_ms,
|
||||||
|
gitlab_base_url: Some(base_url.trim_end_matches('/').to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Filter JSON object fields in-place for `--fields` support.
|
/// Filter JSON object fields in-place for `--fields` support.
|
||||||
@@ -36,10 +56,16 @@ pub fn expand_fields_preset(fields: &[String], entity: &str) -> Vec<String> {
|
|||||||
.iter()
|
.iter()
|
||||||
.map(|s| (*s).to_string())
|
.map(|s| (*s).to_string())
|
||||||
.collect(),
|
.collect(),
|
||||||
"search" => ["document_id", "title", "source_type", "score"]
|
"search" => [
|
||||||
.iter()
|
"document_id",
|
||||||
.map(|s| (*s).to_string())
|
"title",
|
||||||
.collect(),
|
"source_type",
|
||||||
|
"source_entity_iid",
|
||||||
|
"score",
|
||||||
|
]
|
||||||
|
.iter()
|
||||||
|
.map(|s| (*s).to_string())
|
||||||
|
.collect(),
|
||||||
"timeline" => ["timestamp", "type", "entity_iid", "detail"]
|
"timeline" => ["timestamp", "type", "entity_iid", "detail"]
|
||||||
.iter()
|
.iter()
|
||||||
.map(|s| (*s).to_string())
|
.map(|s| (*s).to_string())
|
||||||
@@ -133,4 +159,27 @@ mod tests {
|
|||||||
let expanded = expand_fields_preset(&fields, "notes");
|
let expanded = expand_fields_preset(&fields, "notes");
|
||||||
assert_eq!(expanded, ["id", "body"]);
|
assert_eq!(expanded, ["id", "body"]);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn meta_new_omits_base_url() {
|
||||||
|
let meta = RobotMeta::new(42);
|
||||||
|
let json = serde_json::to_value(&meta).unwrap();
|
||||||
|
assert_eq!(json["elapsed_ms"], 42);
|
||||||
|
assert!(json.get("gitlab_base_url").is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn meta_with_base_url_includes_it() {
|
||||||
|
let meta = RobotMeta::with_base_url(99, "https://gitlab.example.com");
|
||||||
|
let json = serde_json::to_value(&meta).unwrap();
|
||||||
|
assert_eq!(json["elapsed_ms"], 99);
|
||||||
|
assert_eq!(json["gitlab_base_url"], "https://gitlab.example.com");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn meta_with_base_url_strips_trailing_slash() {
|
||||||
|
let meta = RobotMeta::with_base_url(0, "https://gitlab.example.com/");
|
||||||
|
let json = serde_json::to_value(&meta).unwrap();
|
||||||
|
assert_eq!(json["gitlab_base_url"], "https://gitlab.example.com");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,15 @@
|
|||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||||
|
pub enum NetworkErrorKind {
|
||||||
|
Timeout,
|
||||||
|
ConnectionRefused,
|
||||||
|
DnsResolution,
|
||||||
|
Tls,
|
||||||
|
Other,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||||
pub enum ErrorCode {
|
pub enum ErrorCode {
|
||||||
ConfigNotFound,
|
ConfigNotFound,
|
||||||
@@ -19,8 +28,11 @@ pub enum ErrorCode {
|
|||||||
OllamaUnavailable,
|
OllamaUnavailable,
|
||||||
OllamaModelNotFound,
|
OllamaModelNotFound,
|
||||||
EmbeddingFailed,
|
EmbeddingFailed,
|
||||||
|
EmbeddingsNotBuilt,
|
||||||
NotFound,
|
NotFound,
|
||||||
Ambiguous,
|
Ambiguous,
|
||||||
|
HealthCheckFailed,
|
||||||
|
UsageError,
|
||||||
SurgicalPreflightFailed,
|
SurgicalPreflightFailed,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -43,8 +55,11 @@ impl std::fmt::Display for ErrorCode {
|
|||||||
Self::OllamaUnavailable => "OLLAMA_UNAVAILABLE",
|
Self::OllamaUnavailable => "OLLAMA_UNAVAILABLE",
|
||||||
Self::OllamaModelNotFound => "OLLAMA_MODEL_NOT_FOUND",
|
Self::OllamaModelNotFound => "OLLAMA_MODEL_NOT_FOUND",
|
||||||
Self::EmbeddingFailed => "EMBEDDING_FAILED",
|
Self::EmbeddingFailed => "EMBEDDING_FAILED",
|
||||||
|
Self::EmbeddingsNotBuilt => "EMBEDDINGS_NOT_BUILT",
|
||||||
Self::NotFound => "NOT_FOUND",
|
Self::NotFound => "NOT_FOUND",
|
||||||
Self::Ambiguous => "AMBIGUOUS",
|
Self::Ambiguous => "AMBIGUOUS",
|
||||||
|
Self::HealthCheckFailed => "HEALTH_CHECK_FAILED",
|
||||||
|
Self::UsageError => "USAGE_ERROR",
|
||||||
Self::SurgicalPreflightFailed => "SURGICAL_PREFLIGHT_FAILED",
|
Self::SurgicalPreflightFailed => "SURGICAL_PREFLIGHT_FAILED",
|
||||||
};
|
};
|
||||||
write!(f, "{code}")
|
write!(f, "{code}")
|
||||||
@@ -70,8 +85,11 @@ impl ErrorCode {
|
|||||||
Self::OllamaUnavailable => 14,
|
Self::OllamaUnavailable => 14,
|
||||||
Self::OllamaModelNotFound => 15,
|
Self::OllamaModelNotFound => 15,
|
||||||
Self::EmbeddingFailed => 16,
|
Self::EmbeddingFailed => 16,
|
||||||
|
Self::EmbeddingsNotBuilt => 21,
|
||||||
Self::NotFound => 17,
|
Self::NotFound => 17,
|
||||||
Self::Ambiguous => 18,
|
Self::Ambiguous => 18,
|
||||||
|
Self::HealthCheckFailed => 19,
|
||||||
|
Self::UsageError => 2,
|
||||||
// Shares exit code 6 with GitLabNotFound — same semantic category (resource not found).
|
// Shares exit code 6 with GitLabNotFound — same semantic category (resource not found).
|
||||||
// Robot consumers distinguish via ErrorCode string, not exit code.
|
// Robot consumers distinguish via ErrorCode string, not exit code.
|
||||||
Self::SurgicalPreflightFailed => 6,
|
Self::SurgicalPreflightFailed => 6,
|
||||||
@@ -99,8 +117,8 @@ pub enum LoreError {
|
|||||||
#[error("Cannot connect to GitLab at {base_url}")]
|
#[error("Cannot connect to GitLab at {base_url}")]
|
||||||
GitLabNetworkError {
|
GitLabNetworkError {
|
||||||
base_url: String,
|
base_url: String,
|
||||||
#[source]
|
kind: NetworkErrorKind,
|
||||||
source: Option<reqwest::Error>,
|
detail: Option<String>,
|
||||||
},
|
},
|
||||||
|
|
||||||
#[error(
|
#[error(
|
||||||
@@ -122,9 +140,6 @@ pub enum LoreError {
|
|||||||
#[error("Database error: {0}")]
|
#[error("Database error: {0}")]
|
||||||
Database(#[from] rusqlite::Error),
|
Database(#[from] rusqlite::Error),
|
||||||
|
|
||||||
#[error("HTTP error: {0}")]
|
|
||||||
Http(#[from] reqwest::Error),
|
|
||||||
|
|
||||||
#[error("JSON error: {0}")]
|
#[error("JSON error: {0}")]
|
||||||
Json(#[from] serde_json::Error),
|
Json(#[from] serde_json::Error),
|
||||||
|
|
||||||
@@ -146,8 +161,7 @@ pub enum LoreError {
|
|||||||
#[error("Cannot connect to Ollama at {base_url}. Is it running?")]
|
#[error("Cannot connect to Ollama at {base_url}. Is it running?")]
|
||||||
OllamaUnavailable {
|
OllamaUnavailable {
|
||||||
base_url: String,
|
base_url: String,
|
||||||
#[source]
|
detail: Option<String>,
|
||||||
source: Option<reqwest::Error>,
|
|
||||||
},
|
},
|
||||||
|
|
||||||
#[error("Ollama model '{model}' not found. Run: ollama pull {model}")]
|
#[error("Ollama model '{model}' not found. Run: ollama pull {model}")]
|
||||||
@@ -187,7 +201,6 @@ impl LoreError {
|
|||||||
ErrorCode::DatabaseError
|
ErrorCode::DatabaseError
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Self::Http(_) => ErrorCode::GitLabNetworkError,
|
|
||||||
Self::Json(_) => ErrorCode::InternalError,
|
Self::Json(_) => ErrorCode::InternalError,
|
||||||
Self::Io(_) => ErrorCode::IoError,
|
Self::Io(_) => ErrorCode::IoError,
|
||||||
Self::Transform(_) => ErrorCode::TransformError,
|
Self::Transform(_) => ErrorCode::TransformError,
|
||||||
@@ -197,7 +210,7 @@ impl LoreError {
|
|||||||
Self::OllamaUnavailable { .. } => ErrorCode::OllamaUnavailable,
|
Self::OllamaUnavailable { .. } => ErrorCode::OllamaUnavailable,
|
||||||
Self::OllamaModelNotFound { .. } => ErrorCode::OllamaModelNotFound,
|
Self::OllamaModelNotFound { .. } => ErrorCode::OllamaModelNotFound,
|
||||||
Self::EmbeddingFailed { .. } => ErrorCode::EmbeddingFailed,
|
Self::EmbeddingFailed { .. } => ErrorCode::EmbeddingFailed,
|
||||||
Self::EmbeddingsNotBuilt => ErrorCode::EmbeddingFailed,
|
Self::EmbeddingsNotBuilt => ErrorCode::EmbeddingsNotBuilt,
|
||||||
Self::SurgicalPreflightFailed { .. } => ErrorCode::SurgicalPreflightFailed,
|
Self::SurgicalPreflightFailed { .. } => ErrorCode::SurgicalPreflightFailed,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -238,7 +251,6 @@ impl LoreError {
|
|||||||
Some("Check database file permissions.\n\n Example:\n lore doctor")
|
Some("Check database file permissions.\n\n Example:\n lore doctor")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Self::Http(_) => Some("Check network connection"),
|
|
||||||
Self::NotFound(_) => {
|
Self::NotFound(_) => {
|
||||||
Some("Verify the entity exists.\n\n Example:\n lore issues\n lore mrs")
|
Some("Verify the entity exists.\n\n Example:\n lore issues\n lore mrs")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,26 +4,17 @@ pub mod config;
|
|||||||
pub mod cron;
|
pub mod cron;
|
||||||
pub mod cursor;
|
pub mod cursor;
|
||||||
pub mod db;
|
pub mod db;
|
||||||
pub mod dependent_queue;
|
|
||||||
pub mod error;
|
pub mod error;
|
||||||
pub mod events_db;
|
|
||||||
pub mod file_history;
|
pub mod file_history;
|
||||||
pub mod lock;
|
pub mod lock;
|
||||||
pub mod logging;
|
pub mod logging;
|
||||||
pub mod metrics;
|
pub mod metrics;
|
||||||
pub mod note_parser;
|
pub mod ollama_mgmt;
|
||||||
pub mod path_resolver;
|
pub mod path_resolver;
|
||||||
pub mod paths;
|
pub mod paths;
|
||||||
pub mod payloads;
|
|
||||||
pub mod project;
|
pub mod project;
|
||||||
pub mod references;
|
|
||||||
pub mod shutdown;
|
pub mod shutdown;
|
||||||
pub mod sync_run;
|
|
||||||
pub mod time;
|
pub mod time;
|
||||||
pub mod timeline;
|
|
||||||
pub mod timeline_collect;
|
|
||||||
pub mod timeline_expand;
|
|
||||||
pub mod timeline_seed;
|
|
||||||
pub mod trace;
|
pub mod trace;
|
||||||
|
|
||||||
pub use config::Config;
|
pub use config::Config;
|
||||||
|
|||||||
542
src/core/ollama_mgmt.rs
Normal file
542
src/core/ollama_mgmt.rs
Normal file
@@ -0,0 +1,542 @@
|
|||||||
|
use std::net::{TcpStream, ToSocketAddrs};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::process::Command;
|
||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
// ── URL parsing helpers ──
|
||||||
|
|
||||||
|
/// Return the host component of a URL such as `http://gpu-server:11434`.
/// Bracketed IPv6 literals (`http://[::1]:11434`) keep their brackets,
/// e.g. `[::1]`.
fn extract_host(base_url: &str) -> &str {
    let rest = base_url
        .strip_prefix("http://")
        .or_else(|| base_url.strip_prefix("https://"))
        .unwrap_or(base_url);

    // Bracketed IPv6 literal: keep everything up to and including ']'.
    if rest.starts_with('[') {
        return match rest.find(']') {
            Some(end) => &rest[..=end],
            None => rest,
        };
    }

    // Otherwise the host ends at the first ':' (port) or '/' (path).
    let before_port = rest.split(':').next().unwrap_or(rest);
    before_port.split('/').next().unwrap_or(before_port)
}

/// Return the port from a URL such as `http://localhost:11434`,
/// tolerating trailing paths and slashes (`http://host:8080/api`).
/// Falls back to Ollama's default port when none can be parsed.
fn extract_port(base_url: &str) -> u16 {
    const DEFAULT_PORT: u16 = 11434;
    match base_url.rsplit(':').next() {
        Some(tail) => {
            // Drop anything after the port digits (path, trailing slash).
            let digits = tail.split('/').next().unwrap_or(tail);
            digits.parse().unwrap_or(DEFAULT_PORT)
        }
        None => DEFAULT_PORT,
    }
}

/// Does this URL point at the local machine?
fn is_local_url(base_url: &str) -> bool {
    ["localhost", "127.0.0.1", "::1", "[::1]"].contains(&extract_host(base_url))
}
|
||||||
|
|
||||||
|
// ── Detection (sync, fast) ──
|
||||||
|
|
||||||
|
/// Find the `ollama` binary. Checks PATH first, then well-known install
/// locations as fallback (cron jobs have a minimal PATH that typically
/// excludes Homebrew and other user-installed paths).
pub fn find_ollama_binary() -> Option<PathBuf> {
    use std::env;

    // Scan PATH directly instead of shelling out to `which`: this avoids a
    // subprocess per lookup and still works in minimal environments
    // (cron/launchd) where `which` itself may not be on PATH.
    let from_path = env::var_os("PATH").and_then(|path| {
        env::split_paths(&path)
            .map(|dir| dir.join("ollama"))
            .find(|candidate| candidate.is_file())
    });
    if from_path.is_some() {
        return from_path;
    }

    // Fallback: check well-known locations (for cron/launchd contexts whose
    // PATH excludes Homebrew and other user-installed prefixes).
    const WELL_KNOWN: &[&str] = &[
        "/opt/homebrew/bin/ollama", // macOS Apple Silicon (Homebrew)
        "/usr/local/bin/ollama",    // macOS Intel (Homebrew) / Linux manual
        "/usr/bin/ollama",          // Linux package manager
        "/snap/bin/ollama",         // Linux Snap
    ];

    WELL_KNOWN.iter().map(PathBuf::from).find(|p| p.is_file())
}
|
||||||
|
|
||||||
|
/// Quick sync check: can we TCP-connect to Ollama's HTTP port?
|
||||||
|
/// Resolves the hostname from the URL (supports both local and remote hosts).
|
||||||
|
pub fn is_ollama_reachable(base_url: &str) -> bool {
|
||||||
|
let port = extract_port(base_url);
|
||||||
|
let host = extract_host(base_url);
|
||||||
|
let addr_str = format!("{host}:{port}");
|
||||||
|
|
||||||
|
let Ok(mut addrs) = addr_str.to_socket_addrs() else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
let Some(addr) = addrs.next() else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
TcpStream::connect_timeout(&addr, Duration::from_secs(2)).is_ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Platform-appropriate installation instructions.
pub fn install_instructions() -> &'static str {
    // `cfg!` resolves at compile time, so only one arm survives codegen.
    match () {
        _ if cfg!(target_os = "macos") => {
            "Install Ollama: brew install ollama (or https://ollama.ai/download)"
        }
        _ if cfg!(target_os = "linux") => {
            "Install Ollama: curl -fsSL https://ollama.ai/install.sh | sh"
        }
        _ => "Install Ollama: https://ollama.ai/download",
    }
}
|
||||||
|
|
||||||
|
// ── Ensure (sync, spawns ollama if needed) ──
|
||||||
|
|
||||||
|
/// Result of attempting to ensure Ollama is running.
///
/// Serialized (e.g. for JSON status output); `None` fields are omitted via
/// `skip_serializing_if`.
#[derive(Debug, Serialize)]
pub struct OllamaEnsureResult {
    /// Whether the `ollama` binary was found.
    pub installed: bool,
    /// Whether Ollama was already running before we tried anything.
    pub was_running: bool,
    /// Whether we successfully spawned `ollama serve`.
    pub started: bool,
    /// Whether Ollama is reachable now (after any start attempt).
    pub running: bool,
    /// Error message if something went wrong.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
    /// Installation instructions (set when ollama is not installed).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub install_hint: Option<String>,
}
|
||||||
|
|
||||||
|
/// Ensure Ollama is running. If not installed, returns error with install
|
||||||
|
/// instructions. If installed but not running, attempts to start it.
|
||||||
|
///
|
||||||
|
/// Only attempts to start `ollama serve` when the configured URL points at
|
||||||
|
/// localhost. For remote URLs, only checks reachability.
|
||||||
|
///
|
||||||
|
/// After spawning, waits only briefly (5 seconds) for hot restarts. Cold
|
||||||
|
/// starts can take 30-60 seconds, but the embed stage runs much later
|
||||||
|
/// (after ingestion, typically 60-90s) and will find Ollama ready by then.
|
||||||
|
/// This avoids blocking the sync pipeline unnecessarily.
|
||||||
|
pub fn ensure_ollama(base_url: &str) -> OllamaEnsureResult {
|
||||||
|
let is_local = is_local_url(base_url);
|
||||||
|
|
||||||
|
// Step 1: Is the binary installed? (only relevant for local)
|
||||||
|
let binary_path = if is_local {
|
||||||
|
let path = find_ollama_binary();
|
||||||
|
if path.is_none() {
|
||||||
|
return OllamaEnsureResult {
|
||||||
|
installed: false,
|
||||||
|
was_running: false,
|
||||||
|
started: false,
|
||||||
|
running: false,
|
||||||
|
error: Some("Ollama is not installed".to_string()),
|
||||||
|
install_hint: Some(install_instructions().to_string()),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
path
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
// Step 2: Already running?
|
||||||
|
if is_ollama_reachable(base_url) {
|
||||||
|
return OllamaEnsureResult {
|
||||||
|
installed: true,
|
||||||
|
was_running: true,
|
||||||
|
started: false,
|
||||||
|
running: true,
|
||||||
|
error: None,
|
||||||
|
install_hint: None,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 3: For remote URLs, we can't start ollama — just report unreachable
|
||||||
|
if !is_local {
|
||||||
|
return OllamaEnsureResult {
|
||||||
|
installed: true, // unknown, but irrelevant for remote
|
||||||
|
was_running: false,
|
||||||
|
started: false,
|
||||||
|
running: false,
|
||||||
|
error: Some(format!(
|
||||||
|
"Ollama at {base_url} is not reachable (remote — cannot auto-start)"
|
||||||
|
)),
|
||||||
|
install_hint: None,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 4: Try to start it (local only, using discovered absolute path)
|
||||||
|
// Using the absolute path is critical — cron has a minimal PATH that
|
||||||
|
// typically excludes Homebrew and other user-installed locations.
|
||||||
|
let ollama_bin = binary_path.expect("binary_path is Some for local URLs after step 1");
|
||||||
|
let spawn_result = Command::new(&ollama_bin)
|
||||||
|
.arg("serve")
|
||||||
|
.stdout(std::process::Stdio::null())
|
||||||
|
.stderr(std::process::Stdio::null())
|
||||||
|
.spawn();
|
||||||
|
|
||||||
|
if let Err(e) = spawn_result {
|
||||||
|
return OllamaEnsureResult {
|
||||||
|
installed: true,
|
||||||
|
was_running: false,
|
||||||
|
started: false,
|
||||||
|
running: false,
|
||||||
|
error: Some(format!("Failed to spawn 'ollama serve': {e}")),
|
||||||
|
install_hint: None,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 5: Brief wait for hot restarts (5 seconds).
|
||||||
|
// Cold starts take 30-60s but we don't block for that — ingestion runs
|
||||||
|
// for 60-90s before the embed stage needs Ollama, giving it plenty of
|
||||||
|
// time to boot in the background.
|
||||||
|
for _ in 0..10 {
|
||||||
|
std::thread::sleep(Duration::from_millis(500));
|
||||||
|
if is_ollama_reachable(base_url) {
|
||||||
|
return OllamaEnsureResult {
|
||||||
|
installed: true,
|
||||||
|
was_running: false,
|
||||||
|
started: true,
|
||||||
|
running: true,
|
||||||
|
error: None,
|
||||||
|
install_hint: None,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Spawn succeeded but Ollama is still starting up — report as started
|
||||||
|
// (not an error). It should be ready by the time the embed stage runs.
|
||||||
|
OllamaEnsureResult {
|
||||||
|
installed: true,
|
||||||
|
was_running: false,
|
||||||
|
started: true,
|
||||||
|
running: false,
|
||||||
|
error: None,
|
||||||
|
install_hint: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Brief status (for cron status display) ──
|
||||||
|
|
||||||
|
/// Lightweight status snapshot for display in `cron status`.
///
/// `None` fields are omitted from serialized output via `skip_serializing_if`.
#[derive(Debug, Clone, Serialize)]
pub struct OllamaStatusBrief {
    // Whether the `ollama` binary was found (always reported `true` for
    // remote URLs, where the local binary is irrelevant).
    pub installed: bool,
    // Whether the configured endpoint answered a TCP probe.
    pub running: bool,
    // Absolute path of the discovered binary, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub binary_path: Option<String>,
    // Platform-appropriate install instructions, set only when not installed.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub install_hint: Option<String>,
}
|
||||||
|
|
||||||
|
/// Quick, non-blocking Ollama status check for display purposes.
|
||||||
|
pub fn ollama_status_brief(base_url: &str) -> OllamaStatusBrief {
|
||||||
|
let is_local = is_local_url(base_url);
|
||||||
|
|
||||||
|
// For remote URLs, only check reachability (binary check is irrelevant)
|
||||||
|
if !is_local {
|
||||||
|
let running = is_ollama_reachable(base_url);
|
||||||
|
return OllamaStatusBrief {
|
||||||
|
installed: true, // unknown for remote, but not actionable
|
||||||
|
running,
|
||||||
|
binary_path: None,
|
||||||
|
install_hint: None,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
let binary_path = find_ollama_binary();
|
||||||
|
let installed = binary_path.is_some();
|
||||||
|
|
||||||
|
if !installed {
|
||||||
|
return OllamaStatusBrief {
|
||||||
|
installed: false,
|
||||||
|
running: false,
|
||||||
|
binary_path: None,
|
||||||
|
install_hint: Some(install_instructions().to_string()),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
let running = is_ollama_reachable(base_url);
|
||||||
|
|
||||||
|
OllamaStatusBrief {
|
||||||
|
installed: true,
|
||||||
|
running,
|
||||||
|
binary_path: binary_path.map(|p| p.display().to_string()),
|
||||||
|
install_hint: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    //! Unit tests for URL parsing, reachability probing, and the
    //! serialization shape of the status/ensure result types.
    use super::*;

    // ── URL parsing ──

    #[test]
    fn extract_port_default_url() {
        assert_eq!(extract_port("http://localhost:11434"), 11434);
    }

    #[test]
    fn extract_port_custom() {
        assert_eq!(extract_port("http://192.168.1.5:9999"), 9999);
    }

    #[test]
    fn extract_port_trailing_slash() {
        assert_eq!(extract_port("http://localhost:11434/"), 11434);
    }

    #[test]
    fn extract_port_with_path() {
        assert_eq!(extract_port("http://localhost:8080/api/generate"), 8080);
    }

    #[test]
    fn extract_port_no_port() {
        // No explicit port falls back to Ollama's default, 11434.
        assert_eq!(extract_port("http://localhost"), 11434);
    }

    #[test]
    fn extract_port_https() {
        assert_eq!(extract_port("https://ollama.internal:8080"), 8080);
    }

    #[test]
    fn extract_host_localhost() {
        assert_eq!(extract_host("http://localhost:11434"), "localhost");
    }

    #[test]
    fn extract_host_ip() {
        assert_eq!(extract_host("http://192.168.1.5:9999"), "192.168.1.5");
    }

    #[test]
    fn extract_host_remote() {
        assert_eq!(extract_host("http://gpu-server:11434"), "gpu-server");
    }

    #[test]
    fn extract_host_no_port() {
        assert_eq!(extract_host("http://localhost"), "localhost");
    }

    #[test]
    fn extract_host_https() {
        assert_eq!(
            extract_host("https://ollama.internal:8080"),
            "ollama.internal"
        );
    }

    #[test]
    fn extract_host_no_scheme() {
        // Scheme prefix is optional; bare host:port must still parse.
        assert_eq!(extract_host("localhost:11434"), "localhost");
    }

    // ── is_local_url ──

    #[test]
    fn is_local_url_localhost() {
        assert!(is_local_url("http://localhost:11434"));
    }

    #[test]
    fn is_local_url_loopback() {
        assert!(is_local_url("http://127.0.0.1:11434"));
    }

    #[test]
    fn is_local_url_ipv6_loopback() {
        assert!(is_local_url("http://[::1]:11434"));
    }

    #[test]
    fn is_local_url_remote() {
        assert!(!is_local_url("http://gpu-server:11434"));
        assert!(!is_local_url("http://192.168.1.5:11434"));
    }

    #[test]
    fn is_local_url_fqdn_not_local() {
        assert!(!is_local_url("http://ollama.example.com:11434"));
    }

    // ── install_instructions ──

    #[test]
    fn install_instructions_not_empty() {
        assert!(!install_instructions().is_empty());
        assert!(install_instructions().contains("ollama"));
    }

    #[test]
    fn install_instructions_contains_url() {
        assert!(install_instructions().contains("ollama.ai"));
    }

    // ── is_ollama_reachable ──

    #[test]
    fn reachable_returns_false_for_closed_port() {
        // Port 1 is almost never open and requires root to bind
        assert!(!is_ollama_reachable("http://127.0.0.1:1"));
    }

    #[test]
    fn reachable_returns_false_for_unresolvable_host() {
        assert!(!is_ollama_reachable(
            "http://this-host-does-not-exist-xyzzy:11434"
        ));
    }

    // ── OllamaEnsureResult serialization ──

    #[test]
    fn ensure_result_serializes_installed_running() {
        let result = OllamaEnsureResult {
            installed: true,
            was_running: true,
            started: false,
            running: true,
            error: None,
            install_hint: None,
        };
        let json: serde_json::Value = serde_json::to_value(&result).unwrap();
        assert_eq!(json["installed"], true);
        assert_eq!(json["was_running"], true);
        assert_eq!(json["started"], false);
        assert_eq!(json["running"], true);
        // skip_serializing_if: None fields should be absent
        assert!(json.get("error").is_none());
        assert!(json.get("install_hint").is_none());
    }

    #[test]
    fn ensure_result_serializes_not_installed() {
        let result = OllamaEnsureResult {
            installed: false,
            was_running: false,
            started: false,
            running: false,
            error: Some("Ollama is not installed".to_string()),
            install_hint: Some("Install Ollama: brew install ollama".to_string()),
        };
        let json: serde_json::Value = serde_json::to_value(&result).unwrap();
        assert_eq!(json["installed"], false);
        assert_eq!(json["running"], false);
        assert_eq!(json["error"], "Ollama is not installed");
        assert!(
            json["install_hint"]
                .as_str()
                .unwrap()
                .contains("brew install")
        );
    }

    // ── OllamaStatusBrief serialization ──

    #[test]
    fn status_brief_serializes_with_optional_fields() {
        let brief = OllamaStatusBrief {
            installed: true,
            running: true,
            binary_path: Some("/usr/local/bin/ollama".to_string()),
            install_hint: None,
        };
        let json: serde_json::Value = serde_json::to_value(&brief).unwrap();
        assert_eq!(json["installed"], true);
        assert_eq!(json["running"], true);
        assert_eq!(json["binary_path"], "/usr/local/bin/ollama");
        assert!(json.get("install_hint").is_none());
    }

    #[test]
    fn status_brief_serializes_not_installed() {
        let brief = OllamaStatusBrief {
            installed: false,
            running: false,
            binary_path: None,
            install_hint: Some("Install Ollama".to_string()),
        };
        let json: serde_json::Value = serde_json::to_value(&brief).unwrap();
        assert_eq!(json["installed"], false);
        assert_eq!(json["running"], false);
        assert!(json.get("binary_path").is_none());
        assert_eq!(json["install_hint"], "Install Ollama");
    }

    #[test]
    fn status_brief_clone() {
        let original = OllamaStatusBrief {
            installed: true,
            running: false,
            binary_path: Some("/opt/bin/ollama".to_string()),
            install_hint: None,
        };
        let cloned = original.clone();
        assert_eq!(original.installed, cloned.installed);
        assert_eq!(original.running, cloned.running);
        assert_eq!(original.binary_path, cloned.binary_path);
        assert_eq!(original.install_hint, cloned.install_hint);
    }

    // ── ensure_ollama with remote URL ──

    #[test]
    fn ensure_remote_unreachable_does_not_set_install_hint() {
        // A remote URL that nothing listens on — should NOT suggest local install
        let result = ensure_ollama("http://192.0.2.1:1"); // TEST-NET, will fail fast
        assert!(!result.started);
        assert!(!result.running);
        assert!(
            result.install_hint.is_none(),
            "remote URLs should not suggest local install"
        );
        assert!(
            result.error.as_deref().unwrap_or("").contains("remote"),
            "error should mention 'remote': {:?}",
            result.error,
        );
    }

    // ── ensure_ollama with local URL (binary check) ──

    #[test]
    fn ensure_local_closed_port_not_already_running() {
        // Local URL pointing at a port nothing listens on
        let result = ensure_ollama("http://127.0.0.1:1");
        // Should NOT report was_running since port 1 is closed
        assert!(!result.was_running);
        assert!(!result.running);
        // If ollama binary is not installed, should get install hint
        if !result.installed {
            assert!(result.install_hint.is_some());
            assert!(
                result
                    .error
                    .as_deref()
                    .unwrap_or("")
                    .contains("not installed")
            );
        }
    }
}
|
||||||
@@ -154,3 +154,25 @@ fn test_percent_not_wildcard() {
|
|||||||
let id = resolve_project(&conn, "a%b").unwrap();
|
let id = resolve_project(&conn, "a%b").unwrap();
|
||||||
assert_eq!(id, 1);
|
assert_eq!(id, 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
fn test_lookup_by_gitlab_project_id() {
    // Verifies that projects can be resolved via their GitLab-side id
    // (stored in `gitlab_project_id`) rather than the local rowid.
    use crate::test_support::{insert_project as insert_proj, setup_test_db};

    let conn = setup_test_db();
    insert_proj(&conn, 1, "team/alpha");
    insert_proj(&conn, 2, "team/beta");

    // insert_project sets gitlab_project_id = id * 100
    let path: String = conn
        .query_row(
            "SELECT path_with_namespace FROM projects
             WHERE gitlab_project_id = ?1
             ORDER BY id LIMIT 1",
            rusqlite::params![200_i64],
            |row| row.get(0),
        )
        .unwrap();

    // gitlab_project_id 200 => local id 2 => team/beta
    assert_eq!(path, "team/beta");
}
|
||||||
|
|||||||
@@ -1,6 +1,22 @@
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::sync::atomic::{AtomicBool, Ordering};
|
use std::sync::atomic::{AtomicBool, Ordering};
|
||||||
|
|
||||||
|
use asupersync::runtime::RuntimeHandle;
|
||||||
|
|
||||||
|
/// Spawn a background task that listens for Ctrl+C.
///
/// First press: cancels `signal` and prints an interrupt message.
/// Second press: force-exits with code 130.
pub fn install_ctrl_c_handler(handle: &RuntimeHandle, signal: ShutdownSignal) {
    handle.spawn(async move {
        // First Ctrl+C: request cooperative shutdown and keep running so
        // in-flight work can finish the current batch.
        let _ = asupersync::signal::ctrl_c().await;
        eprintln!("\nInterrupted, finishing current batch... (Ctrl+C again to force quit)");
        signal.cancel();
        // Second Ctrl+C: abandon graceful shutdown. 130 = 128 + SIGINT,
        // the conventional exit code for an interrupted process.
        let _ = asupersync::signal::ctrl_c().await;
        std::process::exit(130);
    });
}
|
||||||
|
|
||||||
/// A cooperative cancellation token for graceful shutdown.
|
/// A cooperative cancellation token for graceful shutdown.
|
||||||
///
|
///
|
||||||
/// Clone-able and cheaply checkable from any thread or async task.
|
/// Clone-able and cheaply checkable from any thread or async task.
|
||||||
|
|||||||
@@ -1,512 +0,0 @@
|
|||||||
use super::*;
|
|
||||||
use crate::core::db::{create_connection, run_migrations};
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
// In-memory SQLite database with all migrations applied; one per test.
fn setup_test_db() -> Connection {
    let conn = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    conn
}

// Insert a fixture project and return its local rowid.
fn insert_test_project(conn: &Connection) -> i64 {
    conn.execute(
        "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/project', 'https://gitlab.com/group/project')",
        [],
    )
    .unwrap();
    conn.last_insert_rowid()
}

// Insert a fixture issue; gitlab_id is derived as iid * 100 to stay unique.
fn insert_test_issue(conn: &Connection, project_id: i64, iid: i64) -> i64 {
    conn.execute(
        "INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test issue', 'opened', 'alice', 1000, 2000, 3000)",
        rusqlite::params![iid * 100, project_id, iid],
    )
    .unwrap();
    conn.last_insert_rowid()
}

// Insert a fixture merge request; same iid * 100 convention as issues.
fn insert_test_mr(conn: &Connection, project_id: i64, iid: i64) -> i64 {
    conn.execute(
        "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test MR', 'opened', 'bob', 1000, 2000, 3000)",
        rusqlite::params![iid * 100, project_id, iid],
    )
    .unwrap();
    conn.last_insert_rowid()
}

// Insert a searchable document row pointing at a source entity.
fn insert_document(
    conn: &Connection,
    source_type: &str,
    source_id: i64,
    project_id: i64,
    content: &str,
) -> i64 {
    conn.execute(
        "INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) VALUES (?1, ?2, ?3, ?4, ?5)",
        rusqlite::params![source_type, source_id, project_id, content, format!("hash_{source_id}")],
    )
    .unwrap();
    conn.last_insert_rowid()
}

// Insert a discussion attached to either an issue or a merge request
// (exactly one of issue_id / mr_id is expected to be Some).
fn insert_discussion(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
) -> i64 {
    let noteable_type = if issue_id.is_some() {
        "Issue"
    } else {
        "MergeRequest"
    };
    conn.execute(
        "INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, merge_request_id, noteable_type, last_seen_at) VALUES (?1, ?2, ?3, ?4, ?5, 0)",
        rusqlite::params![format!("disc_{}", rand::random::<u32>()), project_id, issue_id, mr_id, noteable_type],
    )
    .unwrap();
    conn.last_insert_rowid()
}

// Insert a note under a discussion; gitlab_id is randomized for uniqueness.
fn insert_note(
    conn: &Connection,
    discussion_id: i64,
    project_id: i64,
    body: &str,
    is_system: bool,
) -> i64 {
    let gitlab_id: i64 = rand::random::<u32>().into();
    conn.execute(
        "INSERT INTO notes (gitlab_id, discussion_id, project_id, is_system, author_username, body, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, ?4, 'alice', ?5, 5000, 5000, 5000)",
        rusqlite::params![gitlab_id, discussion_id, project_id, is_system as i32, body],
    )
    .unwrap();
    conn.last_insert_rowid()
}
|
|
||||||
|
|
||||||
// An empty query should short-circuit: no seeds, no evidence.
#[tokio::test]
async fn test_seed_empty_query_returns_empty() {
    let conn = setup_test_db();
    let result = seed_timeline(&conn, None, "", None, None, 50, 10)
        .await
        .unwrap();
    assert!(result.seed_entities.is_empty());
    assert!(result.evidence_notes.is_empty());
}

// A query matching nothing in the corpus yields no seed entities.
#[tokio::test]
async fn test_seed_no_matches_returns_empty() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 1);
    insert_document(
        &conn,
        "issue",
        issue_id,
        project_id,
        "unrelated content here",
    );

    let result = seed_timeline(&conn, None, "nonexistent_xyzzy_query", None, None, 50, 10)
        .await
        .unwrap();
    assert!(result.seed_entities.is_empty());
}

// A matching issue document surfaces the issue as a seed entity.
#[tokio::test]
async fn test_seed_finds_issue() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 42);
    insert_document(
        &conn,
        "issue",
        issue_id,
        project_id,
        "authentication error in login flow",
    );

    let result = seed_timeline(&conn, None, "authentication", None, None, 50, 10)
        .await
        .unwrap();
    assert_eq!(result.seed_entities.len(), 1);
    assert_eq!(result.seed_entities[0].entity_type, "issue");
    assert_eq!(result.seed_entities[0].entity_iid, 42);
    assert_eq!(result.seed_entities[0].project_path, "group/project");
}
|
|
||||||
|
|
||||||
// A matching MR document surfaces the merge request as a seed entity.
#[tokio::test]
async fn test_seed_finds_mr() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let mr_id = insert_test_mr(&conn, project_id, 99);
    insert_document(
        &conn,
        "merge_request",
        mr_id,
        project_id,
        "fix authentication bug",
    );

    let result = seed_timeline(&conn, None, "authentication", None, None, 50, 10)
        .await
        .unwrap();
    assert_eq!(result.seed_entities.len(), 1);
    assert_eq!(result.seed_entities[0].entity_type, "merge_request");
    assert_eq!(result.seed_entities[0].entity_iid, 99);
}

// Multiple documents resolving to the same entity produce a single seed.
#[tokio::test]
async fn test_seed_deduplicates_entities() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 10);

    // Two documents referencing the same issue
    insert_document(
        &conn,
        "issue",
        issue_id,
        project_id,
        "authentication error first doc",
    );
    let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
    insert_document(
        &conn,
        "discussion",
        disc_id,
        project_id,
        "authentication error second doc",
    );

    let result = seed_timeline(&conn, None, "authentication", None, None, 50, 10)
        .await
        .unwrap();
    // Should deduplicate: both map to the same issue
    assert_eq!(result.seed_entities.len(), 1);
    assert_eq!(result.seed_entities[0].entity_iid, 10);
}
|
|
||||||
|
|
||||||
// A matching discussion document is resolved to its parent issue.
#[tokio::test]
async fn test_seed_resolves_discussion_to_parent() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 7);
    let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
    insert_document(
        &conn,
        "discussion",
        disc_id,
        project_id,
        "deployment pipeline failed",
    );

    let result = seed_timeline(&conn, None, "deployment", None, None, 50, 10)
        .await
        .unwrap();
    assert_eq!(result.seed_entities.len(), 1);
    assert_eq!(result.seed_entities[0].entity_type, "issue");
    assert_eq!(result.seed_entities[0].entity_iid, 7);
}

// The evidence-note list honours the max-evidence argument (last param).
#[tokio::test]
async fn test_seed_evidence_capped() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 1);

    // Create 15 discussion documents with notes about "deployment"
    for i in 0..15 {
        let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
        insert_document(
            &conn,
            "discussion",
            disc_id,
            project_id,
            &format!("deployment issue number {i}"),
        );
        insert_note(
            &conn,
            disc_id,
            project_id,
            &format!("deployment note {i}"),
            false,
        );
    }

    let result = seed_timeline(&conn, None, "deployment", None, None, 50, 5)
        .await
        .unwrap();
    assert!(result.evidence_notes.len() <= 5);
}
|
|
||||||
|
|
||||||
// Evidence snippets are truncated to at most 200 characters.
#[tokio::test]
async fn test_seed_evidence_snippet_truncated() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 1);
    let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
    insert_document(
        &conn,
        "discussion",
        disc_id,
        project_id,
        "deployment configuration",
    );

    // 500-char note body must be clipped in the resulting snippet.
    let long_body = "x".repeat(500);
    insert_note(&conn, disc_id, project_id, &long_body, false);

    let result = seed_timeline(&conn, None, "deployment", None, None, 50, 10)
        .await
        .unwrap();
    assert!(!result.evidence_notes.is_empty());
    if let TimelineEventType::NoteEvidence { snippet, .. } = &result.evidence_notes[0].event_type {
        assert!(snippet.chars().count() <= 200);
    } else {
        panic!("Expected NoteEvidence");
    }
}

// The optional project filter restricts seeds to the given project only.
#[tokio::test]
async fn test_seed_respects_project_filter() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);

    // Insert a second project
    conn.execute(
        "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (2, 'other/repo', 'https://gitlab.com/other/repo')",
        [],
    )
    .unwrap();
    let project2_id = conn.last_insert_rowid();

    let issue1_id = insert_test_issue(&conn, project_id, 1);
    insert_document(
        &conn,
        "issue",
        issue1_id,
        project_id,
        "authentication error",
    );

    let issue2_id = insert_test_issue(&conn, project2_id, 2);
    insert_document(
        &conn,
        "issue",
        issue2_id,
        project2_id,
        "authentication error",
    );

    // Filter to project 1 only
    let result = seed_timeline(
        &conn,
        None,
        "authentication",
        Some(project_id),
        None,
        50,
        10,
    )
    .await
    .unwrap();
    assert_eq!(result.seed_entities.len(), 1);
    assert_eq!(result.seed_entities[0].project_path, "group/project");
}
|
|
||||||
|
|
||||||
// ─── Matched discussion tests ───────────────────────────────────────────────
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_seed_captures_matched_discussions_from_discussion_doc() {
|
|
||||||
let conn = setup_test_db();
|
|
||||||
let project_id = insert_test_project(&conn);
|
|
||||||
let issue_id = insert_test_issue(&conn, project_id, 1);
|
|
||||||
let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
|
|
||||||
insert_document(
|
|
||||||
&conn,
|
|
||||||
"discussion",
|
|
||||||
disc_id,
|
|
||||||
project_id,
|
|
||||||
"deployment pipeline authentication",
|
|
||||||
);
|
|
||||||
|
|
||||||
let result = seed_timeline(&conn, None, "deployment", None, None, 50, 10)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(result.matched_discussions.len(), 1);
|
|
||||||
assert_eq!(result.matched_discussions[0].discussion_id, disc_id);
|
|
||||||
assert_eq!(result.matched_discussions[0].entity_type, "issue");
|
|
||||||
assert_eq!(result.matched_discussions[0].entity_id, issue_id);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_seed_captures_matched_discussions_from_note_doc() {
|
|
||||||
let conn = setup_test_db();
|
|
||||||
let project_id = insert_test_project(&conn);
|
|
||||||
let issue_id = insert_test_issue(&conn, project_id, 1);
|
|
||||||
let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
|
|
||||||
let note_id = insert_note(&conn, disc_id, project_id, "note about deployment", false);
|
|
||||||
insert_document(
|
|
||||||
&conn,
|
|
||||||
"note",
|
|
||||||
note_id,
|
|
||||||
project_id,
|
|
||||||
"deployment configuration details",
|
|
||||||
);
|
|
||||||
|
|
||||||
let result = seed_timeline(&conn, None, "deployment", None, None, 50, 10)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
result.matched_discussions.len(),
|
|
||||||
1,
|
|
||||||
"Note doc should resolve to parent discussion"
|
|
||||||
);
|
|
||||||
assert_eq!(result.matched_discussions[0].discussion_id, disc_id);
|
|
||||||
assert_eq!(result.matched_discussions[0].entity_type, "issue");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_seed_deduplicates_matched_discussions() {
|
|
||||||
let conn = setup_test_db();
|
|
||||||
let project_id = insert_test_project(&conn);
|
|
||||||
let issue_id = insert_test_issue(&conn, project_id, 1);
|
|
||||||
let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
|
|
||||||
|
|
||||||
// Two docs referencing the same discussion
|
|
||||||
insert_document(
|
|
||||||
&conn,
|
|
||||||
"discussion",
|
|
||||||
disc_id,
|
|
||||||
project_id,
|
|
||||||
"deployment pipeline first doc",
|
|
||||||
);
|
|
||||||
let note_id = insert_note(&conn, disc_id, project_id, "deployment note", false);
|
|
||||||
insert_document(
|
|
||||||
&conn,
|
|
||||||
"note",
|
|
||||||
note_id,
|
|
||||||
project_id,
|
|
||||||
"deployment pipeline second doc",
|
|
||||||
);
|
|
||||||
|
|
||||||
let result = seed_timeline(&conn, None, "deployment", None, None, 50, 10)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
result.matched_discussions.len(),
|
|
||||||
1,
|
|
||||||
"Same discussion_id from two docs should deduplicate"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_seed_matched_discussions_have_correct_parent_entity() {
|
|
||||||
let conn = setup_test_db();
|
|
||||||
let project_id = insert_test_project(&conn);
|
|
||||||
let mr_id = insert_test_mr(&conn, project_id, 99);
|
|
||||||
let disc_id = insert_discussion(&conn, project_id, None, Some(mr_id));
|
|
||||||
insert_document(
|
|
||||||
&conn,
|
|
||||||
"discussion",
|
|
||||||
disc_id,
|
|
||||||
project_id,
|
|
||||||
"deployment pipeline for merge request",
|
|
||||||
);
|
|
||||||
|
|
||||||
let result = seed_timeline(&conn, None, "deployment", None, None, 50, 10)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(result.matched_discussions.len(), 1);
|
|
||||||
assert_eq!(result.matched_discussions[0].entity_type, "merge_request");
|
|
||||||
assert_eq!(result.matched_discussions[0].entity_id, mr_id);
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─── seed_timeline_direct tests ─────────────────────────────────────────────
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_direct_seed_resolves_entity() {
|
|
||||||
let conn = setup_test_db();
|
|
||||||
let project_id = insert_test_project(&conn);
|
|
||||||
insert_test_issue(&conn, project_id, 42);
|
|
||||||
|
|
||||||
let result = seed_timeline_direct(&conn, "issue", 42, None).unwrap();
|
|
||||||
assert_eq!(result.seed_entities.len(), 1);
|
|
||||||
assert_eq!(result.seed_entities[0].entity_type, "issue");
|
|
||||||
assert_eq!(result.seed_entities[0].entity_iid, 42);
|
|
||||||
assert_eq!(result.seed_entities[0].project_path, "group/project");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_direct_seed_gathers_all_discussions() {
|
|
||||||
let conn = setup_test_db();
|
|
||||||
let project_id = insert_test_project(&conn);
|
|
||||||
let issue_id = insert_test_issue(&conn, project_id, 42);
|
|
||||||
|
|
||||||
// Create 3 discussions for this issue
|
|
||||||
let disc1 = insert_discussion(&conn, project_id, Some(issue_id), None);
|
|
||||||
let disc2 = insert_discussion(&conn, project_id, Some(issue_id), None);
|
|
||||||
let disc3 = insert_discussion(&conn, project_id, Some(issue_id), None);
|
|
||||||
|
|
||||||
let result = seed_timeline_direct(&conn, "issue", 42, None).unwrap();
|
|
||||||
assert_eq!(result.matched_discussions.len(), 3);
|
|
||||||
let disc_ids: Vec<i64> = result
|
|
||||||
.matched_discussions
|
|
||||||
.iter()
|
|
||||||
.map(|d| d.discussion_id)
|
|
||||||
.collect();
|
|
||||||
assert!(disc_ids.contains(&disc1));
|
|
||||||
assert!(disc_ids.contains(&disc2));
|
|
||||||
assert!(disc_ids.contains(&disc3));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_direct_seed_no_evidence_notes() {
|
|
||||||
let conn = setup_test_db();
|
|
||||||
let project_id = insert_test_project(&conn);
|
|
||||||
let issue_id = insert_test_issue(&conn, project_id, 42);
|
|
||||||
let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
|
|
||||||
insert_note(&conn, disc_id, project_id, "some note body", false);
|
|
||||||
|
|
||||||
let result = seed_timeline_direct(&conn, "issue", 42, None).unwrap();
|
|
||||||
assert!(
|
|
||||||
result.evidence_notes.is_empty(),
|
|
||||||
"Direct seeding should not produce evidence notes"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_direct_seed_search_mode_is_direct() {
|
|
||||||
let conn = setup_test_db();
|
|
||||||
let project_id = insert_test_project(&conn);
|
|
||||||
insert_test_issue(&conn, project_id, 42);
|
|
||||||
|
|
||||||
let result = seed_timeline_direct(&conn, "issue", 42, None).unwrap();
|
|
||||||
assert_eq!(result.search_mode, "direct");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_direct_seed_not_found() {
|
|
||||||
let conn = setup_test_db();
|
|
||||||
insert_test_project(&conn);
|
|
||||||
|
|
||||||
let result = seed_timeline_direct(&conn, "issue", 999, None);
|
|
||||||
assert!(result.is_err());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_direct_seed_mr() {
|
|
||||||
let conn = setup_test_db();
|
|
||||||
let project_id = insert_test_project(&conn);
|
|
||||||
let mr_id = insert_test_mr(&conn, project_id, 99);
|
|
||||||
let disc_id = insert_discussion(&conn, project_id, None, Some(mr_id));
|
|
||||||
|
|
||||||
let result = seed_timeline_direct(&conn, "merge_request", 99, None).unwrap();
|
|
||||||
assert_eq!(result.seed_entities.len(), 1);
|
|
||||||
assert_eq!(result.seed_entities[0].entity_type, "merge_request");
|
|
||||||
assert_eq!(result.seed_entities[0].entity_iid, 99);
|
|
||||||
assert_eq!(result.matched_discussions.len(), 1);
|
|
||||||
assert_eq!(result.matched_discussions[0].discussion_id, disc_id);
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
133
src/documents/extractor/common.rs
Normal file
133
src/documents/extractor/common.rs
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "snake_case")]
|
||||||
|
pub enum SourceType {
|
||||||
|
Issue,
|
||||||
|
MergeRequest,
|
||||||
|
Discussion,
|
||||||
|
Note,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SourceType {
|
||||||
|
pub fn as_str(&self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
Self::Issue => "issue",
|
||||||
|
Self::MergeRequest => "merge_request",
|
||||||
|
Self::Discussion => "discussion",
|
||||||
|
Self::Note => "note",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse(s: &str) -> Option<Self> {
|
||||||
|
match s.to_lowercase().as_str() {
|
||||||
|
"issue" | "issues" => Some(Self::Issue),
|
||||||
|
"mr" | "mrs" | "merge_request" | "merge_requests" => Some(Self::MergeRequest),
|
||||||
|
"discussion" | "discussions" => Some(Self::Discussion),
|
||||||
|
"note" | "notes" => Some(Self::Note),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for SourceType {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
write!(f, "{}", self.as_str())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct DocumentData {
|
||||||
|
pub source_type: SourceType,
|
||||||
|
pub source_id: i64,
|
||||||
|
pub project_id: i64,
|
||||||
|
pub author_username: Option<String>,
|
||||||
|
pub labels: Vec<String>,
|
||||||
|
pub paths: Vec<String>,
|
||||||
|
pub labels_hash: String,
|
||||||
|
pub paths_hash: String,
|
||||||
|
pub created_at: i64,
|
||||||
|
pub updated_at: i64,
|
||||||
|
pub url: Option<String>,
|
||||||
|
pub title: Option<String>,
|
||||||
|
pub content_text: String,
|
||||||
|
pub content_hash: String,
|
||||||
|
pub is_truncated: bool,
|
||||||
|
pub truncated_reason: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn compute_content_hash(content: &str) -> String {
|
||||||
|
let mut hasher = Sha256::new();
|
||||||
|
hasher.update(content.as_bytes());
|
||||||
|
format!("{:x}", hasher.finalize())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn compute_list_hash(items: &[String]) -> String {
|
||||||
|
let mut indices: Vec<usize> = (0..items.len()).collect();
|
||||||
|
indices.sort_by(|a, b| items[*a].cmp(&items[*b]));
|
||||||
|
let mut hasher = Sha256::new();
|
||||||
|
for (i, &idx) in indices.iter().enumerate() {
|
||||||
|
if i > 0 {
|
||||||
|
hasher.update(b"\n");
|
||||||
|
}
|
||||||
|
hasher.update(items[idx].as_bytes());
|
||||||
|
}
|
||||||
|
format!("{:x}", hasher.finalize())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Strip GitLab-generated boilerplate from titles before embedding.
|
||||||
|
///
|
||||||
|
/// Common patterns that inflate embedding similarity between unrelated entities:
|
||||||
|
/// - `Draft: Resolve "Actual Title"` → `Actual Title`
|
||||||
|
/// - `Resolve "Actual Title"` → `Actual Title`
|
||||||
|
/// - `Draft: Some Title` → `Some Title`
|
||||||
|
/// - `WIP: Some Title` → `Some Title`
|
||||||
|
///
|
||||||
|
/// The original title is preserved in `DocumentData.title` for display;
|
||||||
|
/// this function only affects `content_text` (what gets embedded).
|
||||||
|
fn normalize_title_for_embedding(title: &str) -> &str {
|
||||||
|
let mut s = title;
|
||||||
|
|
||||||
|
// Strip leading "Draft: " and/or "WIP: " (case-insensitive, repeatable).
|
||||||
|
// Use `get()` for slicing — direct `str[..N]` panics if byte N is mid-character
|
||||||
|
// (e.g. titles starting with emoji or accented characters).
|
||||||
|
loop {
|
||||||
|
let trimmed = s.trim_start();
|
||||||
|
if trimmed
|
||||||
|
.get(..6)
|
||||||
|
.is_some_and(|p| p.eq_ignore_ascii_case("draft:"))
|
||||||
|
{
|
||||||
|
s = trimmed[6..].trim_start();
|
||||||
|
} else if trimmed
|
||||||
|
.get(..4)
|
||||||
|
.is_some_and(|p| p.eq_ignore_ascii_case("wip:"))
|
||||||
|
{
|
||||||
|
s = trimmed[4..].trim_start();
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Strip `Resolve "..."` wrapper (case-insensitive)
|
||||||
|
if s.len() >= 10
|
||||||
|
&& s.get(..8).is_some_and(|p| p.eq_ignore_ascii_case("resolve "))
|
||||||
|
&& s.as_bytes()[8] == b'"'
|
||||||
|
&& let Some(end) = s[9..].rfind('"')
|
||||||
|
{
|
||||||
|
let inner = &s[9..9 + end];
|
||||||
|
if !inner.is_empty() {
|
||||||
|
return inner;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Guard: if stripping left us with nothing, return the original
|
||||||
|
if s.is_empty() {
|
||||||
|
return title;
|
||||||
|
}
|
||||||
|
|
||||||
|
s
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_date(ms: i64) -> String {
|
||||||
|
DateTime::from_timestamp_millis(ms)
|
||||||
|
.map(|dt| dt.format("%Y-%m-%d").to_string())
|
||||||
|
.unwrap_or_else(|| "unknown".to_string())
|
||||||
|
}
|
||||||
217
src/documents/extractor/discussions.rs
Normal file
217
src/documents/extractor/discussions.rs
Normal file
@@ -0,0 +1,217 @@
|
|||||||
|
pub fn extract_discussion_document(
|
||||||
|
conn: &Connection,
|
||||||
|
discussion_id: i64,
|
||||||
|
) -> Result<Option<DocumentData>> {
|
||||||
|
let disc_row = conn.query_row(
|
||||||
|
"SELECT d.id, d.noteable_type, d.issue_id, d.merge_request_id,
|
||||||
|
p.path_with_namespace, p.id AS project_id
|
||||||
|
FROM discussions d
|
||||||
|
JOIN projects p ON p.id = d.project_id
|
||||||
|
WHERE d.id = ?1",
|
||||||
|
rusqlite::params![discussion_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, String>(1)?,
|
||||||
|
row.get::<_, Option<i64>>(2)?,
|
||||||
|
row.get::<_, Option<i64>>(3)?,
|
||||||
|
row.get::<_, String>(4)?,
|
||||||
|
row.get::<_, i64>(5)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
let (id, noteable_type, issue_id, merge_request_id, path_with_namespace, project_id) =
|
||||||
|
match disc_row {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let (_parent_iid, parent_title, parent_web_url, parent_type_prefix, labels) =
|
||||||
|
match noteable_type.as_str() {
|
||||||
|
"Issue" => {
|
||||||
|
let parent_id = match issue_id {
|
||||||
|
Some(pid) => pid,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
let parent = conn.query_row(
|
||||||
|
"SELECT i.iid, i.title, i.web_url FROM issues i WHERE i.id = ?1",
|
||||||
|
rusqlite::params![parent_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, Option<String>>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let (iid, title, web_url) = match parent {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM issue_labels il
|
||||||
|
JOIN labels l ON l.id = il.label_id
|
||||||
|
WHERE il.issue_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![parent_id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
(iid, title, web_url, format!("Issue #{}", iid), labels)
|
||||||
|
}
|
||||||
|
"MergeRequest" => {
|
||||||
|
let parent_id = match merge_request_id {
|
||||||
|
Some(pid) => pid,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
let parent = conn.query_row(
|
||||||
|
"SELECT m.iid, m.title, m.web_url FROM merge_requests m WHERE m.id = ?1",
|
||||||
|
rusqlite::params![parent_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, Option<String>>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let (iid, title, web_url) = match parent {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM mr_labels ml
|
||||||
|
JOIN labels l ON l.id = ml.label_id
|
||||||
|
WHERE ml.merge_request_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![parent_id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
(iid, title, web_url, format!("MR !{}", iid), labels)
|
||||||
|
}
|
||||||
|
_ => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut note_stmt = conn.prepare_cached(
|
||||||
|
"SELECT n.author_username, n.body, n.created_at, n.gitlab_id,
|
||||||
|
n.note_type, n.position_old_path, n.position_new_path
|
||||||
|
FROM notes n
|
||||||
|
WHERE n.discussion_id = ?1 AND n.is_system = 0
|
||||||
|
ORDER BY n.created_at ASC, n.id ASC",
|
||||||
|
)?;
|
||||||
|
|
||||||
|
struct NoteRow {
|
||||||
|
author: Option<String>,
|
||||||
|
body: Option<String>,
|
||||||
|
created_at: i64,
|
||||||
|
gitlab_id: i64,
|
||||||
|
old_path: Option<String>,
|
||||||
|
new_path: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
let notes: Vec<NoteRow> = note_stmt
|
||||||
|
.query_map(rusqlite::params![id], |row| {
|
||||||
|
Ok(NoteRow {
|
||||||
|
author: row.get(0)?,
|
||||||
|
body: row.get(1)?,
|
||||||
|
created_at: row.get(2)?,
|
||||||
|
gitlab_id: row.get(3)?,
|
||||||
|
old_path: row.get(5)?,
|
||||||
|
new_path: row.get(6)?,
|
||||||
|
})
|
||||||
|
})?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
if notes.is_empty() {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut path_set = BTreeSet::new();
|
||||||
|
for note in ¬es {
|
||||||
|
if let Some(ref p) = note.old_path
|
||||||
|
&& !p.is_empty()
|
||||||
|
{
|
||||||
|
path_set.insert(p.clone());
|
||||||
|
}
|
||||||
|
if let Some(ref p) = note.new_path
|
||||||
|
&& !p.is_empty()
|
||||||
|
{
|
||||||
|
path_set.insert(p.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let paths: Vec<String> = path_set.into_iter().collect();
|
||||||
|
|
||||||
|
let first_note_gitlab_id = notes[0].gitlab_id;
|
||||||
|
let url = parent_web_url
|
||||||
|
.as_ref()
|
||||||
|
.map(|wu| format!("{}#note_{}", wu, first_note_gitlab_id));
|
||||||
|
|
||||||
|
let author_username = notes[0].author.clone();
|
||||||
|
|
||||||
|
let display_title = parent_title.as_deref().unwrap_or("(untitled)");
|
||||||
|
let embed_title = normalize_title_for_embedding(display_title);
|
||||||
|
let labels_json = serde_json::to_string(&labels).unwrap_or_else(|_| "[]".to_string());
|
||||||
|
let paths_json = serde_json::to_string(&paths).unwrap_or_else(|_| "[]".to_string());
|
||||||
|
|
||||||
|
let mut content = format!(
|
||||||
|
"[[Discussion]] {}: {}\nProject: {}\n",
|
||||||
|
parent_type_prefix, embed_title, path_with_namespace
|
||||||
|
);
|
||||||
|
if let Some(ref u) = url {
|
||||||
|
let _ = writeln!(content, "URL: {}", u);
|
||||||
|
}
|
||||||
|
let _ = writeln!(content, "Labels: {}", labels_json);
|
||||||
|
if !paths.is_empty() {
|
||||||
|
let _ = writeln!(content, "Files: {}", paths_json);
|
||||||
|
}
|
||||||
|
|
||||||
|
let note_contents: Vec<NoteContent> = notes
|
||||||
|
.iter()
|
||||||
|
.map(|note| NoteContent {
|
||||||
|
author: note.author.as_deref().unwrap_or("unknown").to_string(),
|
||||||
|
date: format_date(note.created_at),
|
||||||
|
body: note.body.as_deref().unwrap_or("").to_string(),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let header_len = content.len() + "\n--- Thread ---\n\n".len();
|
||||||
|
let thread_budget = MAX_DISCUSSION_BYTES.saturating_sub(header_len);
|
||||||
|
|
||||||
|
let thread_result = truncate_discussion(¬e_contents, thread_budget);
|
||||||
|
content.push_str("\n--- Thread ---\n\n");
|
||||||
|
content.push_str(&thread_result.content);
|
||||||
|
|
||||||
|
let created_at = notes[0].created_at;
|
||||||
|
let updated_at = notes.last().map(|n| n.created_at).unwrap_or(created_at);
|
||||||
|
|
||||||
|
let content_hash = compute_content_hash(&content);
|
||||||
|
let labels_hash = compute_list_hash(&labels);
|
||||||
|
let paths_hash = compute_list_hash(&paths);
|
||||||
|
|
||||||
|
Ok(Some(DocumentData {
|
||||||
|
source_type: SourceType::Discussion,
|
||||||
|
source_id: id,
|
||||||
|
project_id,
|
||||||
|
author_username,
|
||||||
|
labels,
|
||||||
|
paths,
|
||||||
|
labels_hash,
|
||||||
|
paths_hash,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
url,
|
||||||
|
title: None,
|
||||||
|
content_text: content,
|
||||||
|
content_hash,
|
||||||
|
is_truncated: thread_result.is_truncated,
|
||||||
|
truncated_reason: thread_result.reason.map(|r| r.as_str().to_string()),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
@@ -1,5 +1,171 @@
|
|||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
|
// --- normalize_title_for_embedding tests ---
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_strips_draft_resolve_quotes() {
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("Draft: Resolve \"Analytics Studio: Subformulas\""),
|
||||||
|
"Analytics Studio: Subformulas"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_strips_resolve_quotes() {
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("Resolve \"RUL Report: Use param_trends from S3\""),
|
||||||
|
"RUL Report: Use param_trends from S3"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_strips_draft_prefix() {
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("Draft: Implement JWT authentication"),
|
||||||
|
"Implement JWT authentication"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_strips_wip_prefix() {
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("WIP: Implement JWT authentication"),
|
||||||
|
"Implement JWT authentication"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_strips_draft_wip_combined() {
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("Draft: WIP: Fix auth"),
|
||||||
|
"Fix auth"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_no_change_for_normal_title() {
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("Implement JWT authentication"),
|
||||||
|
"Implement JWT authentication"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_case_insensitive_draft() {
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("draft: Resolve \"Some Issue\""),
|
||||||
|
"Some Issue"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_case_insensitive_wip() {
|
||||||
|
assert_eq!(normalize_title_for_embedding("wip: Something"), "Something");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_untitled_passthrough() {
|
||||||
|
assert_eq!(normalize_title_for_embedding("(untitled)"), "(untitled)");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_resolve_without_quotes_unchanged() {
|
||||||
|
// "Resolve something" without quotes is not the GitLab pattern
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("Resolve the flaky test"),
|
||||||
|
"Resolve the flaky test"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_empty_after_strip_returns_original() {
|
||||||
|
// Edge case: "Draft: " with nothing after → return original
|
||||||
|
assert_eq!(normalize_title_for_embedding("Draft: "), "Draft: ");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_resolve_empty_quotes() {
|
||||||
|
// Edge case: Resolve "" → return original (empty inner text)
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("Resolve \"\""),
|
||||||
|
"Resolve \"\""
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_normalize_title_non_ascii_does_not_panic() {
|
||||||
|
// Emoji at start: byte offsets 4 and 8 fall mid-character.
|
||||||
|
// Must not panic — should return the title unchanged.
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("\u{1F389}\u{1F389} celebration"),
|
||||||
|
"\u{1F389}\u{1F389} celebration"
|
||||||
|
);
|
||||||
|
// Accented characters
|
||||||
|
assert_eq!(
|
||||||
|
normalize_title_for_embedding("\u{00DC}berpr\u{00FC}fung der Daten"),
|
||||||
|
"\u{00DC}berpr\u{00FC}fung der Daten"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- MR document uses normalized title in content_text ---
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mr_document_normalizes_draft_resolve_title() {
|
||||||
|
let conn = setup_mr_test_db();
|
||||||
|
insert_mr(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
4064,
|
||||||
|
Some("Draft: Resolve \"Analytics Studio: Subformulas\""),
|
||||||
|
Some("Implements subformula support"),
|
||||||
|
Some("opened"),
|
||||||
|
Some("dev"),
|
||||||
|
Some("feature/subformulas"),
|
||||||
|
Some("main"),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let doc = extract_mr_document(&conn, 1).unwrap().unwrap();
|
||||||
|
// content_text should use the normalized title (no boilerplate)
|
||||||
|
assert!(
|
||||||
|
doc.content_text
|
||||||
|
.starts_with("[[MergeRequest]] !4064: Analytics Studio: Subformulas\n")
|
||||||
|
);
|
||||||
|
// but DocumentData.title preserves the original for display
|
||||||
|
assert_eq!(
|
||||||
|
doc.title,
|
||||||
|
Some("Draft: Resolve \"Analytics Studio: Subformulas\"".to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Issue document uses normalized title in content_text ---
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_issue_document_normalizes_draft_title() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_issue(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
100,
|
||||||
|
Some("Draft: WIP: Rethink caching strategy"),
|
||||||
|
Some("We should reconsider..."),
|
||||||
|
"opened",
|
||||||
|
Some("alice"),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let doc = extract_issue_document(&conn, 1).unwrap().unwrap();
|
||||||
|
assert!(
|
||||||
|
doc.content_text
|
||||||
|
.starts_with("[[Issue]] #100: Rethink caching strategy\n")
|
||||||
|
);
|
||||||
|
// Original title preserved for display
|
||||||
|
assert_eq!(
|
||||||
|
doc.title,
|
||||||
|
Some("Draft: WIP: Rethink caching strategy".to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_source_type_parse_aliases() {
|
fn test_source_type_parse_aliases() {
|
||||||
assert_eq!(SourceType::parse("issue"), Some(SourceType::Issue));
|
assert_eq!(SourceType::parse("issue"), Some(SourceType::Issue));
|
||||||
111
src/documents/extractor/issues.rs
Normal file
111
src/documents/extractor/issues.rs
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
pub fn extract_issue_document(conn: &Connection, issue_id: i64) -> Result<Option<DocumentData>> {
|
||||||
|
let row = conn.query_row(
|
||||||
|
"SELECT i.id, i.iid, i.title, i.description, i.state, i.author_username,
|
||||||
|
i.created_at, i.updated_at, i.web_url,
|
||||||
|
p.path_with_namespace, p.id AS project_id
|
||||||
|
FROM issues i
|
||||||
|
JOIN projects p ON p.id = i.project_id
|
||||||
|
WHERE i.id = ?1",
|
||||||
|
rusqlite::params![issue_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, i64>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
row.get::<_, Option<String>>(3)?,
|
||||||
|
row.get::<_, String>(4)?,
|
||||||
|
row.get::<_, Option<String>>(5)?,
|
||||||
|
row.get::<_, i64>(6)?,
|
||||||
|
row.get::<_, i64>(7)?,
|
||||||
|
row.get::<_, Option<String>>(8)?,
|
||||||
|
row.get::<_, String>(9)?,
|
||||||
|
row.get::<_, i64>(10)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
let (
|
||||||
|
id,
|
||||||
|
iid,
|
||||||
|
title,
|
||||||
|
description,
|
||||||
|
state,
|
||||||
|
author_username,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
web_url,
|
||||||
|
path_with_namespace,
|
||||||
|
project_id,
|
||||||
|
) = match row {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM issue_labels il
|
||||||
|
JOIN labels l ON l.id = il.label_id
|
||||||
|
WHERE il.issue_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
let labels_json = serde_json::to_string(&labels).unwrap_or_else(|_| "[]".to_string());
|
||||||
|
|
||||||
|
let display_title = title.as_deref().unwrap_or("(untitled)");
|
||||||
|
let embed_title = normalize_title_for_embedding(display_title);
|
||||||
|
let mut content = format!(
|
||||||
|
"[[Issue]] #{}: {}\nProject: {}\n",
|
||||||
|
iid, embed_title, path_with_namespace
|
||||||
|
);
|
||||||
|
if let Some(ref url) = web_url {
|
||||||
|
let _ = writeln!(content, "URL: {}", url);
|
||||||
|
}
|
||||||
|
let _ = writeln!(content, "Labels: {}", labels_json);
|
||||||
|
let _ = writeln!(content, "State: {}", state);
|
||||||
|
if let Some(ref author) = author_username {
|
||||||
|
let _ = writeln!(content, "Author: @{}", author);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(ref desc) = description {
|
||||||
|
content.push_str("\n--- Description ---\n\n");
|
||||||
|
// Pre-truncate to avoid unbounded memory allocation for huge descriptions
|
||||||
|
let pre_trunc = pre_truncate_description(desc, MAX_DOCUMENT_BYTES_HARD);
|
||||||
|
if pre_trunc.was_truncated {
|
||||||
|
warn!(
|
||||||
|
iid,
|
||||||
|
original_bytes = pre_trunc.original_bytes,
|
||||||
|
"Issue description truncated (oversized)"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
content.push_str(&pre_trunc.content);
|
||||||
|
}
|
||||||
|
|
||||||
|
let labels_hash = compute_list_hash(&labels);
|
||||||
|
let paths_hash = compute_list_hash(&[]);
|
||||||
|
|
||||||
|
let hard_cap = truncate_hard_cap(&content);
|
||||||
|
let content_hash = compute_content_hash(&hard_cap.content);
|
||||||
|
|
||||||
|
Ok(Some(DocumentData {
|
||||||
|
source_type: SourceType::Issue,
|
||||||
|
source_id: id,
|
||||||
|
project_id,
|
||||||
|
author_username,
|
||||||
|
labels,
|
||||||
|
paths: Vec::new(),
|
||||||
|
labels_hash,
|
||||||
|
paths_hash,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
url: web_url,
|
||||||
|
title: Some(display_title.to_string()),
|
||||||
|
content_text: hard_cap.content,
|
||||||
|
content_hash,
|
||||||
|
is_truncated: hard_cap.is_truncated,
|
||||||
|
truncated_reason: hard_cap.reason.map(|r| r.as_str().to_string()),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
24
src/documents/extractor/mod.rs
Normal file
24
src/documents/extractor/mod.rs
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
use chrono::DateTime;
|
||||||
|
use rusqlite::Connection;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
use std::collections::{BTreeSet, HashMap};
|
||||||
|
use std::fmt::Write as _;
|
||||||
|
|
||||||
|
use super::truncation::{
|
||||||
|
MAX_DISCUSSION_BYTES, MAX_DOCUMENT_BYTES_HARD, NoteContent, pre_truncate_description,
|
||||||
|
truncate_discussion, truncate_hard_cap,
|
||||||
|
};
|
||||||
|
use crate::core::error::Result;
|
||||||
|
use crate::core::time::ms_to_iso;
|
||||||
|
use tracing::warn;
|
||||||
|
|
||||||
|
include!("common.rs");
|
||||||
|
include!("issues.rs");
|
||||||
|
include!("mrs.rs");
|
||||||
|
include!("discussions.rs");
|
||||||
|
include!("notes.rs");
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
#[path = "extractor_tests.rs"]
|
||||||
|
mod tests;
|
||||||
120
src/documents/extractor/mrs.rs
Normal file
120
src/documents/extractor/mrs.rs
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
/// Builds an embeddable document for the merge request with database id `mr_id`.
///
/// Returns `Ok(None)` when no such MR exists. The rendered document is a
/// metadata header (project, URL, labels, state, author, branches) followed by
/// the MR description, which is pre-truncated and then hard-capped before the
/// content hash is computed.
pub fn extract_mr_document(conn: &Connection, mr_id: i64) -> Result<Option<DocumentData>> {
    let row = conn.query_row(
        "SELECT m.id, m.iid, m.title, m.description, m.state, m.author_username,
                m.source_branch, m.target_branch,
                m.created_at, m.updated_at, m.web_url,
                p.path_with_namespace, p.id AS project_id
         FROM merge_requests m
         JOIN projects p ON p.id = m.project_id
         WHERE m.id = ?1",
        rusqlite::params![mr_id],
        |row| {
            Ok((
                row.get::<_, i64>(0)?,
                row.get::<_, i64>(1)?,
                row.get::<_, Option<String>>(2)?,
                row.get::<_, Option<String>>(3)?,
                row.get::<_, Option<String>>(4)?,
                row.get::<_, Option<String>>(5)?,
                row.get::<_, Option<String>>(6)?,
                row.get::<_, Option<String>>(7)?,
                row.get::<_, Option<i64>>(8)?,
                row.get::<_, Option<i64>>(9)?,
                row.get::<_, Option<String>>(10)?,
                row.get::<_, String>(11)?,
                row.get::<_, i64>(12)?,
            ))
        },
    );

    let (
        id,
        iid,
        title,
        description,
        state,
        author_username,
        source_branch,
        target_branch,
        created_at,
        updated_at,
        web_url,
        path_with_namespace,
        project_id,
    ) = match row {
        Ok(r) => r,
        // A missing MR is not an error; callers treat None as "nothing to embed".
        Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
        Err(e) => return Err(e.into()),
    };

    // Labels are sorted by name so the derived labels_hash is order-stable.
    let mut label_stmt = conn.prepare_cached(
        "SELECT l.name FROM mr_labels ml
         JOIN labels l ON l.id = ml.label_id
         WHERE ml.merge_request_id = ?1
         ORDER BY l.name",
    )?;
    let labels: Vec<String> = label_stmt
        .query_map(rusqlite::params![id], |row| row.get(0))?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    let labels_json = serde_json::to_string(&labels).unwrap_or_else(|_| "[]".to_string());

    let display_title = title.as_deref().unwrap_or("(untitled)");
    let embed_title = normalize_title_for_embedding(display_title);
    let display_state = state.as_deref().unwrap_or("unknown");
    let mut content = format!(
        "[[MergeRequest]] !{}: {}\nProject: {}\n",
        iid, embed_title, path_with_namespace
    );
    if let Some(ref url) = web_url {
        let _ = writeln!(content, "URL: {}", url);
    }
    let _ = writeln!(content, "Labels: {}", labels_json);
    let _ = writeln!(content, "State: {}", display_state);
    if let Some(ref author) = author_username {
        let _ = writeln!(content, "Author: @{}", author);
    }
    if let (Some(src), Some(tgt)) = (&source_branch, &target_branch) {
        let _ = writeln!(content, "Source: {} -> {}", src, tgt);
    }

    if let Some(ref desc) = description {
        content.push_str("\n--- Description ---\n\n");
        // Pre-truncate to avoid unbounded memory allocation for huge descriptions
        let pre_trunc = pre_truncate_description(desc, MAX_DOCUMENT_BYTES_HARD);
        if pre_trunc.was_truncated {
            warn!(
                iid,
                original_bytes = pre_trunc.original_bytes,
                "MR description truncated (oversized)"
            );
        }
        content.push_str(&pre_trunc.content);
    }

    let labels_hash = compute_list_hash(&labels);
    // MR documents carry no file paths; hash the empty list for consistency.
    let paths_hash = compute_list_hash(&[]);

    let hard_cap = truncate_hard_cap(&content);
    let content_hash = compute_content_hash(&hard_cap.content);

    Ok(Some(DocumentData {
        source_type: SourceType::MergeRequest,
        source_id: id,
        project_id,
        author_username,
        labels,
        paths: Vec::new(),
        labels_hash,
        paths_hash,
        // Timestamps are nullable in the MR table; fall back to 0 when absent.
        created_at: created_at.unwrap_or(0),
        updated_at: updated_at.unwrap_or(0),
        url: web_url,
        title: Some(display_title.to_string()),
        content_text: hard_cap.content,
        content_hash,
        is_truncated: hard_cap.is_truncated,
        truncated_reason: hard_cap.reason.map(|r| r.as_str().to_string()),
    }))
}
|
||||||
|
|
||||||
516
src/documents/extractor/notes.rs
Normal file
516
src/documents/extractor/notes.rs
Normal file
@@ -0,0 +1,516 @@
|
|||||||
|
pub fn extract_note_document(conn: &Connection, note_id: i64) -> Result<Option<DocumentData>> {
|
||||||
|
let row = conn.query_row(
|
||||||
|
"SELECT n.id, n.gitlab_id, n.author_username, n.body, n.note_type, n.is_system,
|
||||||
|
n.created_at, n.updated_at, n.position_new_path, n.position_new_line,
|
||||||
|
n.position_old_path, n.position_old_line, n.resolvable, n.resolved, n.resolved_by,
|
||||||
|
d.noteable_type, d.issue_id, d.merge_request_id,
|
||||||
|
p.path_with_namespace, p.id AS project_id
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN projects p ON n.project_id = p.id
|
||||||
|
WHERE n.id = ?1",
|
||||||
|
rusqlite::params![note_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, i64>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
row.get::<_, Option<String>>(3)?,
|
||||||
|
row.get::<_, Option<String>>(4)?,
|
||||||
|
row.get::<_, bool>(5)?,
|
||||||
|
row.get::<_, i64>(6)?,
|
||||||
|
row.get::<_, i64>(7)?,
|
||||||
|
row.get::<_, Option<String>>(8)?,
|
||||||
|
row.get::<_, Option<i64>>(9)?,
|
||||||
|
row.get::<_, Option<String>>(10)?,
|
||||||
|
row.get::<_, Option<i64>>(11)?,
|
||||||
|
row.get::<_, bool>(12)?,
|
||||||
|
row.get::<_, bool>(13)?,
|
||||||
|
row.get::<_, Option<String>>(14)?,
|
||||||
|
row.get::<_, String>(15)?,
|
||||||
|
row.get::<_, Option<i64>>(16)?,
|
||||||
|
row.get::<_, Option<i64>>(17)?,
|
||||||
|
row.get::<_, String>(18)?,
|
||||||
|
row.get::<_, i64>(19)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
let (
|
||||||
|
_id,
|
||||||
|
gitlab_id,
|
||||||
|
author_username,
|
||||||
|
body,
|
||||||
|
note_type,
|
||||||
|
is_system,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
position_new_path,
|
||||||
|
position_new_line,
|
||||||
|
position_old_path,
|
||||||
|
_position_old_line,
|
||||||
|
resolvable,
|
||||||
|
resolved,
|
||||||
|
_resolved_by,
|
||||||
|
noteable_type,
|
||||||
|
issue_id,
|
||||||
|
merge_request_id,
|
||||||
|
path_with_namespace,
|
||||||
|
project_id,
|
||||||
|
) = match row {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
|
||||||
|
if is_system {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let (parent_iid, parent_title, parent_web_url, parent_type_label, labels) =
|
||||||
|
match noteable_type.as_str() {
|
||||||
|
"Issue" => {
|
||||||
|
let parent_id = match issue_id {
|
||||||
|
Some(pid) => pid,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
let parent = conn.query_row(
|
||||||
|
"SELECT i.iid, i.title, i.web_url FROM issues i WHERE i.id = ?1",
|
||||||
|
rusqlite::params![parent_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, Option<String>>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let (iid, title, web_url) = match parent {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM issue_labels il
|
||||||
|
JOIN labels l ON l.id = il.label_id
|
||||||
|
WHERE il.issue_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![parent_id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
(iid, title, web_url, "Issue", labels)
|
||||||
|
}
|
||||||
|
"MergeRequest" => {
|
||||||
|
let parent_id = match merge_request_id {
|
||||||
|
Some(pid) => pid,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
let parent = conn.query_row(
|
||||||
|
"SELECT m.iid, m.title, m.web_url FROM merge_requests m WHERE m.id = ?1",
|
||||||
|
rusqlite::params![parent_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, Option<String>>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let (iid, title, web_url) = match parent {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM mr_labels ml
|
||||||
|
JOIN labels l ON l.id = ml.label_id
|
||||||
|
WHERE ml.merge_request_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![parent_id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
(iid, title, web_url, "MergeRequest", labels)
|
||||||
|
}
|
||||||
|
_ => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
build_note_document(
|
||||||
|
note_id,
|
||||||
|
gitlab_id,
|
||||||
|
author_username,
|
||||||
|
body,
|
||||||
|
note_type,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
position_new_path,
|
||||||
|
position_new_line,
|
||||||
|
position_old_path,
|
||||||
|
resolvable,
|
||||||
|
resolved,
|
||||||
|
parent_iid,
|
||||||
|
parent_title.as_deref(),
|
||||||
|
parent_web_url.as_deref(),
|
||||||
|
&labels,
|
||||||
|
parent_type_label,
|
||||||
|
&path_with_namespace,
|
||||||
|
project_id,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Metadata about a note's parent issue or merge request, captured once per
/// parent and reused while rendering note documents.
pub struct ParentMetadata {
    /// Project-scoped iid of the parent (issue `#iid` or MR `!iid`).
    pub iid: i64,
    /// Parent title, if any.
    pub title: Option<String>,
    /// Parent web URL; note URLs are derived from it via a `#note_<id>` anchor.
    pub web_url: Option<String>,
    /// The parent's label names (sorted by name at query time).
    pub labels: Vec<String>,
    /// `path_with_namespace` of the owning project.
    pub project_path: String,
}
|
||||||
|
|
||||||
|
/// Memoizes parent (issue/MR) metadata lookups, keyed by
/// `(noteable_type, parent_id)`, so batch note extraction queries each parent
/// only once. A cached `None` records that the parent does not exist.
pub struct ParentMetadataCache {
    cache: HashMap<(String, i64), Option<ParentMetadata>>,
}
|
||||||
|
|
||||||
|
impl Default for ParentMetadataCache {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ParentMetadataCache {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
cache: HashMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_or_fetch(
|
||||||
|
&mut self,
|
||||||
|
conn: &Connection,
|
||||||
|
noteable_type: &str,
|
||||||
|
parent_id: i64,
|
||||||
|
project_path: &str,
|
||||||
|
) -> Result<Option<&ParentMetadata>> {
|
||||||
|
let key = (noteable_type.to_string(), parent_id);
|
||||||
|
if !self.cache.contains_key(&key) {
|
||||||
|
let meta = fetch_parent_metadata(conn, noteable_type, parent_id, project_path)?;
|
||||||
|
self.cache.insert(key.clone(), meta);
|
||||||
|
}
|
||||||
|
Ok(self.cache.get(&key).and_then(|m| m.as_ref()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fetch_parent_metadata(
|
||||||
|
conn: &Connection,
|
||||||
|
noteable_type: &str,
|
||||||
|
parent_id: i64,
|
||||||
|
project_path: &str,
|
||||||
|
) -> Result<Option<ParentMetadata>> {
|
||||||
|
match noteable_type {
|
||||||
|
"Issue" => {
|
||||||
|
let parent = conn.query_row(
|
||||||
|
"SELECT i.iid, i.title, i.web_url FROM issues i WHERE i.id = ?1",
|
||||||
|
rusqlite::params![parent_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, Option<String>>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let (iid, title, web_url) = match parent {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM issue_labels il
|
||||||
|
JOIN labels l ON l.id = il.label_id
|
||||||
|
WHERE il.issue_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![parent_id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
Ok(Some(ParentMetadata {
|
||||||
|
iid,
|
||||||
|
title,
|
||||||
|
web_url,
|
||||||
|
labels,
|
||||||
|
project_path: project_path.to_string(),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
"MergeRequest" => {
|
||||||
|
let parent = conn.query_row(
|
||||||
|
"SELECT m.iid, m.title, m.web_url FROM merge_requests m WHERE m.id = ?1",
|
||||||
|
rusqlite::params![parent_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, Option<String>>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let (iid, title, web_url) = match parent {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM mr_labels ml
|
||||||
|
JOIN labels l ON l.id = ml.label_id
|
||||||
|
WHERE ml.merge_request_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![parent_id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
Ok(Some(ParentMetadata {
|
||||||
|
iid,
|
||||||
|
title,
|
||||||
|
web_url,
|
||||||
|
labels,
|
||||||
|
project_path: project_path.to_string(),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
_ => Ok(None),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn extract_note_document_cached(
|
||||||
|
conn: &Connection,
|
||||||
|
note_id: i64,
|
||||||
|
cache: &mut ParentMetadataCache,
|
||||||
|
) -> Result<Option<DocumentData>> {
|
||||||
|
let row = conn.query_row(
|
||||||
|
"SELECT n.id, n.gitlab_id, n.author_username, n.body, n.note_type, n.is_system,
|
||||||
|
n.created_at, n.updated_at, n.position_new_path, n.position_new_line,
|
||||||
|
n.position_old_path, n.position_old_line, n.resolvable, n.resolved, n.resolved_by,
|
||||||
|
d.noteable_type, d.issue_id, d.merge_request_id,
|
||||||
|
p.path_with_namespace, p.id AS project_id
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN projects p ON n.project_id = p.id
|
||||||
|
WHERE n.id = ?1",
|
||||||
|
rusqlite::params![note_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, i64>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
row.get::<_, Option<String>>(3)?,
|
||||||
|
row.get::<_, Option<String>>(4)?,
|
||||||
|
row.get::<_, bool>(5)?,
|
||||||
|
row.get::<_, i64>(6)?,
|
||||||
|
row.get::<_, i64>(7)?,
|
||||||
|
row.get::<_, Option<String>>(8)?,
|
||||||
|
row.get::<_, Option<i64>>(9)?,
|
||||||
|
row.get::<_, Option<String>>(10)?,
|
||||||
|
row.get::<_, Option<i64>>(11)?,
|
||||||
|
row.get::<_, bool>(12)?,
|
||||||
|
row.get::<_, bool>(13)?,
|
||||||
|
row.get::<_, Option<String>>(14)?,
|
||||||
|
row.get::<_, String>(15)?,
|
||||||
|
row.get::<_, Option<i64>>(16)?,
|
||||||
|
row.get::<_, Option<i64>>(17)?,
|
||||||
|
row.get::<_, String>(18)?,
|
||||||
|
row.get::<_, i64>(19)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
let (
|
||||||
|
_id,
|
||||||
|
gitlab_id,
|
||||||
|
author_username,
|
||||||
|
body,
|
||||||
|
note_type,
|
||||||
|
is_system,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
position_new_path,
|
||||||
|
position_new_line,
|
||||||
|
position_old_path,
|
||||||
|
_position_old_line,
|
||||||
|
resolvable,
|
||||||
|
resolved,
|
||||||
|
_resolved_by,
|
||||||
|
noteable_type,
|
||||||
|
issue_id,
|
||||||
|
merge_request_id,
|
||||||
|
path_with_namespace,
|
||||||
|
project_id,
|
||||||
|
) = match row {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
|
||||||
|
if is_system {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let parent_id = match noteable_type.as_str() {
|
||||||
|
"Issue" => match issue_id {
|
||||||
|
Some(pid) => pid,
|
||||||
|
None => return Ok(None),
|
||||||
|
},
|
||||||
|
"MergeRequest" => match merge_request_id {
|
||||||
|
Some(pid) => pid,
|
||||||
|
None => return Ok(None),
|
||||||
|
},
|
||||||
|
_ => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let parent = cache.get_or_fetch(conn, ¬eable_type, parent_id, &path_with_namespace)?;
|
||||||
|
let parent = match parent {
|
||||||
|
Some(p) => p,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let parent_iid = parent.iid;
|
||||||
|
let parent_title = parent.title.as_deref();
|
||||||
|
let parent_web_url = parent.web_url.as_deref();
|
||||||
|
let labels = parent.labels.clone();
|
||||||
|
let parent_type_label = noteable_type.as_str();
|
||||||
|
|
||||||
|
build_note_document(
|
||||||
|
note_id,
|
||||||
|
gitlab_id,
|
||||||
|
author_username,
|
||||||
|
body,
|
||||||
|
note_type,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
position_new_path,
|
||||||
|
position_new_line,
|
||||||
|
position_old_path,
|
||||||
|
resolvable,
|
||||||
|
resolved,
|
||||||
|
parent_iid,
|
||||||
|
parent_title,
|
||||||
|
parent_web_url,
|
||||||
|
&labels,
|
||||||
|
parent_type_label,
|
||||||
|
&path_with_namespace,
|
||||||
|
project_id,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Assembles the final `DocumentData` for a note from its row fields plus the
/// already-resolved parent metadata.
///
/// The content layout is a `key: value` header (ids, project, parent, author,
/// optional resolution/diff-position/labels/url lines) followed by the raw
/// note body. The assembled text is hard-capped and hashed before return.
/// Always returns `Ok(Some(..))`; the `Result`/`Option` shape matches the
/// sibling extractors.
#[allow(clippy::too_many_arguments)]
fn build_note_document(
    note_id: i64,
    gitlab_id: i64,
    author_username: Option<String>,
    body: Option<String>,
    note_type: Option<String>,
    created_at: i64,
    updated_at: i64,
    position_new_path: Option<String>,
    position_new_line: Option<i64>,
    position_old_path: Option<String>,
    resolvable: bool,
    resolved: bool,
    parent_iid: i64,
    parent_title: Option<&str>,
    parent_web_url: Option<&str>,
    labels: &[String],
    parent_type_label: &str,
    path_with_namespace: &str,
    project_id: i64,
) -> Result<Option<DocumentData>> {
    // BTreeSet dedupes old/new diff paths and keeps them in sorted order, so
    // the derived paths_hash is stable.
    let mut path_set = BTreeSet::new();
    if let Some(ref p) = position_old_path
        && !p.is_empty()
    {
        path_set.insert(p.clone());
    }
    if let Some(ref p) = position_new_path
        && !p.is_empty()
    {
        path_set.insert(p.clone());
    }
    let paths: Vec<String> = path_set.into_iter().collect();

    // Deep-link to the note within its parent's page.
    let url = parent_web_url.map(|wu| format!("{}#note_{}", wu, gitlab_id));

    let display_title = parent_title.unwrap_or("(untitled)");
    let embed_title = normalize_title_for_embedding(display_title);
    let display_note_type = note_type.as_deref().unwrap_or("Note");
    let display_author = author_username.as_deref().unwrap_or("unknown");
    let parent_prefix = if parent_type_label == "Issue" {
        format!("Issue #{}", parent_iid)
    } else {
        format!("MR !{}", parent_iid)
    };

    // Display title uses original (for human-readable output)
    let title = format!(
        "Note by @{} on {}: {}",
        display_author, parent_prefix, display_title
    );

    let labels_csv = labels.join(", ");

    let mut content = String::new();
    let _ = writeln!(content, "[[Note]]");
    let _ = writeln!(content, "source_type: note");
    let _ = writeln!(content, "note_gitlab_id: {}", gitlab_id);
    let _ = writeln!(content, "project: {}", path_with_namespace);
    let _ = writeln!(content, "parent_type: {}", parent_type_label);
    let _ = writeln!(content, "parent_iid: {}", parent_iid);
    let _ = writeln!(content, "parent_title: {}", embed_title);
    let _ = writeln!(content, "note_type: {}", display_note_type);
    let _ = writeln!(content, "author: @{}", display_author);
    let _ = writeln!(content, "created_at: {}", ms_to_iso(created_at))
;
    // Only resolvable notes carry a meaningful resolved flag.
    if resolvable {
        let _ = writeln!(content, "resolved: {}", resolved);
    }
    // Diff notes: record the new-side file position (with line when known).
    if display_note_type == "DiffNote"
        && let Some(ref p) = position_new_path
    {
        if let Some(line) = position_new_line {
            let _ = writeln!(content, "path: {}:{}", p, line);
        } else {
            let _ = writeln!(content, "path: {}", p);
        }
    }
    if !labels.is_empty() {
        let _ = writeln!(content, "labels: {}", labels_csv);
    }
    if let Some(ref u) = url {
        let _ = writeln!(content, "url: {}", u);
    }

    content.push_str("\n--- Body ---\n\n");
    content.push_str(body.as_deref().unwrap_or(""));

    let labels_hash = compute_list_hash(labels);
    let paths_hash = compute_list_hash(&paths);

    let hard_cap = truncate_hard_cap(&content);
    let content_hash = compute_content_hash(&hard_cap.content);

    Ok(Some(DocumentData {
        source_type: SourceType::Note,
        source_id: note_id,
        project_id,
        author_username,
        labels: labels.to_vec(),
        paths,
        labels_hash,
        paths_hash,
        created_at,
        updated_at,
        url,
        title: Some(title),
        content_text: hard_cap.content,
        content_hash,
        is_truncated: hard_cap.is_truncated,
        truncated_reason: hard_cap.reason.map(|r| r.as_str().to_string()),
    }))
}
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
use rusqlite::Connection;
|
use rusqlite::Connection;
|
||||||
|
|
||||||
use crate::core::error::Result;
|
use crate::core::error::Result;
|
||||||
use crate::embedding::chunking::{CHUNK_MAX_BYTES, EXPECTED_DIMS};
|
use crate::embedding::chunks::{CHUNK_MAX_BYTES, EXPECTED_DIMS};
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct PendingDocument {
|
pub struct PendingDocument {
|
||||||
|
|||||||
@@ -1,70 +0,0 @@
|
|||||||
pub const CHUNK_ROWID_MULTIPLIER: i64 = 1000;
|
|
||||||
|
|
||||||
pub fn encode_rowid(document_id: i64, chunk_index: i64) -> i64 {
|
|
||||||
assert!(
|
|
||||||
(0..CHUNK_ROWID_MULTIPLIER).contains(&chunk_index),
|
|
||||||
"chunk_index {chunk_index} out of range [0, {CHUNK_ROWID_MULTIPLIER})"
|
|
||||||
);
|
|
||||||
document_id
|
|
||||||
.checked_mul(CHUNK_ROWID_MULTIPLIER)
|
|
||||||
.and_then(|v| v.checked_add(chunk_index))
|
|
||||||
.unwrap_or_else(|| {
|
|
||||||
panic!("encode_rowid overflow: document_id={document_id}, chunk_index={chunk_index}")
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn decode_rowid(rowid: i64) -> (i64, i64) {
|
|
||||||
assert!(
|
|
||||||
rowid >= 0,
|
|
||||||
"decode_rowid called with negative rowid: {rowid}"
|
|
||||||
);
|
|
||||||
let document_id = rowid / CHUNK_ROWID_MULTIPLIER;
|
|
||||||
let chunk_index = rowid % CHUNK_ROWID_MULTIPLIER;
|
|
||||||
(document_id, chunk_index)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_encode_single_chunk() {
|
|
||||||
assert_eq!(encode_rowid(1, 0), 1000);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_encode_multi_chunk() {
|
|
||||||
assert_eq!(encode_rowid(1, 5), 1005);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_encode_specific_values() {
|
|
||||||
assert_eq!(encode_rowid(42, 0), 42000);
|
|
||||||
assert_eq!(encode_rowid(42, 5), 42005);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_decode_zero_chunk() {
|
|
||||||
assert_eq!(decode_rowid(42000), (42, 0));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_decode_roundtrip() {
|
|
||||||
for doc_id in [0, 1, 42, 100, 999, 10000] {
|
|
||||||
for chunk_idx in [0, 1, 5, 99, 999] {
|
|
||||||
let rowid = encode_rowid(doc_id, chunk_idx);
|
|
||||||
let (decoded_doc, decoded_chunk) = decode_rowid(rowid);
|
|
||||||
assert_eq!(
|
|
||||||
(decoded_doc, decoded_chunk),
|
|
||||||
(doc_id, chunk_idx),
|
|
||||||
"Roundtrip failed for doc_id={doc_id}, chunk_idx={chunk_idx}"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_multiplier_value() {
|
|
||||||
assert_eq!(CHUNK_ROWID_MULTIPLIER, 1000);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,3 +1,73 @@
|
|||||||
|
/// Each document owns a contiguous band of `CHUNK_ROWID_MULTIPLIER` rowids in
/// the vector table: chunk `i` of document `d` lives at `d * 1000 + i`.
pub const CHUNK_ROWID_MULTIPLIER: i64 = 1000;

/// Packs `(document_id, chunk_index)` into a single vector-table rowid.
///
/// # Panics
/// Panics when `chunk_index` lies outside `[0, CHUNK_ROWID_MULTIPLIER)` or
/// when the packing arithmetic would overflow `i64`.
pub fn encode_rowid(document_id: i64, chunk_index: i64) -> i64 {
    assert!(
        (0..CHUNK_ROWID_MULTIPLIER).contains(&chunk_index),
        "chunk_index {chunk_index} out of range [0, {CHUNK_ROWID_MULTIPLIER})"
    );
    let packed = document_id
        .checked_mul(CHUNK_ROWID_MULTIPLIER)
        .and_then(|base| base.checked_add(chunk_index));
    match packed {
        Some(rowid) => rowid,
        None => {
            panic!("encode_rowid overflow: document_id={document_id}, chunk_index={chunk_index}")
        }
    }
}

/// Splits a vector-table rowid back into `(document_id, chunk_index)`.
///
/// # Panics
/// Panics when `rowid` is negative (encoded rowids are always non-negative).
pub fn decode_rowid(rowid: i64) -> (i64, i64) {
    assert!(
        rowid >= 0,
        "decode_rowid called with negative rowid: {rowid}"
    );
    (
        rowid / CHUNK_ROWID_MULTIPLIER,
        rowid % CHUNK_ROWID_MULTIPLIER,
    )
}
|
||||||
|
|
||||||
|
/// Unit tests for the chunk rowid packing scheme (`encode_rowid` / `decode_rowid`).
#[cfg(test)]
mod chunk_ids_tests {
    use super::*;

    #[test]
    fn test_encode_single_chunk() {
        assert_eq!(encode_rowid(1, 0), 1000);
    }

    #[test]
    fn test_encode_multi_chunk() {
        assert_eq!(encode_rowid(1, 5), 1005);
    }

    #[test]
    fn test_encode_specific_values() {
        assert_eq!(encode_rowid(42, 0), 42000);
        assert_eq!(encode_rowid(42, 5), 42005);
    }

    #[test]
    fn test_decode_zero_chunk() {
        assert_eq!(decode_rowid(42000), (42, 0));
    }

    #[test]
    fn test_decode_roundtrip() {
        // Representative document ids and chunk indices, including both
        // boundaries of the valid chunk range (0 and 999).
        for doc_id in [0, 1, 42, 100, 999, 10000] {
            for chunk_idx in [0, 1, 5, 99, 999] {
                let rowid = encode_rowid(doc_id, chunk_idx);
                let (decoded_doc, decoded_chunk) = decode_rowid(rowid);
                assert_eq!(
                    (decoded_doc, decoded_chunk),
                    (doc_id, chunk_idx),
                    "Roundtrip failed for doc_id={doc_id}, chunk_idx={chunk_idx}"
                );
            }
        }
    }

    #[test]
    fn test_multiplier_value() {
        assert_eq!(CHUNK_ROWID_MULTIPLIER, 1000);
    }
}
||||||
pub const CHUNK_MAX_BYTES: usize = 1_500;
|
pub const CHUNK_MAX_BYTES: usize = 1_500;
|
||||||
|
|
||||||
pub const EXPECTED_DIMS: usize = 768;
|
pub const EXPECTED_DIMS: usize = 768;
|
||||||
@@ -104,4 +174,4 @@ fn floor_char_boundary(s: &str, idx: usize) -> usize {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
#[path = "chunking_tests.rs"]
|
#[path = "chunking_tests.rs"]
|
||||||
mod tests;
|
mod chunking_tests;
|
||||||
@@ -1,11 +1,10 @@
|
|||||||
pub mod change_detector;
|
pub mod change_detector;
|
||||||
pub mod chunk_ids;
|
pub mod chunks;
|
||||||
pub mod chunking;
|
|
||||||
pub mod ollama;
|
pub mod ollama;
|
||||||
pub mod pipeline;
|
pub mod pipeline;
|
||||||
pub mod similarity;
|
pub mod similarity;
|
||||||
|
|
||||||
pub use change_detector::{PendingDocument, count_pending_documents, find_pending_documents};
|
pub use change_detector::{PendingDocument, count_pending_documents, find_pending_documents};
|
||||||
pub use chunking::{CHUNK_MAX_BYTES, CHUNK_OVERLAP_CHARS, split_into_chunks};
|
pub use chunks::{CHUNK_MAX_BYTES, CHUNK_OVERLAP_CHARS, split_into_chunks};
|
||||||
pub use pipeline::{EmbedForIdsResult, EmbedResult, embed_documents, embed_documents_by_ids};
|
pub use pipeline::{EmbedForIdsResult, EmbedResult, embed_documents, embed_documents_by_ids};
|
||||||
pub use similarity::cosine_similarity;
|
pub use similarity::cosine_similarity;
|
||||||
|
|||||||
@@ -1,9 +1,8 @@
|
|||||||
use reqwest::Client;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::time::Duration;
|
use std::time::Duration;
|
||||||
use tracing::warn;
|
|
||||||
|
|
||||||
use crate::core::error::{LoreError, Result};
|
use crate::core::error::{LoreError, Result};
|
||||||
|
use crate::http::Client;
|
||||||
|
|
||||||
pub struct OllamaConfig {
|
pub struct OllamaConfig {
|
||||||
pub base_url: String,
|
pub base_url: String,
|
||||||
@@ -51,17 +50,7 @@ struct ModelInfo {
|
|||||||
|
|
||||||
impl OllamaClient {
|
impl OllamaClient {
|
||||||
pub fn new(config: OllamaConfig) -> Self {
|
pub fn new(config: OllamaConfig) -> Self {
|
||||||
let client = Client::builder()
|
let client = Client::with_timeout(Duration::from_secs(config.timeout_secs));
|
||||||
.timeout(Duration::from_secs(config.timeout_secs))
|
|
||||||
.build()
|
|
||||||
.unwrap_or_else(|e| {
|
|
||||||
warn!(
|
|
||||||
error = %e,
|
|
||||||
"Failed to build configured Ollama HTTP client; falling back to default client"
|
|
||||||
);
|
|
||||||
Client::new()
|
|
||||||
});
|
|
||||||
|
|
||||||
Self { client, config }
|
Self { client, config }
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -70,22 +59,17 @@ impl OllamaClient {
|
|||||||
|
|
||||||
let response =
|
let response =
|
||||||
self.client
|
self.client
|
||||||
.get(&url)
|
.get(&url, &[])
|
||||||
.send()
|
|
||||||
.await
|
.await
|
||||||
.map_err(|e| LoreError::OllamaUnavailable {
|
.map_err(|e| LoreError::OllamaUnavailable {
|
||||||
base_url: self.config.base_url.clone(),
|
base_url: self.config.base_url.clone(),
|
||||||
source: Some(e),
|
detail: Some(format!("{e:?}")),
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let tags: TagsResponse =
|
let tags: TagsResponse = response.json().map_err(|e| LoreError::OllamaUnavailable {
|
||||||
response
|
base_url: self.config.base_url.clone(),
|
||||||
.json()
|
detail: Some(format!("{e:?}")),
|
||||||
.await
|
})?;
|
||||||
.map_err(|e| LoreError::OllamaUnavailable {
|
|
||||||
base_url: self.config.base_url.clone(),
|
|
||||||
source: Some(e),
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let model_found = tags.models.iter().any(|m| {
|
let model_found = tags.models.iter().any(|m| {
|
||||||
m.name == self.config.model || m.name.starts_with(&format!("{}:", self.config.model))
|
m.name == self.config.model || m.name.starts_with(&format!("{}:", self.config.model))
|
||||||
@@ -110,49 +94,36 @@ impl OllamaClient {
|
|||||||
|
|
||||||
let response = self
|
let response = self
|
||||||
.client
|
.client
|
||||||
.post(&url)
|
.post_json(&url, &[], &request)
|
||||||
.json(&request)
|
|
||||||
.send()
|
|
||||||
.await
|
.await
|
||||||
.map_err(|e| LoreError::OllamaUnavailable {
|
.map_err(|e| LoreError::OllamaUnavailable {
|
||||||
base_url: self.config.base_url.clone(),
|
base_url: self.config.base_url.clone(),
|
||||||
source: Some(e),
|
detail: Some(format!("{e:?}")),
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let status = response.status();
|
if !response.is_success() {
|
||||||
if !status.is_success() {
|
let status = response.status;
|
||||||
let body = response.text().await.unwrap_or_default();
|
let body = response.text().unwrap_or_default();
|
||||||
return Err(LoreError::EmbeddingFailed {
|
return Err(LoreError::EmbeddingFailed {
|
||||||
document_id: 0,
|
document_id: 0,
|
||||||
reason: format!("HTTP {}: {}", status, body),
|
reason: format!("HTTP {status}: {body}"),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
let embed_response: EmbedResponse =
|
let embed_response: EmbedResponse =
|
||||||
response
|
response.json().map_err(|e| LoreError::EmbeddingFailed {
|
||||||
.json()
|
document_id: 0,
|
||||||
.await
|
reason: format!("Failed to parse embed response: {e}"),
|
||||||
.map_err(|e| LoreError::EmbeddingFailed {
|
})?;
|
||||||
document_id: 0,
|
|
||||||
reason: format!("Failed to parse embed response: {}", e),
|
|
||||||
})?;
|
|
||||||
|
|
||||||
Ok(embed_response.embeddings)
|
Ok(embed_response.embeddings)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn check_ollama_health(base_url: &str) -> bool {
|
pub async fn check_ollama_health(base_url: &str) -> bool {
|
||||||
let client = Client::builder()
|
let client = Client::with_timeout(Duration::from_secs(5));
|
||||||
.timeout(Duration::from_secs(5))
|
|
||||||
.build()
|
|
||||||
.ok();
|
|
||||||
|
|
||||||
let Some(client) = client else {
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
|
|
||||||
let url = format!("{base_url}/api/tags");
|
let url = format!("{base_url}/api/tags");
|
||||||
client.get(&url).send().await.is_ok()
|
client.get(&url, &[]).await.is_ok_and(|r| r.is_success())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|||||||
@@ -9,8 +9,9 @@ use tracing::{debug, info, instrument, warn};
|
|||||||
use crate::core::error::Result;
|
use crate::core::error::Result;
|
||||||
use crate::core::shutdown::ShutdownSignal;
|
use crate::core::shutdown::ShutdownSignal;
|
||||||
use crate::embedding::change_detector::{count_pending_documents, find_pending_documents};
|
use crate::embedding::change_detector::{count_pending_documents, find_pending_documents};
|
||||||
use crate::embedding::chunk_ids::{CHUNK_ROWID_MULTIPLIER, encode_rowid};
|
use crate::embedding::chunks::{
|
||||||
use crate::embedding::chunking::{CHUNK_MAX_BYTES, EXPECTED_DIMS, split_into_chunks};
|
CHUNK_MAX_BYTES, CHUNK_ROWID_MULTIPLIER, EXPECTED_DIMS, encode_rowid, split_into_chunks,
|
||||||
|
};
|
||||||
use crate::embedding::ollama::OllamaClient;
|
use crate::embedding::ollama::OllamaClient;
|
||||||
|
|
||||||
const BATCH_SIZE: usize = 32;
|
const BATCH_SIZE: usize = 32;
|
||||||
@@ -160,7 +161,7 @@ async fn embed_page(
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if page_normal_docs.is_multiple_of(50) {
|
if page_normal_docs != 0 && page_normal_docs.is_multiple_of(50) {
|
||||||
debug!(
|
debug!(
|
||||||
doc_id = doc.document_id,
|
doc_id = doc.document_id,
|
||||||
doc_num = page_normal_docs,
|
doc_num = page_normal_docs,
|
||||||
@@ -168,7 +169,7 @@ async fn embed_page(
|
|||||||
"Chunking document"
|
"Chunking document"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
if page_normal_docs.is_multiple_of(100) {
|
if page_normal_docs != 0 && page_normal_docs.is_multiple_of(100) {
|
||||||
info!(
|
info!(
|
||||||
doc_id = doc.document_id,
|
doc_id = doc.document_id,
|
||||||
content_bytes = doc.content_text.len(),
|
content_bytes = doc.content_text.len(),
|
||||||
@@ -685,7 +686,7 @@ fn find_documents_by_ids(
|
|||||||
document_ids: &[i64],
|
document_ids: &[i64],
|
||||||
model_name: &str,
|
model_name: &str,
|
||||||
) -> Result<Vec<crate::embedding::change_detector::PendingDocument>> {
|
) -> Result<Vec<crate::embedding::change_detector::PendingDocument>> {
|
||||||
use crate::embedding::chunking::{CHUNK_MAX_BYTES, EXPECTED_DIMS};
|
use crate::embedding::chunks::{CHUNK_MAX_BYTES, EXPECTED_DIMS};
|
||||||
|
|
||||||
if document_ids.is_empty() {
|
if document_ids.is_empty() {
|
||||||
return Ok(Vec::new());
|
return Ok(Vec::new());
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ use wiremock::{Mock, MockServer, ResponseTemplate};
|
|||||||
|
|
||||||
use crate::core::db::{create_connection, run_migrations};
|
use crate::core::db::{create_connection, run_migrations};
|
||||||
use crate::core::shutdown::ShutdownSignal;
|
use crate::core::shutdown::ShutdownSignal;
|
||||||
use crate::embedding::chunking::EXPECTED_DIMS;
|
use crate::embedding::chunks::EXPECTED_DIMS;
|
||||||
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
|
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
|
||||||
use crate::embedding::pipeline::embed_documents_by_ids;
|
use crate::embedding::pipeline::embed_documents_by_ids;
|
||||||
|
|
||||||
|
|||||||
@@ -1,20 +1,19 @@
|
|||||||
|
use asupersync::time::{sleep, wall_now};
|
||||||
use async_stream::stream;
|
use async_stream::stream;
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use futures::Stream;
|
use futures::Stream;
|
||||||
use reqwest::header::{ACCEPT, HeaderMap, HeaderValue};
|
|
||||||
use reqwest::{Client, Response, StatusCode};
|
|
||||||
use std::pin::Pin;
|
use std::pin::Pin;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
use std::sync::Mutex;
|
||||||
use std::time::{Duration, Instant};
|
use std::time::{Duration, Instant};
|
||||||
use tokio::sync::Mutex;
|
use tracing::debug;
|
||||||
use tokio::time::sleep;
|
|
||||||
use tracing::{debug, warn};
|
|
||||||
|
|
||||||
use super::types::{
|
use super::types::{
|
||||||
GitLabDiscussion, GitLabIssue, GitLabIssueRef, GitLabLabelEvent, GitLabMergeRequest,
|
GitLabDiscussion, GitLabIssue, GitLabIssueRef, GitLabLabelEvent, GitLabMergeRequest,
|
||||||
GitLabMilestoneEvent, GitLabMrDiff, GitLabProject, GitLabStateEvent, GitLabUser, GitLabVersion,
|
GitLabMilestoneEvent, GitLabMrDiff, GitLabProject, GitLabStateEvent, GitLabUser, GitLabVersion,
|
||||||
};
|
};
|
||||||
use crate::core::error::{LoreError, Result};
|
use crate::core::error::{LoreError, Result};
|
||||||
|
use crate::http;
|
||||||
|
|
||||||
struct RateLimiter {
|
struct RateLimiter {
|
||||||
last_request: Instant,
|
last_request: Instant,
|
||||||
@@ -56,9 +55,8 @@ fn rand_jitter() -> u64 {
|
|||||||
(n ^ nanos) % 50
|
(n ^ nanos) % 50
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct GitLabClient {
|
pub struct GitLabClient {
|
||||||
client: Client,
|
client: http::Client,
|
||||||
base_url: String,
|
base_url: String,
|
||||||
token: String,
|
token: String,
|
||||||
rate_limiter: Arc<Mutex<RateLimiter>>,
|
rate_limiter: Arc<Mutex<RateLimiter>>,
|
||||||
@@ -66,27 +64,8 @@ pub struct GitLabClient {
|
|||||||
|
|
||||||
impl GitLabClient {
|
impl GitLabClient {
|
||||||
pub fn new(base_url: &str, token: &str, requests_per_second: Option<f64>) -> Self {
|
pub fn new(base_url: &str, token: &str, requests_per_second: Option<f64>) -> Self {
|
||||||
let mut headers = HeaderMap::new();
|
|
||||||
headers.insert(ACCEPT, HeaderValue::from_static("application/json"));
|
|
||||||
|
|
||||||
let client = Client::builder()
|
|
||||||
.default_headers(headers.clone())
|
|
||||||
.timeout(Duration::from_secs(30))
|
|
||||||
.build()
|
|
||||||
.unwrap_or_else(|e| {
|
|
||||||
warn!(
|
|
||||||
error = %e,
|
|
||||||
"Failed to build configured HTTP client; falling back to default client with timeout"
|
|
||||||
);
|
|
||||||
Client::builder()
|
|
||||||
.default_headers(headers)
|
|
||||||
.timeout(Duration::from_secs(30))
|
|
||||||
.build()
|
|
||||||
.unwrap_or_else(|_| Client::new())
|
|
||||||
});
|
|
||||||
|
|
||||||
Self {
|
Self {
|
||||||
client,
|
client: http::Client::with_timeout(Duration::from_secs(30)),
|
||||||
base_url: base_url.trim_end_matches('/').to_string(),
|
base_url: base_url.trim_end_matches('/').to_string(),
|
||||||
token: token.to_string(),
|
token: token.to_string(),
|
||||||
rate_limiter: Arc::new(Mutex::new(RateLimiter::new(
|
rate_limiter: Arc::new(Mutex::new(RateLimiter::new(
|
||||||
@@ -131,25 +110,34 @@ impl GitLabClient {
|
|||||||
let mut last_response = None;
|
let mut last_response = None;
|
||||||
|
|
||||||
for attempt in 0..=Self::MAX_RETRIES {
|
for attempt in 0..=Self::MAX_RETRIES {
|
||||||
let delay = self.rate_limiter.lock().await.check_delay();
|
// SAFETY: std::sync::Mutex blocks the executor thread while held. This is safe
|
||||||
|
// because the critical section is a single Instant::now() comparison with no I/O.
|
||||||
|
// If async work is ever added inside the lock, switch to an async-aware lock.
|
||||||
|
let delay = {
|
||||||
|
let mut limiter = self
|
||||||
|
.rate_limiter
|
||||||
|
.lock()
|
||||||
|
.unwrap_or_else(|poisoned| poisoned.into_inner());
|
||||||
|
limiter.check_delay()
|
||||||
|
};
|
||||||
if let Some(d) = delay {
|
if let Some(d) = delay {
|
||||||
sleep(d).await;
|
sleep(wall_now(), d).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
debug!(url = %url, attempt, "GitLab request");
|
debug!(url = %url, attempt, "GitLab request");
|
||||||
|
|
||||||
let response = self
|
let response = self
|
||||||
.client
|
.client
|
||||||
.get(&url)
|
.get(
|
||||||
.header("PRIVATE-TOKEN", &self.token)
|
&url,
|
||||||
.send()
|
&[
|
||||||
.await
|
("PRIVATE-TOKEN", self.token.as_str()),
|
||||||
.map_err(|e| LoreError::GitLabNetworkError {
|
("Accept", "application/json"),
|
||||||
base_url: self.base_url.clone(),
|
],
|
||||||
source: Some(e),
|
)
|
||||||
})?;
|
.await?;
|
||||||
|
|
||||||
if response.status() == StatusCode::TOO_MANY_REQUESTS && attempt < Self::MAX_RETRIES {
|
if response.status == 429 && attempt < Self::MAX_RETRIES {
|
||||||
let retry_after = Self::parse_retry_after(&response);
|
let retry_after = Self::parse_retry_after(&response);
|
||||||
tracing::info!(
|
tracing::info!(
|
||||||
path = %path,
|
path = %path,
|
||||||
@@ -158,7 +146,7 @@ impl GitLabClient {
|
|||||||
status_code = 429u16,
|
status_code = 429u16,
|
||||||
"Rate limited, retrying"
|
"Rate limited, retrying"
|
||||||
);
|
);
|
||||||
sleep(Duration::from_secs(retry_after)).await;
|
sleep(wall_now(), Duration::from_secs(retry_after)).await;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -167,53 +155,35 @@ impl GitLabClient {
|
|||||||
}
|
}
|
||||||
|
|
||||||
self.handle_response(last_response.expect("retry loop ran at least once"), path)
|
self.handle_response(last_response.expect("retry loop ran at least once"), path)
|
||||||
.await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_retry_after(response: &Response) -> u64 {
|
fn parse_retry_after(response: &http::Response) -> u64 {
|
||||||
response
|
response
|
||||||
.headers()
|
.header("retry-after")
|
||||||
.get("retry-after")
|
|
||||||
.and_then(|v| v.to_str().ok())
|
|
||||||
.and_then(|s| s.parse().ok())
|
.and_then(|s| s.parse().ok())
|
||||||
.unwrap_or(60)
|
.unwrap_or(60)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn handle_response<T: serde::de::DeserializeOwned>(
|
fn handle_response<T: serde::de::DeserializeOwned>(
|
||||||
&self,
|
&self,
|
||||||
response: Response,
|
response: http::Response,
|
||||||
path: &str,
|
path: &str,
|
||||||
) -> Result<T> {
|
) -> Result<T> {
|
||||||
match response.status() {
|
match response.status {
|
||||||
StatusCode::UNAUTHORIZED => Err(LoreError::GitLabAuthFailed),
|
401 => Err(LoreError::GitLabAuthFailed),
|
||||||
|
404 => Err(LoreError::GitLabNotFound {
|
||||||
StatusCode::NOT_FOUND => Err(LoreError::GitLabNotFound {
|
|
||||||
resource: path.to_string(),
|
resource: path.to_string(),
|
||||||
}),
|
}),
|
||||||
|
429 => {
|
||||||
StatusCode::TOO_MANY_REQUESTS => {
|
|
||||||
let retry_after = Self::parse_retry_after(&response);
|
let retry_after = Self::parse_retry_after(&response);
|
||||||
Err(LoreError::GitLabRateLimited { retry_after })
|
Err(LoreError::GitLabRateLimited { retry_after })
|
||||||
}
|
}
|
||||||
|
_ if response.is_success() => response.json::<T>().map_err(|e| {
|
||||||
status if status.is_success() => {
|
LoreError::Other(format!("Failed to decode response from {path}: {e}"))
|
||||||
let text = response.text().await?;
|
}),
|
||||||
serde_json::from_str(&text).map_err(|e| {
|
s => Err(LoreError::Other(format!(
|
||||||
let preview = if text.len() > 500 {
|
"GitLab API error: {s} {}",
|
||||||
&text[..text.floor_char_boundary(500)]
|
response.reason
|
||||||
} else {
|
|
||||||
&text
|
|
||||||
};
|
|
||||||
LoreError::Other(format!(
|
|
||||||
"Failed to decode response from {path}: {e}\nResponse preview: {preview}"
|
|
||||||
))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
status => Err(LoreError::Other(format!(
|
|
||||||
"GitLab API error: {} {}",
|
|
||||||
status.as_u16(),
|
|
||||||
status.canonical_reason().unwrap_or("Unknown")
|
|
||||||
))),
|
))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -261,9 +231,7 @@ impl GitLabClient {
|
|||||||
yield Ok(issue);
|
yield Ok(issue);
|
||||||
}
|
}
|
||||||
|
|
||||||
let next_page = headers
|
let next_page = header_value(&headers, "x-next-page")
|
||||||
.get("x-next-page")
|
|
||||||
.and_then(|v| v.to_str().ok())
|
|
||||||
.and_then(|s| s.parse::<u32>().ok());
|
.and_then(|s| s.parse::<u32>().ok());
|
||||||
|
|
||||||
match next_page {
|
match next_page {
|
||||||
@@ -317,9 +285,7 @@ impl GitLabClient {
|
|||||||
yield Ok(discussion);
|
yield Ok(discussion);
|
||||||
}
|
}
|
||||||
|
|
||||||
let next_page = headers
|
let next_page = header_value(&headers, "x-next-page")
|
||||||
.get("x-next-page")
|
|
||||||
.and_then(|v| v.to_str().ok())
|
|
||||||
.and_then(|s| s.parse::<u32>().ok());
|
.and_then(|s| s.parse::<u32>().ok());
|
||||||
|
|
||||||
match next_page {
|
match next_page {
|
||||||
@@ -422,10 +388,7 @@ impl GitLabClient {
|
|||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
let link_next = parse_link_header_next(&headers);
|
let link_next = parse_link_header_next(&headers);
|
||||||
let x_next_page = headers
|
let x_next_page = header_value(&headers, "x-next-page").and_then(|s| s.parse::<u32>().ok());
|
||||||
.get("x-next-page")
|
|
||||||
.and_then(|v| v.to_str().ok())
|
|
||||||
.and_then(|s| s.parse::<u32>().ok());
|
|
||||||
let full_page = items.len() as u32 == per_page;
|
let full_page = items.len() as u32 == per_page;
|
||||||
|
|
||||||
let (next_page, is_last_page) = match (link_next.is_some(), x_next_page, full_page) {
|
let (next_page, is_last_page) = match (link_next.is_some(), x_next_page, full_page) {
|
||||||
@@ -473,9 +436,7 @@ impl GitLabClient {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let link_next = parse_link_header_next(&headers);
|
let link_next = parse_link_header_next(&headers);
|
||||||
let x_next_page = headers
|
let x_next_page = header_value(&headers, "x-next-page")
|
||||||
.get("x-next-page")
|
|
||||||
.and_then(|v| v.to_str().ok())
|
|
||||||
.and_then(|s| s.parse::<u32>().ok());
|
.and_then(|s| s.parse::<u32>().ok());
|
||||||
|
|
||||||
let should_continue = match (link_next.is_some(), x_next_page, full_page) {
|
let should_continue = match (link_next.is_some(), x_next_page, full_page) {
|
||||||
@@ -511,31 +472,40 @@ impl GitLabClient {
|
|||||||
&self,
|
&self,
|
||||||
path: &str,
|
path: &str,
|
||||||
params: &[(&str, String)],
|
params: &[(&str, String)],
|
||||||
) -> Result<(T, HeaderMap)> {
|
) -> Result<(T, Vec<(String, String)>)> {
|
||||||
let url = format!("{}{}", self.base_url, path);
|
let url = format!("{}{}", self.base_url, path);
|
||||||
let mut last_response = None;
|
let mut last_response = None;
|
||||||
|
|
||||||
for attempt in 0..=Self::MAX_RETRIES {
|
for attempt in 0..=Self::MAX_RETRIES {
|
||||||
let delay = self.rate_limiter.lock().await.check_delay();
|
// SAFETY: std::sync::Mutex blocks the executor thread while held. This is safe
|
||||||
|
// because the critical section is a single Instant::now() comparison with no I/O.
|
||||||
|
// If async work is ever added inside the lock, switch to an async-aware lock.
|
||||||
|
let delay = {
|
||||||
|
let mut limiter = self
|
||||||
|
.rate_limiter
|
||||||
|
.lock()
|
||||||
|
.unwrap_or_else(|poisoned| poisoned.into_inner());
|
||||||
|
limiter.check_delay()
|
||||||
|
};
|
||||||
if let Some(d) = delay {
|
if let Some(d) = delay {
|
||||||
sleep(d).await;
|
sleep(wall_now(), d).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
debug!(url = %url, ?params, attempt, "GitLab paginated request");
|
debug!(url = %url, ?params, attempt, "GitLab paginated request");
|
||||||
|
|
||||||
let response = self
|
let response = self
|
||||||
.client
|
.client
|
||||||
.get(&url)
|
.get_with_query(
|
||||||
.query(params)
|
&url,
|
||||||
.header("PRIVATE-TOKEN", &self.token)
|
params,
|
||||||
.send()
|
&[
|
||||||
.await
|
("PRIVATE-TOKEN", self.token.as_str()),
|
||||||
.map_err(|e| LoreError::GitLabNetworkError {
|
("Accept", "application/json"),
|
||||||
base_url: self.base_url.clone(),
|
],
|
||||||
source: Some(e),
|
)
|
||||||
})?;
|
.await?;
|
||||||
|
|
||||||
if response.status() == StatusCode::TOO_MANY_REQUESTS && attempt < Self::MAX_RETRIES {
|
if response.status == 429 && attempt < Self::MAX_RETRIES {
|
||||||
let retry_after = Self::parse_retry_after(&response);
|
let retry_after = Self::parse_retry_after(&response);
|
||||||
tracing::info!(
|
tracing::info!(
|
||||||
path = %path,
|
path = %path,
|
||||||
@@ -544,7 +514,7 @@ impl GitLabClient {
|
|||||||
status_code = 429u16,
|
status_code = 429u16,
|
||||||
"Rate limited, retrying"
|
"Rate limited, retrying"
|
||||||
);
|
);
|
||||||
sleep(Duration::from_secs(retry_after)).await;
|
sleep(wall_now(), Duration::from_secs(retry_after)).await;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -553,8 +523,8 @@ impl GitLabClient {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let response = last_response.expect("retry loop ran at least once");
|
let response = last_response.expect("retry loop ran at least once");
|
||||||
let headers = response.headers().clone();
|
let headers = response.headers.clone();
|
||||||
let body = self.handle_response(response, path).await?;
|
let body = self.handle_response(response, path)?;
|
||||||
Ok((body, headers))
|
Ok((body, headers))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -613,10 +583,8 @@ impl GitLabClient {
|
|||||||
let full_page = items.len() as u32 == per_page;
|
let full_page = items.len() as u32 == per_page;
|
||||||
results.extend(items);
|
results.extend(items);
|
||||||
|
|
||||||
let next_page = headers
|
let next_page =
|
||||||
.get("x-next-page")
|
header_value(&headers, "x-next-page").and_then(|s| s.parse::<u32>().ok());
|
||||||
.and_then(|v| v.to_str().ok())
|
|
||||||
.and_then(|s| s.parse::<u32>().ok());
|
|
||||||
|
|
||||||
match next_page {
|
match next_page {
|
||||||
Some(next) if next > page => page = next,
|
Some(next) if next > page => page = next,
|
||||||
@@ -726,14 +694,14 @@ impl GitLabClient {
|
|||||||
)> {
|
)> {
|
||||||
let (state_res, label_res, milestone_res) = match entity_type {
|
let (state_res, label_res, milestone_res) = match entity_type {
|
||||||
"issue" => {
|
"issue" => {
|
||||||
tokio::join!(
|
futures::join!(
|
||||||
self.fetch_issue_state_events(gitlab_project_id, iid),
|
self.fetch_issue_state_events(gitlab_project_id, iid),
|
||||||
self.fetch_issue_label_events(gitlab_project_id, iid),
|
self.fetch_issue_label_events(gitlab_project_id, iid),
|
||||||
self.fetch_issue_milestone_events(gitlab_project_id, iid),
|
self.fetch_issue_milestone_events(gitlab_project_id, iid),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
"merge_request" => {
|
"merge_request" => {
|
||||||
tokio::join!(
|
futures::join!(
|
||||||
self.fetch_mr_state_events(gitlab_project_id, iid),
|
self.fetch_mr_state_events(gitlab_project_id, iid),
|
||||||
self.fetch_mr_label_events(gitlab_project_id, iid),
|
self.fetch_mr_label_events(gitlab_project_id, iid),
|
||||||
self.fetch_mr_milestone_events(gitlab_project_id, iid),
|
self.fetch_mr_milestone_events(gitlab_project_id, iid),
|
||||||
@@ -761,22 +729,26 @@ pub struct MergeRequestPage {
|
|||||||
pub is_last_page: bool,
|
pub is_last_page: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_link_header_next(headers: &HeaderMap) -> Option<String> {
|
fn header_value<'a>(headers: &'a [(String, String)], name: &str) -> Option<&'a str> {
|
||||||
headers
|
headers
|
||||||
.get("link")
|
.iter()
|
||||||
.and_then(|v| v.to_str().ok())
|
.find(|(k, _)| k.eq_ignore_ascii_case(name))
|
||||||
.and_then(|link_str| {
|
.map(|(_, v)| v.as_str())
|
||||||
for part in link_str.split(',') {
|
}
|
||||||
let part = part.trim();
|
|
||||||
if (part.contains("rel=\"next\"") || part.contains("rel=next"))
|
fn parse_link_header_next(headers: &[(String, String)]) -> Option<String> {
|
||||||
&& let Some(start) = part.find('<')
|
header_value(headers, "link").and_then(|link_str| {
|
||||||
&& let Some(end) = part.find('>')
|
for part in link_str.split(',') {
|
||||||
{
|
let part = part.trim();
|
||||||
return Some(part[start + 1..end].to_string());
|
if (part.contains("rel=\"next\"") || part.contains("rel=next"))
|
||||||
}
|
&& let Some(start) = part.find('<')
|
||||||
|
&& let Some(end) = part.find('>')
|
||||||
|
{
|
||||||
|
return Some(part[start + 1..end].to_string());
|
||||||
}
|
}
|
||||||
None
|
}
|
||||||
})
|
None
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn coalesce_not_found<T>(result: Result<Vec<T>>) -> Result<Vec<T>> {
|
fn coalesce_not_found<T>(result: Result<Vec<T>>) -> Result<Vec<T>> {
|
||||||
@@ -836,13 +808,10 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parse_link_header_extracts_next_url() {
|
fn parse_link_header_extracts_next_url() {
|
||||||
let mut headers = HeaderMap::new();
|
let headers = vec![(
|
||||||
headers.insert(
|
"link".to_string(),
|
||||||
"link",
|
r#"<https://gitlab.example.com/api/v4/projects/1/merge_requests?page=2>; rel="next", <https://gitlab.example.com/api/v4/projects/1/merge_requests?page=5>; rel="last""#.to_string(),
|
||||||
HeaderValue::from_static(
|
)];
|
||||||
r#"<https://gitlab.example.com/api/v4/projects/1/merge_requests?page=2>; rel="next", <https://gitlab.example.com/api/v4/projects/1/merge_requests?page=5>; rel="last""#,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
let result = parse_link_header_next(&headers);
|
let result = parse_link_header_next(&headers);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -853,11 +822,10 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parse_link_header_handles_unquoted_rel() {
|
fn parse_link_header_handles_unquoted_rel() {
|
||||||
let mut headers = HeaderMap::new();
|
let headers = vec![(
|
||||||
headers.insert(
|
"link".to_string(),
|
||||||
"link",
|
r#"<https://example.com/next>; rel=next"#.to_string(),
|
||||||
HeaderValue::from_static(r#"<https://example.com/next>; rel=next"#),
|
)];
|
||||||
);
|
|
||||||
|
|
||||||
let result = parse_link_header_next(&headers);
|
let result = parse_link_header_next(&headers);
|
||||||
assert_eq!(result, Some("https://example.com/next".to_string()));
|
assert_eq!(result, Some("https://example.com/next".to_string()));
|
||||||
@@ -865,11 +833,10 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parse_link_header_returns_none_when_no_next() {
|
fn parse_link_header_returns_none_when_no_next() {
|
||||||
let mut headers = HeaderMap::new();
|
let headers = vec![(
|
||||||
headers.insert(
|
"link".to_string(),
|
||||||
"link",
|
r#"<https://example.com/last>; rel="last""#.to_string(),
|
||||||
HeaderValue::from_static(r#"<https://example.com/last>; rel="last""#),
|
)];
|
||||||
);
|
|
||||||
|
|
||||||
let result = parse_link_header_next(&headers);
|
let result = parse_link_header_next(&headers);
|
||||||
assert!(result.is_none());
|
assert!(result.is_none());
|
||||||
@@ -877,7 +844,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parse_link_header_returns_none_when_missing() {
|
fn parse_link_header_returns_none_when_missing() {
|
||||||
let headers = HeaderMap::new();
|
let headers: Vec<(String, String)> = vec![];
|
||||||
let result = parse_link_header_next(&headers);
|
let result = parse_link_header_next(&headers);
|
||||||
assert!(result.is_none());
|
assert!(result.is_none());
|
||||||
}
|
}
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user