Compare commits
9 Commits
trace
...
53ce20595b
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
53ce20595b | ||
|
|
1808a4da8e | ||
|
|
7d032833a2 | ||
|
|
097249f4e6 | ||
|
|
8442bcf367 | ||
|
|
c0ca501662 | ||
|
|
c953d8e519 | ||
|
|
63bd58c9b4 | ||
|
|
714c8c2623 |
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
|||||||
bd-1sc6
|
bd-1elx
|
||||||
|
|||||||
140
docs/plan-expose-discussion-ids.feedback-5.md
Normal file
140
docs/plan-expose-discussion-ids.feedback-5.md
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
Your iteration 4 plan is already strong. The highest-impact revisions are around query shape, transaction boundaries, and contract stability for agents.
|
||||||
|
|
||||||
|
1. **Switch discussions query to a two-phase page-first architecture**
|
||||||
|
Analysis: Current `ranked_notes` runs over every filtered discussion before `LIMIT`, which can explode on project-wide queries. A page-first plan keeps complexity proportional to `limit`, improves tail latency, and reduces memory churn.
|
||||||
|
```diff
|
||||||
|
@@ ## 3c. SQL Query
|
||||||
|
-Core query uses a CTE + ranked-notes rollup (window function) to avoid per-row correlated
|
||||||
|
-subqueries.
|
||||||
|
+Core query is split into two phases for scalability:
|
||||||
|
+1) `paged_discussions` applies filters/sort/LIMIT and returns only page IDs.
|
||||||
|
+2) Note rollups and optional `--include-notes` expansion run only for those page IDs.
|
||||||
|
+This bounds note scanning to visible results and stabilizes latency on large projects.
|
||||||
|
|
||||||
|
-WITH filtered_discussions AS (
|
||||||
|
+WITH filtered_discussions AS (
|
||||||
|
...
|
||||||
|
),
|
||||||
|
-ranked_notes AS (
|
||||||
|
+paged_discussions AS (
|
||||||
|
+ SELECT id
|
||||||
|
+ FROM filtered_discussions
|
||||||
|
+ ORDER BY COALESCE({sort_column}, 0) {order}, id {order}
|
||||||
|
+ LIMIT ?
|
||||||
|
+),
|
||||||
|
+ranked_notes AS (
|
||||||
|
...
|
||||||
|
- WHERE n.discussion_id IN (SELECT id FROM filtered_discussions)
|
||||||
|
+ WHERE n.discussion_id IN (SELECT id FROM paged_discussions)
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Move snapshot transaction ownership to handlers (not query helpers)**
|
||||||
|
Analysis: This avoids nested transaction edge cases, keeps function signatures clean, and guarantees one snapshot across count + page + include-notes + serialization metadata.
|
||||||
|
```diff
|
||||||
|
@@ ## Cross-cutting: snapshot consistency
|
||||||
|
-Wrap `query_notes` and `query_discussions` in a deferred read transaction.
|
||||||
|
+Open one deferred read transaction in each handler (`handle_notes`, `handle_discussions`)
|
||||||
|
+and pass `&Transaction` into query helpers. Query helpers do not open/commit transactions.
|
||||||
|
+This guarantees a single snapshot across all subqueries and avoids nested tx pitfalls.
|
||||||
|
|
||||||
|
-pub fn query_discussions(conn: &Connection, ...)
|
||||||
|
+pub fn query_discussions(tx: &rusqlite::Transaction<'_>, ...)
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Add immutable input filter `--project-id` across notes/discussions/show**
|
||||||
|
Analysis: You already expose `gitlab_project_id` because paths are mutable; input should support the same immutable selector. This removes failure modes after project renames/transfers.
|
||||||
|
```diff
|
||||||
|
@@ ## 3a. CLI Args
|
||||||
|
+ /// Filter by immutable GitLab project ID
|
||||||
|
+ #[arg(long, help_heading = "Filters", conflicts_with = "project")]
|
||||||
|
+ pub project_id: Option<i64>,
|
||||||
|
@@ ## Bridge Contract
|
||||||
|
+Input symmetry rule: commands that accept `--project` should also accept `--project-id`.
|
||||||
|
+If both are present, return usage error (exit code 2).
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **Enforce bridge fields for nested notes in `discussions --include-notes`**
|
||||||
|
Analysis: Current guardrail is entity-level; nested notes can still lose required IDs under aggressive filtering. This is a contract hole for write-bridging.
|
||||||
|
```diff
|
||||||
|
@@ ### Field Filtering Guardrail
|
||||||
|
-In robot mode, `filter_fields` MUST force-include Bridge Contract fields...
|
||||||
|
+In robot mode, `filter_fields` MUST force-include Bridge Contract fields at all returned levels:
|
||||||
|
+- discussion row fields
|
||||||
|
+- nested note fields when `discussions --include-notes` is used
|
||||||
|
|
||||||
|
+const BRIDGE_FIELDS_DISCUSSION_NOTES: &[&str] = &[
|
||||||
|
+ "project_path", "gitlab_project_id", "noteable_type", "parent_iid",
|
||||||
|
+ "gitlab_discussion_id", "gitlab_note_id",
|
||||||
|
+];
|
||||||
|
```
|
||||||
|
|
||||||
|
5. **Make ambiguity preflight scope-aware and machine-actionable**
|
||||||
|
Analysis: Current preflight checks only `gitlab_discussion_id`, which can produce false ambiguity when additional filters already narrow to one project. Also, agents need structured candidates, not only free-text.
|
||||||
|
```diff
|
||||||
|
@@ ### Ambiguity Guardrail
|
||||||
|
-SELECT DISTINCT p.path_with_namespace
|
||||||
|
+SELECT DISTINCT p.path_with_namespace, p.gitlab_project_id
|
||||||
|
FROM discussions d
|
||||||
|
JOIN projects p ON p.id = d.project_id
|
||||||
|
-WHERE d.gitlab_discussion_id = ?
|
||||||
|
+WHERE d.gitlab_discussion_id = ?
|
||||||
|
+ /* plus active scope filters: noteable_type, for_issue/for_mr, since/path when present */
|
||||||
|
LIMIT 3
|
||||||
|
|
||||||
|
-Return LoreError::Ambiguous with message
|
||||||
|
+Return LoreError::Ambiguous with structured details:
|
||||||
|
+`{ code, message, candidates:[{project_path, gitlab_project_id}], suggestion }`
|
||||||
|
```
|
||||||
|
|
||||||
|
6. **Add `--contains` filter to `discussions`**
|
||||||
|
Analysis: This is a high-utility agent workflow gap. Agents frequently need “find thread by text then reply”; forcing a separate `notes` search round-trip is unnecessary.
|
||||||
|
```diff
|
||||||
|
@@ ## 3a. CLI Args
|
||||||
|
+ /// Filter discussions whose notes contain text
|
||||||
|
+ #[arg(long, help_heading = "Filters")]
|
||||||
|
+ pub contains: Option<String>,
|
||||||
|
@@ ## 3d. Filters struct
|
||||||
|
+ pub contains: Option<String>,
|
||||||
|
@@ ## 3d. Where-clause construction
|
||||||
|
+- `path` -> EXISTS (...)
|
||||||
|
+- `contains` -> EXISTS (
|
||||||
|
+ SELECT 1 FROM notes n
|
||||||
|
+ WHERE n.discussion_id = d.id
|
||||||
|
+      AND n.body LIKE '%' || ? || '%'
|
||||||
|
+ )
|
||||||
|
```
|
||||||
|
|
||||||
|
7. **Promote two baseline indexes from “candidate” to “required”**
|
||||||
|
Analysis: These are directly hit by new primary paths; waiting for post-merge profiling risks immediate perf cliffs in real usage.
|
||||||
|
```diff
|
||||||
|
@@ ## 3h. Query-plan validation
|
||||||
|
-Candidate indexes (add only if EXPLAIN QUERY PLAN shows they're needed):
|
||||||
|
-- discussions(project_id, gitlab_discussion_id)
|
||||||
|
-- notes(discussion_id, created_at DESC, id DESC)
|
||||||
|
+Required baseline indexes for this feature:
|
||||||
|
+- discussions(project_id, gitlab_discussion_id)
|
||||||
|
+- notes(discussion_id, created_at DESC, id DESC)
|
||||||
|
+Keep other indexes conditional on EXPLAIN QUERY PLAN.
|
||||||
|
```
|
||||||
|
|
||||||
|
8. **Add schema versioning and remove contradictory rejected items**
|
||||||
|
Analysis: `robot-docs` contract drift is a long-term agent risk; explicit schema versions let clients fail safely. Also, rejected items currently contradict active sections, which creates implementation ambiguity.
|
||||||
|
```diff
|
||||||
|
@@ ## 4. Fix Robot-Docs Response Schemas
|
||||||
|
"meta": {"elapsed_ms": "int", ...}
|
||||||
|
+"meta": {"elapsed_ms":"int", ..., "schema_version":"string"}
|
||||||
|
+
|
||||||
|
+Schema version policy:
|
||||||
|
+- bump minor on additive fields
|
||||||
|
+- bump major on removals/renames
|
||||||
|
+- expose per-command versions in `robot-docs`
|
||||||
|
@@ ## Rejected Recommendations
|
||||||
|
-- Add `gitlab_note_id` to show-command note detail structs ... rejected ...
|
||||||
|
-- Add `gitlab_discussion_id` to show-command discussion detail structs ... rejected ...
|
||||||
|
-- Add `gitlab_project_id` to show-command discussion detail structs ... rejected ...
|
||||||
|
+Remove stale rejected entries that conflict with accepted workstreams in this plan iteration.
|
||||||
|
```
|
||||||
|
|
||||||
|
If you want, I can produce a fully rewritten iteration 5 plan document that applies all of the above edits cleanly end-to-end.
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
plan: true
|
plan: true
|
||||||
title: ""
|
title: ""
|
||||||
status: iterating
|
status: iterating
|
||||||
iteration: 4
|
iteration: 5
|
||||||
target_iterations: 8
|
target_iterations: 8
|
||||||
beads_revision: 0
|
beads_revision: 0
|
||||||
related_plans: []
|
related_plans: []
|
||||||
@@ -52,8 +52,9 @@ output.
|
|||||||
### Field Filtering Guardrail
|
### Field Filtering Guardrail
|
||||||
|
|
||||||
In robot mode, `filter_fields` **MUST** force-include Bridge Contract fields even when the
|
In robot mode, `filter_fields` **MUST** force-include Bridge Contract fields even when the
|
||||||
caller passes a narrower `--fields` list. This prevents agents from accidentally stripping
|
caller passes a narrower `--fields` list. This applies at **all nesting levels**: both the
|
||||||
the identifiers they need for write operations.
|
top-level entity fields and nested sub-entities (e.g., notes inside `discussions --include-notes`).
|
||||||
|
This prevents agents from accidentally stripping the identifiers they need for write operations.
|
||||||
|
|
||||||
**Implementation**: Add a `BRIDGE_FIELDS` constant map per entity type. In `filter_fields()`,
|
**Implementation**: Add a `BRIDGE_FIELDS` constant map per entity type. In `filter_fields()`,
|
||||||
when operating in robot mode, union the caller's requested fields with the bridge set before
|
when operating in robot mode, union the caller's requested fields with the bridge set before
|
||||||
@@ -69,70 +70,127 @@ const BRIDGE_FIELDS_DISCUSSIONS: &[&str] = &[
|
|||||||
"project_path", "gitlab_project_id", "noteable_type", "parent_iid",
|
"project_path", "gitlab_project_id", "noteable_type", "parent_iid",
|
||||||
"gitlab_discussion_id",
|
"gitlab_discussion_id",
|
||||||
];
|
];
|
||||||
|
// Applied to nested notes within discussions --include-notes
|
||||||
|
const BRIDGE_FIELDS_DISCUSSION_NOTES: &[&str] = &[
|
||||||
|
"project_path", "gitlab_project_id", "noteable_type", "parent_iid",
|
||||||
|
"gitlab_discussion_id", "gitlab_note_id",
|
||||||
|
];
|
||||||
```
|
```
|
||||||
|
|
||||||
In `filter_fields`, when entity is `"notes"` or `"discussions"`, merge the bridge set into the
|
In `filter_fields`, when entity is `"notes"` or `"discussions"`, merge the bridge set into the
|
||||||
requested fields before filtering the JSON value. This is a ~5-line change to the existing
|
requested fields before filtering the JSON value. For `"discussions"`, also apply
|
||||||
function.
|
`BRIDGE_FIELDS_DISCUSSION_NOTES` to each element of the nested `notes` array. This is a ~10-line
|
||||||
|
change to the existing function.
|
||||||
|
|
||||||
|
### Snapshot Consistency (Cross-Cutting)
|
||||||
|
|
||||||
|
Multi-query commands (`handle_notes`, `handle_discussions`) **MUST** execute all their queries
|
||||||
|
within a single deferred read transaction. This guarantees snapshot consistency when a concurrent
|
||||||
|
sync/ingest is modifying the database.
|
||||||
|
|
||||||
|
**Transaction ownership lives in handlers, not query helpers.** Each handler opens one deferred
|
||||||
|
read transaction and passes it to query helpers. Query helpers accept `&Connection` (which
|
||||||
|
`Transaction` derefs to via `std::ops::Deref`) so they remain testable with plain connections
|
||||||
|
in unit tests. This avoids nested transaction edge cases and guarantees a single snapshot across
|
||||||
|
count + page + include-notes + serialization.
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// In handle_notes / handle_discussions:
|
||||||
|
let tx = conn.transaction_with_behavior(rusqlite::TransactionBehavior::Deferred)?;
|
||||||
|
let result = query_notes(&tx, &filters, &config)?;
|
||||||
|
// ... serialize ...
|
||||||
|
tx.commit()?; // read-only, but closes cleanly
|
||||||
|
```
|
||||||
|
|
||||||
|
Query helpers keep their `conn: &Connection` signature — `Transaction<'_>` implements
|
||||||
|
`Deref<Target = Connection>`, so `&tx` coerces to `&Connection` at call sites.
|
||||||
|
|
||||||
### Ambiguity Guardrail
|
### Ambiguity Guardrail
|
||||||
|
|
||||||
When filtering by `gitlab_discussion_id` (on either `notes` or `discussions` commands) without
|
When filtering by `gitlab_discussion_id` (on either `notes` or `discussions` commands) without
|
||||||
`--project`, if the query matches discussions in multiple projects:
|
`--project`, if the query matches discussions in multiple projects:
|
||||||
- Return an `Ambiguous` error (exit code 18, matching existing convention)
|
- Return an `Ambiguous` error (exit code 18, matching existing convention)
|
||||||
- Include matching project paths in the error message
|
- Include matching project paths **and `gitlab_project_id`s** in a structured candidates list
|
||||||
- Suggest retry with `--project <path>`
|
- Suggest retry with `--project <path>`
|
||||||
|
|
||||||
**Implementation**: Run a **preflight distinct-project check** before the main list query
|
**Implementation**: Run a **scope-aware preflight distinct-project check** before the main list
|
||||||
executes its `LIMIT`. This is critical because a post-query check on the paginated result set
|
query executes its `LIMIT`. The preflight applies active scope filters (noteable_type, since,
|
||||||
can silently miss cross-project ambiguity when `LIMIT` truncates results to rows from a single
|
for_issue/for_mr) alongside the discussion ID check, so it won't produce false ambiguity when
|
||||||
project. The preflight query is cheap (hits the `gitlab_discussion_id` index, returns at most
|
other filters already narrow to one project. This is critical because a post-query check on the
|
||||||
a few rows) and eliminates non-deterministic write-targeting risk.
|
paginated result set can silently miss cross-project ambiguity when `LIMIT` truncates results to
|
||||||
|
rows from a single project. The preflight query is cheap (hits the `gitlab_discussion_id` index,
|
||||||
|
returns at most a few rows) and eliminates non-deterministic write-targeting risk.
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
-- Preflight ambiguity check (runs before main query)
|
-- Preflight ambiguity check (runs before main query, includes active scope filters)
|
||||||
SELECT DISTINCT p.path_with_namespace
|
SELECT DISTINCT p.path_with_namespace, p.gitlab_project_id
|
||||||
FROM discussions d
|
FROM discussions d
|
||||||
JOIN projects p ON p.id = d.project_id
|
JOIN projects p ON p.id = d.project_id
|
||||||
WHERE d.gitlab_discussion_id = ?
|
WHERE d.gitlab_discussion_id = ?
|
||||||
|
-- scope filters applied dynamically:
|
||||||
|
-- AND d.noteable_type = ? (when --noteable-type present)
|
||||||
|
-- AND d.merge_request_id = (SELECT ...) (when --for-mr present)
|
||||||
|
-- AND d.issue_id = (SELECT ...) (when --for-issue present)
|
||||||
LIMIT 3
|
LIMIT 3
|
||||||
```
|
```
|
||||||
|
|
||||||
If more than one project is found, return `LoreError::Ambiguous` (exit code 18) with the
|
If more than one project is found, return `LoreError::Ambiguous` (exit code 18) with structured
|
||||||
distinct project paths and suggestion to retry with `--project <path>`.
|
candidates for machine consumption:
|
||||||
|
|
||||||
```rust
|
```rust
|
||||||
// In query_notes / query_discussions, before executing the main query:
|
// In query_notes / query_discussions, before executing the main query:
|
||||||
if let Some(ref disc_id) = filters.gitlab_discussion_id {
|
if let Some(ref disc_id) = filters.gitlab_discussion_id {
|
||||||
if filters.project.is_none() {
|
if filters.project.is_none() {
|
||||||
let distinct_projects: Vec<String> = conn
|
let candidates: Vec<(String, i64)> = conn
|
||||||
.prepare(
|
.prepare(
|
||||||
"SELECT DISTINCT p.path_with_namespace \
|
"SELECT DISTINCT p.path_with_namespace, p.gitlab_project_id \
|
||||||
FROM discussions d \
|
FROM discussions d \
|
||||||
JOIN projects p ON p.id = d.project_id \
|
JOIN projects p ON p.id = d.project_id \
|
||||||
WHERE d.gitlab_discussion_id = ? \
|
WHERE d.gitlab_discussion_id = ? \
|
||||||
LIMIT 3"
|
LIMIT 3"
|
||||||
|
// Note: add scope filter clauses dynamically
|
||||||
)?
|
)?
|
||||||
.query_map([disc_id], |row| row.get(0))?
|
.query_map([disc_id], |row| Ok((row.get(0)?, row.get(1)?)))?
|
||||||
.collect::<std::result::Result<Vec<_>, _>>()?;
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
if distinct_projects.len() > 1 {
|
if candidates.len() > 1 {
|
||||||
return Err(LoreError::Ambiguous {
|
return Err(LoreError::Ambiguous {
|
||||||
message: format!(
|
message: format!(
|
||||||
"Discussion ID matches {} projects: {}. Use --project to disambiguate.",
|
"Discussion ID matches {} projects. Use --project to disambiguate.",
|
||||||
distinct_projects.len(),
|
candidates.len(),
|
||||||
distinct_projects.join(", ")
|
|
||||||
),
|
),
|
||||||
|
candidates: candidates.into_iter()
|
||||||
|
.map(|(path, id)| AmbiguousCandidate { project_path: path, gitlab_project_id: id })
|
||||||
|
.collect(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
In robot mode, the error serializes as:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "AMBIGUOUS",
|
||||||
|
"message": "Discussion ID matches 2 projects. Use --project to disambiguate.",
|
||||||
|
"candidates": [
|
||||||
|
{"project_path": "group/repo-a", "gitlab_project_id": 42},
|
||||||
|
{"project_path": "group/repo-b", "gitlab_project_id": 99}
|
||||||
|
],
|
||||||
|
"suggestion": "lore -J discussions --gitlab-discussion-id <id> --project <path>",
|
||||||
|
"actions": ["lore -J discussions --gitlab-discussion-id <id> --project group/repo-a"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This gives agents machine-actionable candidates: they can pick a project and retry immediately
|
||||||
|
without parsing free-text error messages.
|
||||||
|
|
||||||
#### 1h. Wrap `query_notes` in a read transaction
|
#### 1h. Wrap `query_notes` in a read transaction
|
||||||
|
|
||||||
Wrap the count query and page query in a deferred read transaction per the Snapshot Consistency
|
Per the Snapshot Consistency cross-cutting requirement, `handle_notes` opens a deferred read
|
||||||
cross-cutting requirement. See the Bridge Contract section for the pattern.
|
transaction and passes it to `query_notes`. See the Snapshot Consistency section for the pattern.
|
||||||
|
|
||||||
### Tests
|
### Tests
|
||||||
|
|
||||||
@@ -337,6 +395,7 @@ fn notes_ambiguous_gitlab_discussion_id_across_projects() {
|
|||||||
// (this can happen since IDs are per-project)
|
// (this can happen since IDs are per-project)
|
||||||
// Filter by gitlab_discussion_id without --project
|
// Filter by gitlab_discussion_id without --project
|
||||||
// Assert LoreError::Ambiguous is returned with both project paths
|
// Assert LoreError::Ambiguous is returned with both project paths
|
||||||
|
// Assert candidates include gitlab_project_id for machine consumption
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -352,6 +411,19 @@ fn notes_ambiguity_preflight_not_defeated_by_limit() {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Test 8: Ambiguity preflight respects scope filters (no false positives)
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[test]
|
||||||
|
fn notes_ambiguity_preflight_respects_scope_filters() {
|
||||||
|
let conn = create_test_db();
|
||||||
|
// Insert 2 projects, each with a discussion sharing the same gitlab_discussion_id
|
||||||
|
// But one is Issue-type and the other MergeRequest-type
|
||||||
|
// Filter by gitlab_discussion_id + --noteable-type MergeRequest (narrows to 1 project)
|
||||||
|
// Assert NO ambiguity error — scope filters disambiguate
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 2. Add `gitlab_discussion_id` to Show Command Discussion Groups
|
## 2. Add `gitlab_discussion_id` to Show Command Discussion Groups
|
||||||
@@ -644,6 +716,9 @@ lore -J discussions --gitlab-discussion-id 6a9c1750b37d
|
|||||||
|
|
||||||
# List unresolved threads with latest 2 notes inline (fewer round-trips)
|
# List unresolved threads with latest 2 notes inline (fewer round-trips)
|
||||||
lore -J discussions --for-mr 99 --resolution unresolved --include-notes 2
|
lore -J discussions --for-mr 99 --resolution unresolved --include-notes 2
|
||||||
|
|
||||||
|
# Find discussions containing specific text
|
||||||
|
lore -J discussions --for-mr 99 --contains "prefer the approach"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Response Schema
|
### Response Schema
|
||||||
@@ -801,6 +876,10 @@ pub struct DiscussionsArgs {
|
|||||||
#[arg(long, value_enum, help_heading = "Filters")]
|
#[arg(long, value_enum, help_heading = "Filters")]
|
||||||
pub noteable_type: Option<NoteableTypeFilter>,
|
pub noteable_type: Option<NoteableTypeFilter>,
|
||||||
|
|
||||||
|
/// Filter discussions whose notes contain text (case-insensitive LIKE match)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub contains: Option<String>,
|
||||||
|
|
||||||
/// Include up to N latest notes per discussion (0 = none, default; clamped to 20)
|
/// Include up to N latest notes per discussion (0 = none, default; clamped to 20)
|
||||||
#[arg(long, default_value = "0", help_heading = "Output")]
|
#[arg(long, default_value = "0", help_heading = "Output")]
|
||||||
pub include_notes: usize,
|
pub include_notes: usize,
|
||||||
@@ -925,7 +1004,7 @@ The `included_note_count` is set to `notes.len()` and `has_more_notes` is set to
|
|||||||
`note_count > included_note_count` during the JSON conversion, providing per-discussion
|
`note_count > included_note_count` during the JSON conversion, providing per-discussion
|
||||||
truncation signals.
|
truncation signals.
|
||||||
|
|
||||||
#### 3c. SQL Query
|
#### 3c. SQL Query — Two-Phase Page-First Architecture
|
||||||
|
|
||||||
**File**: `src/cli/commands/list.rs`
|
**File**: `src/cli/commands/list.rs`
|
||||||
|
|
||||||
@@ -935,21 +1014,29 @@ pub fn query_discussions(
|
|||||||
filters: &DiscussionListFilters,
|
filters: &DiscussionListFilters,
|
||||||
config: &Config,
|
config: &Config,
|
||||||
) -> Result<DiscussionListResult> {
|
) -> Result<DiscussionListResult> {
|
||||||
// Wrap all queries in a deferred read transaction for snapshot consistency
|
// NOTE: Transaction is managed by the handler (handle_discussions).
|
||||||
let tx = conn.transaction_with_behavior(rusqlite::TransactionBehavior::Deferred)?;
|
// This function receives &Connection (which Transaction derefs to via `std::ops::Deref`).
|
||||||
|
|
||||||
// Preflight ambiguity check (if gitlab_discussion_id without project)
|
// Preflight ambiguity check (if gitlab_discussion_id without project)
|
||||||
// ... see Ambiguity Guardrail section ...
|
// ... see Ambiguity Guardrail section ...
|
||||||
|
|
||||||
// Main query + count query ...
|
// Phase 1: Filter + sort + LIMIT to get page IDs
|
||||||
// ... note expansion query (if include_notes > 0) ...
|
// Phase 2: Note rollups only for paged results
|
||||||
|
// Phase 3: Optional --include-notes expansion (separate query)
|
||||||
tx.commit()?;
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Core query uses a CTE + ranked-notes rollup (window function) to avoid per-row correlated
|
The query uses a **two-phase page-first architecture** for scalability:
|
||||||
subqueries. The `ROW_NUMBER()` approach produces a single scan over the notes table, which
|
|
||||||
is more predictable than repeated LIMIT 1 sub-selects at scale (200K+ discussions):
|
1. **Phase 1** (`paged_discussions`): Apply all filters, sort, and LIMIT to produce just the
|
||||||
|
discussion IDs for the current page. This bounds the result set before any note scanning.
|
||||||
|
2. **Phase 2** (`ranked_notes` + `note_rollup`): Run note aggregation only for the paged
|
||||||
|
discussion IDs. This ensures note scanning is proportional to `--limit`, not to the total
|
||||||
|
filtered discussion count.
|
||||||
|
|
||||||
|
This architecture prevents the performance cliff that occurs on project-wide queries with
|
||||||
|
thousands of discussions: instead of scanning notes for all filtered discussions (potentially
|
||||||
|
200K+), we scan only for the 50 (or whatever `--limit` is) that will actually be returned.
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
WITH filtered_discussions AS (
|
WITH filtered_discussions AS (
|
||||||
@@ -961,6 +1048,14 @@ WITH filtered_discussions AS (
|
|||||||
JOIN projects p ON d.project_id = p.id
|
JOIN projects p ON d.project_id = p.id
|
||||||
{where_sql}
|
{where_sql}
|
||||||
),
|
),
|
||||||
|
-- Phase 1: Page-first — apply sort + LIMIT before note scanning
|
||||||
|
paged_discussions AS (
|
||||||
|
SELECT id
|
||||||
|
FROM filtered_discussions
|
||||||
|
ORDER BY COALESCE({sort_column}, 0) {order}, id {order}
|
||||||
|
LIMIT ?
|
||||||
|
),
|
||||||
|
-- Phase 2: Note rollups only for paged results
|
||||||
ranked_notes AS (
|
ranked_notes AS (
|
||||||
SELECT
|
SELECT
|
||||||
n.discussion_id,
|
n.discussion_id,
|
||||||
@@ -980,7 +1075,7 @@ ranked_notes AS (
|
|||||||
n.created_at, n.id
|
n.created_at, n.id
|
||||||
) AS rn_first_position
|
) AS rn_first_position
|
||||||
FROM notes n
|
FROM notes n
|
||||||
WHERE n.discussion_id IN (SELECT id FROM filtered_discussions)
|
WHERE n.discussion_id IN (SELECT id FROM paged_discussions)
|
||||||
),
|
),
|
||||||
note_rollup AS (
|
note_rollup AS (
|
||||||
SELECT
|
SELECT
|
||||||
@@ -1012,12 +1107,12 @@ SELECT
|
|||||||
nr.position_new_path,
|
nr.position_new_path,
|
||||||
nr.position_new_line
|
nr.position_new_line
|
||||||
FROM filtered_discussions fd
|
FROM filtered_discussions fd
|
||||||
|
JOIN paged_discussions pd ON fd.id = pd.id
|
||||||
JOIN projects p ON fd.project_id = p.id
|
JOIN projects p ON fd.project_id = p.id
|
||||||
LEFT JOIN issues i ON fd.issue_id = i.id
|
LEFT JOIN issues i ON fd.issue_id = i.id
|
||||||
LEFT JOIN merge_requests m ON fd.merge_request_id = m.id
|
LEFT JOIN merge_requests m ON fd.merge_request_id = m.id
|
||||||
LEFT JOIN note_rollup nr ON nr.discussion_id = fd.id
|
LEFT JOIN note_rollup nr ON nr.discussion_id = fd.id
|
||||||
ORDER BY COALESCE({sort_column}, 0) {order}, fd.id {order}
|
ORDER BY COALESCE({sort_column}, 0) {order}, fd.id {order}
|
||||||
LIMIT ?
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**Dual window function rationale**: The `ranked_notes` CTE uses two separate `ROW_NUMBER()`
|
**Dual window function rationale**: The `ranked_notes` CTE uses two separate `ROW_NUMBER()`
|
||||||
@@ -1028,12 +1123,11 @@ displacing the first human author/body, and prevents a non-positioned note from
|
|||||||
the file location. The `MAX(CASE WHEN rn_xxx = 1 ...)` pattern extracts the correct value
|
the file location. The `MAX(CASE WHEN rn_xxx = 1 ...)` pattern extracts the correct value
|
||||||
from each independently-ranked sequence.
|
from each independently-ranked sequence.
|
||||||
|
|
||||||
**Performance rationale**: The CTE pre-filters discussions before joining notes. The
|
**Page-first scalability rationale**: The `paged_discussions` CTE applies LIMIT before note
|
||||||
`ranked_notes` CTE uses `ROW_NUMBER()` (a single pass over the notes index) instead of
|
scanning. For MR-scoped queries (50-200 discussions) the performance is equivalent to the
|
||||||
correlated `(SELECT ... LIMIT 1)` sub-selects per discussion. For MR-scoped queries
|
non-paged approach. For project-wide scans with thousands of discussions, the page-first
|
||||||
(50-200 discussions) the performance is equivalent. For project-wide scans with thousands
|
architecture avoids scanning notes for discussions that won't appear in the result, keeping
|
||||||
of discussions, the window function approach avoids repeated index probes and produces a
|
latency proportional to `--limit` rather than to the total filtered count.
|
||||||
more predictable query plan.
|
|
||||||
|
|
||||||
**Note on ordering**: The `COALESCE({sort_column}, 0)` with tiebreaker `fd.id` ensures
|
**Note on ordering**: The `COALESCE({sort_column}, 0)` with tiebreaker `fd.id` ensures
|
||||||
deterministic ordering even when timestamps are NULL (partial sync states). The `id`
|
deterministic ordering even when timestamps are NULL (partial sync states). The `id`
|
||||||
@@ -1042,6 +1136,10 @@ tiebreaker is cheap (primary key) and prevents unstable sort output.
|
|||||||
**Note on SQLite FILTER syntax**: SQLite does not support `COUNT(*) FILTER (WHERE ...)`.
|
**Note on SQLite FILTER syntax**: SQLite does not support `COUNT(*) FILTER (WHERE ...)`.
|
||||||
Use `SUM(CASE WHEN ... THEN 1 ELSE 0 END)` instead (as shown above).
|
Use `SUM(CASE WHEN ... THEN 1 ELSE 0 END)` instead (as shown above).
|
||||||
|
|
||||||
|
**Count query**: The total_count query runs separately against `filtered_discussions` (without
|
||||||
|
the LIMIT) using `SELECT COUNT(*) FROM filtered_discussions`. This is needed for `has_more`
|
||||||
|
metadata. The count uses the same filter CTEs but omits notes entirely.
|
||||||
|
|
||||||
#### 3c-ii. Note expansion query (--include-notes)
|
#### 3c-ii. Note expansion query (--include-notes)
|
||||||
|
|
||||||
When `include_notes > 0`, after the main discussion query, run a **single batched query**
|
When `include_notes > 0`, after the main discussion query, run a **single batched query**
|
||||||
@@ -1103,6 +1201,7 @@ pub struct DiscussionListFilters {
|
|||||||
pub since: Option<String>,
|
pub since: Option<String>,
|
||||||
pub path: Option<String>,
|
pub path: Option<String>,
|
||||||
pub noteable_type: Option<NoteableTypeFilter>,
|
pub noteable_type: Option<NoteableTypeFilter>,
|
||||||
|
pub contains: Option<String>,
|
||||||
pub sort: DiscussionSortField,
|
pub sort: DiscussionSortField,
|
||||||
pub order: SortDirection,
|
pub order: SortDirection,
|
||||||
pub include_notes: usize,
|
pub include_notes: usize,
|
||||||
@@ -1117,6 +1216,7 @@ Where-clause construction uses `match` on typed enums — never raw string inter
|
|||||||
- `since` → `d.first_note_at >= ?` (using `parse_since()`)
|
- `since` → `d.first_note_at >= ?` (using `parse_since()`)
|
||||||
- `path` → `EXISTS (SELECT 1 FROM notes n WHERE n.discussion_id = d.id AND n.position_new_path LIKE ?)`
|
- `path` → `EXISTS (SELECT 1 FROM notes n WHERE n.discussion_id = d.id AND n.position_new_path LIKE ?)`
|
||||||
- `noteable_type` → match: `Issue` → `d.noteable_type = 'Issue'`, `MergeRequest` → `d.noteable_type = 'MergeRequest'`
|
- `noteable_type` → match: `Issue` → `d.noteable_type = 'Issue'`, `MergeRequest` → `d.noteable_type = 'MergeRequest'`
|
||||||
|
- `contains` → `EXISTS (SELECT 1 FROM notes n WHERE n.discussion_id = d.id AND n.body LIKE '%' || ? || '%')`
|
||||||
|
|
||||||
#### 3e. Handler wiring
|
#### 3e. Handler wiring
|
||||||
|
|
||||||
@@ -1128,7 +1228,7 @@ Add match arm:
|
|||||||
Some(Commands::Discussions(args)) => handle_discussions(cli.config.as_deref(), args, robot_mode),
|
Some(Commands::Discussions(args)) => handle_discussions(cli.config.as_deref(), args, robot_mode),
|
||||||
```
|
```
|
||||||
|
|
||||||
Handler function:
|
Handler function (with transaction ownership):
|
||||||
|
|
||||||
```rust
|
```rust
|
||||||
fn handle_discussions(
|
fn handle_discussions(
|
||||||
@@ -1143,6 +1243,10 @@ fn handle_discussions(
|
|||||||
|
|
||||||
let effective_limit = args.limit.min(500);
|
let effective_limit = args.limit.min(500);
|
||||||
let effective_include_notes = args.include_notes.min(20);
|
let effective_include_notes = args.include_notes.min(20);
|
||||||
|
|
||||||
|
// Snapshot consistency: one transaction across all queries
|
||||||
|
let tx = conn.transaction_with_behavior(rusqlite::TransactionBehavior::Deferred)?;
|
||||||
|
|
||||||
let filters = DiscussionListFilters {
|
let filters = DiscussionListFilters {
|
||||||
limit: effective_limit,
|
limit: effective_limit,
|
||||||
project: args.project,
|
project: args.project,
|
||||||
@@ -1153,12 +1257,15 @@ fn handle_discussions(
|
|||||||
since: args.since,
|
since: args.since,
|
||||||
path: args.path,
|
path: args.path,
|
||||||
noteable_type: args.noteable_type,
|
noteable_type: args.noteable_type,
|
||||||
|
contains: args.contains,
|
||||||
sort: args.sort,
|
sort: args.sort,
|
||||||
order: args.order,
|
order: args.order,
|
||||||
include_notes: effective_include_notes,
|
include_notes: effective_include_notes,
|
||||||
};
|
};
|
||||||
|
|
||||||
let result = query_discussions(&conn, &filters, &config)?;
|
let result = query_discussions(&tx, &filters, &config)?;
|
||||||
|
|
||||||
|
tx.commit()?; // read-only, but closes cleanly
|
||||||
|
|
||||||
let format = if robot_mode && args.format == "table" {
|
let format = if robot_mode && args.format == "table" {
|
||||||
"json"
|
"json"
|
||||||
@@ -1247,7 +1354,7 @@ CSV view: all fields, following same pattern as `print_list_notes_csv`.
|
|||||||
.collect(),
|
.collect(),
|
||||||
```
|
```
|
||||||
|
|
||||||
#### 3h. Query-plan validation
|
#### 3h. Query-plan validation and indexes
|
||||||
|
|
||||||
Before merging the discussions command, capture `EXPLAIN QUERY PLAN` output for the three
|
Before merging the discussions command, capture `EXPLAIN QUERY PLAN` output for the three
|
||||||
primary query patterns:
|
primary query patterns:
|
||||||
@@ -1255,17 +1362,26 @@ primary query patterns:
|
|||||||
- `--project <path> --since 7d --sort last-note`
|
- `--project <path> --since 7d --sort last-note`
|
||||||
- `--gitlab-discussion-id <id>`
|
- `--gitlab-discussion-id <id>`
|
||||||
|
|
||||||
If plans show table scans on `notes` or `discussions` for these patterns, add targeted indexes
|
**Required baseline index** (directly hit by `--include-notes` expansion, which runs a
|
||||||
to the `MIGRATIONS` array in `src/core/db.rs`:
|
`ROW_NUMBER() OVER (PARTITION BY discussion_id ORDER BY created_at DESC, id DESC)` window
|
||||||
|
on the notes table):
|
||||||
|
|
||||||
**Candidate indexes** (add only if EXPLAIN QUERY PLAN shows they're needed):
|
```sql
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_notes_discussion_created_desc
|
||||||
|
ON notes(discussion_id, created_at DESC, id DESC);
|
||||||
|
```
|
||||||
|
|
||||||
|
This index is non-negotiable because the include-notes expansion query's performance is
|
||||||
|
directly proportional to how efficiently it can scan notes per discussion. Without it, SQLite
|
||||||
|
falls back to a full table scan of the 282K-row notes table for each batch.
|
||||||
|
|
||||||
|
**Conditional indexes** (add only if EXPLAIN QUERY PLAN shows they're needed):
|
||||||
- `discussions(project_id, gitlab_discussion_id)` — for ambiguity preflight + direct ID lookup
|
- `discussions(project_id, gitlab_discussion_id)` — for ambiguity preflight + direct ID lookup
|
||||||
- `discussions(merge_request_id, last_note_at, id)` — for MR-scoped + sorted queries
|
- `discussions(merge_request_id, last_note_at, id)` — for MR-scoped + sorted queries
|
||||||
- `notes(discussion_id, created_at DESC, id DESC)` — for `--include-notes` expansion
|
|
||||||
- `notes(discussion_id, is_system, created_at, id)` — for ranked_notes CTE ordering
|
- `notes(discussion_id, is_system, created_at, id)` — for ranked_notes CTE ordering
|
||||||
|
|
||||||
This is a measured approach: profile first, add indexes only where the query plan demands them.
|
This is a measured approach: one required index for the critical new path, remaining indexes
|
||||||
No speculative index creation.
|
added only where the query plan demands them.
|
||||||
|
|
||||||
### Tests
|
### Tests
|
||||||
|
|
||||||
@@ -1500,7 +1616,7 @@ fn discussions_ambiguous_gitlab_discussion_id_across_projects() {
|
|||||||
};
|
};
|
||||||
let result = query_discussions(&conn, &filters, &Config::default());
|
let result = query_discussions(&conn, &filters, &Config::default());
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
// Error should be Ambiguous with both project paths
|
// Error should be Ambiguous with both project paths and gitlab_project_ids
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -1579,6 +1695,99 @@ fn discussions_first_note_rollup_skips_system_notes() {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Test 15: --contains filter returns matching discussions
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[test]
|
||||||
|
fn query_discussions_contains_filter() {
|
||||||
|
let conn = create_test_db();
|
||||||
|
insert_project(&conn, 1);
|
||||||
|
insert_mr(&conn, 1, 1, 99, "Test MR");
|
||||||
|
insert_discussion(&conn, 1, "disc-match", 1, None, Some(1), "MergeRequest");
|
||||||
|
insert_discussion(&conn, 2, "disc-nomatch", 1, None, Some(1), "MergeRequest");
|
||||||
|
insert_note_in_discussion(&conn, 1, 500, 1, 1, "alice", "I really do prefer this approach");
|
||||||
|
insert_note_in_discussion(&conn, 2, 501, 2, 1, "bob", "Looks good to me");
|
||||||
|
|
||||||
|
let filters = DiscussionListFilters {
|
||||||
|
contains: Some("really do prefer".to_string()),
|
||||||
|
..DiscussionListFilters::default_for_mr(99)
|
||||||
|
};
|
||||||
|
let result = query_discussions(&conn, &filters, &Config::default()).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(result.discussions.len(), 1);
|
||||||
|
assert_eq!(result.discussions[0].gitlab_discussion_id, "disc-match");
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Test 16: Nested note bridge fields survive --fields filtering in robot mode
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[test]
|
||||||
|
fn discussions_nested_note_bridge_fields_forced_in_robot_mode() {
|
||||||
|
// When discussions --include-notes returns nested notes,
|
||||||
|
// bridge fields on nested notes must survive --fields filtering
|
||||||
|
let mut value = serde_json::json!({
|
||||||
|
"data": {
|
||||||
|
"discussions": [{
|
||||||
|
"gitlab_discussion_id": "abc",
|
||||||
|
"noteable_type": "MergeRequest",
|
||||||
|
"parent_iid": 99,
|
||||||
|
"project_path": "group/repo",
|
||||||
|
"gitlab_project_id": 42,
|
||||||
|
"note_count": 1,
|
||||||
|
"notes": [{
|
||||||
|
"body": "test note",
|
||||||
|
"project_path": "group/repo",
|
||||||
|
"gitlab_project_id": 42,
|
||||||
|
"noteable_type": "MergeRequest",
|
||||||
|
"parent_iid": 99,
|
||||||
|
"gitlab_discussion_id": "abc",
|
||||||
|
"gitlab_note_id": 500
|
||||||
|
}]
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Agent requests only "body" on notes — bridge fields must still appear
|
||||||
|
filter_fields_robot(
|
||||||
|
&mut value,
|
||||||
|
"discussions",
|
||||||
|
&["note_count".to_string()],
|
||||||
|
);
|
||||||
|
|
||||||
|
let note = &value["data"]["discussions"][0]["notes"][0];
|
||||||
|
assert!(note.get("gitlab_discussion_id").is_some());
|
||||||
|
assert!(note.get("gitlab_note_id").is_some());
|
||||||
|
assert!(note.get("gitlab_project_id").is_some());
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Test 17: Ambiguity preflight respects scope filters (no false positives)
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[test]
|
||||||
|
fn discussions_ambiguity_preflight_respects_scope_filters() {
|
||||||
|
let conn = create_test_db();
|
||||||
|
insert_project(&conn, 1); // "group/repo-a"
|
||||||
|
insert_project(&conn, 2); // "group/repo-b"
|
||||||
|
// Same gitlab_discussion_id in both projects
|
||||||
|
// But different noteable_types
|
||||||
|
insert_discussion(&conn, 1, "shared-id", 1, Some(1), None, "Issue");
|
||||||
|
insert_discussion(&conn, 2, "shared-id", 2, None, Some(1), "MergeRequest");
|
||||||
|
|
||||||
|
// Filter by noteable_type narrows to one project — should NOT fire ambiguity
|
||||||
|
let filters = DiscussionListFilters {
|
||||||
|
gitlab_discussion_id: Some("shared-id".to_string()),
|
||||||
|
noteable_type: Some(NoteableTypeFilter::MergeRequest),
|
||||||
|
project: None,
|
||||||
|
..DiscussionListFilters::default()
|
||||||
|
};
|
||||||
|
let result = query_discussions(&conn, &filters, &Config::default());
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert_eq!(result.unwrap().discussions.len(), 1);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 4. Fix Robot-Docs Response Schemas
|
## 4. Fix Robot-Docs Response Schemas
|
||||||
@@ -1629,6 +1838,7 @@ With:
|
|||||||
"--since <period>",
|
"--since <period>",
|
||||||
"--path <filepath>",
|
"--path <filepath>",
|
||||||
"--noteable-type <Issue|MergeRequest>",
|
"--noteable-type <Issue|MergeRequest>",
|
||||||
|
"--contains <text>",
|
||||||
"--include-notes <N>",
|
"--include-notes <N>",
|
||||||
"--sort <first-note|last-note>",
|
"--sort <first-note|last-note>",
|
||||||
"--order <asc|desc>",
|
"--order <asc|desc>",
|
||||||
@@ -1831,14 +2041,13 @@ Changes 1 and 2 can be done in parallel. Change 4 must come last since it docume
|
|||||||
final schema of all preceding changes.
|
final schema of all preceding changes.
|
||||||
|
|
||||||
**Cross-cutting**: The Bridge Contract field guardrail (force-including bridge fields in robot
|
**Cross-cutting**: The Bridge Contract field guardrail (force-including bridge fields in robot
|
||||||
mode) should be implemented as part of Change 1, since it modifies `filter_fields` in
|
mode, including nested notes) should be implemented as part of Change 1, since it modifies
|
||||||
`robot.rs` which all subsequent changes depend on. The `BRIDGE_FIELDS_*` constants are defined
|
`filter_fields` in `robot.rs` which all subsequent changes depend on. The `BRIDGE_FIELDS_*`
|
||||||
once and reused by Changes 3 and 4.
|
constants are defined once and reused by Changes 3 and 4.
|
||||||
|
|
||||||
**Cross-cutting**: The snapshot consistency pattern (deferred read transaction) should be
|
**Cross-cutting**: The snapshot consistency pattern (deferred read transaction in handlers)
|
||||||
implemented in Change 1 for `query_notes` and carried forward to Change 3 for
|
should be implemented in Change 1 for `handle_notes` and carried forward to Change 3 for
|
||||||
`query_discussions`. This is a one-line wrapper that provides correctness guarantees with
|
`handle_discussions`. Transaction ownership lives in handlers; query helpers accept `&Connection`.
|
||||||
zero performance cost.
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -1850,40 +2059,52 @@ After all changes:
|
|||||||
`gitlab_discussion_id`, `gitlab_note_id`, and `gitlab_project_id` in the response
|
`gitlab_discussion_id`, `gitlab_note_id`, and `gitlab_project_id` in the response
|
||||||
2. An agent can run `lore -J discussions --for-mr 3929 --resolution unresolved` to see all
|
2. An agent can run `lore -J discussions --for-mr 3929 --resolution unresolved` to see all
|
||||||
open threads with their IDs
|
open threads with their IDs
|
||||||
3. An agent can run `lore -J mrs 3929` and see `gitlab_discussion_id`, `resolvable`,
|
3. An agent can run `lore -J discussions --for-mr 3929 --contains "prefer the approach"` to
|
||||||
|
find threads by text content without a separate `notes` round-trip
|
||||||
|
4. An agent can run `lore -J mrs 3929` and see `gitlab_discussion_id`, `resolvable`,
|
||||||
`resolved`, and `last_note_at_iso` on each discussion group, plus `gitlab_note_id` on
|
`resolved`, and `last_note_at_iso` on each discussion group, plus `gitlab_note_id` on
|
||||||
each note within
|
each note within
|
||||||
4. `lore robot-docs` lists actual field names for all commands
|
5. `lore robot-docs` lists actual field names for all commands
|
||||||
5. All existing tests still pass
|
6. All existing tests still pass
|
||||||
6. No clippy warnings (pedantic + nursery)
|
7. No clippy warnings (pedantic + nursery)
|
||||||
7. Robot-docs contract tests pass with field-set parity (not just string-contains), preventing
|
8. Robot-docs contract tests pass with field-set parity (not just string-contains), preventing
|
||||||
future schema drift in both directions
|
future schema drift in both directions
|
||||||
8. Bridge Contract fields (`project_path`, `gitlab_project_id`, `noteable_type`, `parent_iid`,
|
9. Bridge Contract fields (`project_path`, `gitlab_project_id`, `noteable_type`, `parent_iid`,
|
||||||
`gitlab_discussion_id`, `gitlab_note_id`) are present in every applicable read payload
|
`gitlab_discussion_id`, `gitlab_note_id`) are present in every applicable read payload
|
||||||
9. Bridge Contract fields survive `--fields` filtering in robot mode (guardrail enforced)
|
10. Bridge Contract fields survive `--fields` filtering in robot mode (guardrail enforced),
|
||||||
10. `--gitlab-discussion-id` filter works on both `notes` and `discussions` commands
|
including nested notes within `discussions --include-notes`
|
||||||
11. `--include-notes N` populates inline notes on `discussions` output via single batched query
|
11. `--gitlab-discussion-id` filter works on both `notes` and `discussions` commands
|
||||||
12. CLI-level contract integration tests verify bridge fields through the full handler path
|
12. `--include-notes N` populates inline notes on `discussions` output via single batched query
|
||||||
13. `gitlab_note_id` is available in notes list output (alongside `gitlab_id` for back-compat)
|
13. CLI-level contract integration tests verify bridge fields through the full handler path
|
||||||
|
14. `gitlab_note_id` is available in notes list output (alongside `gitlab_id` for back-compat)
|
||||||
and in show detail notes, providing a uniform field name across all commands
|
and in show detail notes, providing a uniform field name across all commands
|
||||||
14. Ambiguity guardrail fires when `--gitlab-discussion-id` matches multiple projects without
|
15. Ambiguity guardrail fires when `--gitlab-discussion-id` matches multiple projects without
|
||||||
`--project` specified — **including when LIMIT would have hidden the ambiguity** (preflight
|
`--project` specified — **including when LIMIT would have hidden the ambiguity** (preflight
|
||||||
query runs before LIMIT)
|
query runs before LIMIT). Error includes structured candidates with `gitlab_project_id`
|
||||||
15. Output guardrails clamp `--limit` to 500 and `--include-notes` to 20; `meta` reports
|
for machine consumption
|
||||||
|
16. Ambiguity preflight is scope-aware: active filters (noteable_type, for_issue/for_mr) are
|
||||||
|
applied alongside the discussion ID check, preventing false ambiguity when scope already
|
||||||
|
narrows to one project
|
||||||
|
17. Output guardrails clamp `--limit` to 500 and `--include-notes` to 20; `meta` reports
|
||||||
effective values and `has_more` truncation flag
|
effective values and `has_more` truncation flag
|
||||||
16. Discussion and show queries use deterministic ordering (COALESCE + id tiebreaker) to
|
18. Discussion and show queries use deterministic ordering (COALESCE + id tiebreaker) to
|
||||||
prevent unstable output during partial sync states
|
prevent unstable output during partial sync states
|
||||||
17. Per-discussion truncation signals (`included_note_count`, `has_more_notes`) are accurate
|
19. Per-discussion truncation signals (`included_note_count`, `has_more_notes`) are accurate
|
||||||
for `--include-notes` output
|
for `--include-notes` output
|
||||||
18. Multi-query commands (`query_notes`, `query_discussions`) use deferred read transactions
|
20. Multi-query handlers (`handle_notes`, `handle_discussions`) open deferred read transactions;
|
||||||
for snapshot consistency during concurrent ingest
|
query helpers accept `&Connection` for snapshot consistency and testability
|
||||||
19. Discussion filters (`resolution`, `noteable_type`, `sort`, `order`) use typed enums
|
21. Discussion filters (`resolution`, `noteable_type`, `sort`, `order`) use typed enums
|
||||||
with match-to-SQL mapping — no raw string interpolation in query construction
|
with match-to-SQL mapping — no raw string interpolation in query construction
|
||||||
20. First-note rollup correctly handles discussions with leading system notes — `first_author`
|
22. First-note rollup correctly handles discussions with leading system notes — `first_author`
|
||||||
and `first_note_body_snippet` always reflect the first non-system note
|
and `first_note_body_snippet` always reflect the first non-system note
|
||||||
21. Query plans for primary discussion query patterns (`--for-mr`, `--project --since`,
|
23. Query plans for primary discussion query patterns (`--for-mr`, `--project --since`,
|
||||||
`--gitlab-discussion-id`) have been validated via EXPLAIN QUERY PLAN; targeted indexes
|
`--gitlab-discussion-id`) have been validated via EXPLAIN QUERY PLAN; targeted indexes
|
||||||
added only where scans were observed
|
added only where scans were observed
|
||||||
|
24. The `notes(discussion_id, created_at DESC, id DESC)` index is present for `--include-notes`
|
||||||
|
expansion performance
|
||||||
|
25. Discussion query uses page-first CTE architecture: note rollups scan only the paged result
|
||||||
|
set, not all filtered discussions, keeping latency proportional to `--limit`
|
||||||
|
26. `--contains` filter on `discussions` returns only discussions with matching note text
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -1902,6 +2123,6 @@ After all changes:
|
|||||||
- **`--with-write-hints` flag for inline glab endpoint templates** — rejected because this couples lore's read surface to glab's API surface, violating the read/write split principle. The Bridge Contract gives agents the raw identifiers; constructing glab commands is the agent's responsibility. Adding endpoint templates would require lore to track glab API changes, creating an unnecessary maintenance burden.
|
- **`--with-write-hints` flag for inline glab endpoint templates** — rejected because this couples lore's read surface to glab's API surface, violating the read/write split principle. The Bridge Contract gives agents the raw identifiers; constructing glab commands is the agent's responsibility. Adding endpoint templates would require lore to track glab API changes, creating an unnecessary maintenance burden.
|
||||||
- **Show-command note ordering change (`ORDER BY COALESCE(position, ...), created_at, id`)** — rejected because show-command note ordering within a discussion thread is out of scope for this plan. The existing ordering works correctly for present data; the defensive COALESCE pattern is applied to discussion-level ordering where it matters for agent workflows.
|
- **Show-command note ordering change (`ORDER BY COALESCE(position, ...), created_at, id`)** — rejected because show-command note ordering within a discussion thread is out of scope for this plan. The existing ordering works correctly for present data; the defensive COALESCE pattern is applied to discussion-level ordering where it matters for agent workflows.
|
||||||
- **Query-plan validation as a separate numbered workstream** — rejected because it adds delivery overhead without proportional benefit. Query-plan validation is integrated into workstream 3 as a pre-merge validation step (section 3h), with candidate indexes listed but only added when EXPLAIN QUERY PLAN shows they're needed. This keeps the measured approach without inflating the workstream count.
|
- **Query-plan validation as a separate numbered workstream** — rejected because it adds delivery overhead without proportional benefit. Query-plan validation is integrated into workstream 3 as a pre-merge validation step (section 3h), with candidate indexes listed but only added when EXPLAIN QUERY PLAN shows they're needed. This keeps the measured approach without inflating the workstream count.
|
||||||
- **Add `gitlab_note_id` to show-command note detail structs** — rejected because show-command note detail structs already have `gitlab_id` (same value as `id`). The field is unambiguous and consistent with the Bridge Contract. Adding `gitlab_note_id` would create a duplicate and increase payload size without benefit.
|
- **`--project-id` immutable input filter across notes/discussions/show** — rejected because this is scope creep touching every command and changing CLI ergonomics. Agents already get `gitlab_project_id` in output to construct API calls; the input-side concern (project renames breaking `--project`) is theoretical and hasn't been observed in practice. The `--project` flag already supports fuzzy matching which handles most rename scenarios. If real-world evidence surfaces, this can be added later without breaking changes.
|
||||||
- **Add `gitlab_discussion_id` to show-command discussion detail structs** — rejected because show-command discussion detail structs already have `gitlab_discussion_id`. The field is unambiguous and consistent with the Bridge Contract. Adding `gitlab_discussion_id` would create a duplicate and increase payload size without benefit.
|
- **Schema versioning in robot-docs (`schema_version` field + semver policy)** — rejected because this tool has zero external consumers beyond our own agents, and the contract tests (field-set parity assertions) catch drift at compile time. Schema versioning adds bureaucratic overhead (version bumps, compatibility matrices, deprecation policies) without proportional benefit for an internal tool in early development. If lore gains external consumers, this can be reconsidered.
|
||||||
- **Add `gitlab_project_id` to show-command discussion detail structs** — rejected because show-command discussion detail structs already have `gitlab_project_id`. The field is unambiguous and consistent with the Bridge Contract. Adding `gitlab_project_id` would create a duplicate and increase payload size without benefit.
|
- **Remove "stale" rejected items that "conflict" with active sections** — rejected because the prior entries about show-command structs were stale from iteration 2 and have been cleaned up independently. The rejected section is cumulative by design — it prevents future reviewers from re-proposing changes that have already been evaluated.
|
||||||
|
|||||||
@@ -25,6 +25,7 @@ pub enum CorrectionRule {
|
|||||||
ValueNormalization,
|
ValueNormalization,
|
||||||
ValueFuzzy,
|
ValueFuzzy,
|
||||||
FlagPrefix,
|
FlagPrefix,
|
||||||
|
NoColorExpansion,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Result of the correction pass over raw args.
|
/// Result of the correction pass over raw args.
|
||||||
@@ -128,6 +129,7 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
|||||||
"--dry-run",
|
"--dry-run",
|
||||||
"--no-dry-run",
|
"--no-dry-run",
|
||||||
"--timings",
|
"--timings",
|
||||||
|
"--lock",
|
||||||
],
|
],
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
@@ -193,6 +195,7 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
|||||||
"--as-of",
|
"--as-of",
|
||||||
"--explain-score",
|
"--explain-score",
|
||||||
"--include-bots",
|
"--include-bots",
|
||||||
|
"--include-closed",
|
||||||
"--all-history",
|
"--all-history",
|
||||||
],
|
],
|
||||||
),
|
),
|
||||||
@@ -202,7 +205,6 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
|||||||
&[
|
&[
|
||||||
"--limit",
|
"--limit",
|
||||||
"--fields",
|
"--fields",
|
||||||
"--format",
|
|
||||||
"--author",
|
"--author",
|
||||||
"--note-type",
|
"--note-type",
|
||||||
"--contains",
|
"--contains",
|
||||||
@@ -423,9 +425,21 @@ pub fn correct_args(raw: Vec<String>, strict: bool) -> CorrectionResult {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if let Some(fixed) = try_correct(&arg, &valid, strict) {
|
if let Some(fixed) = try_correct(&arg, &valid, strict) {
|
||||||
let s = fixed.corrected.clone();
|
if fixed.rule == CorrectionRule::NoColorExpansion {
|
||||||
corrections.push(fixed);
|
// Expand --no-color → --color never
|
||||||
corrected.push(s);
|
corrections.push(Correction {
|
||||||
|
original: fixed.original,
|
||||||
|
corrected: "--color never".to_string(),
|
||||||
|
rule: CorrectionRule::NoColorExpansion,
|
||||||
|
confidence: 1.0,
|
||||||
|
});
|
||||||
|
corrected.push("--color".to_string());
|
||||||
|
corrected.push("never".to_string());
|
||||||
|
} else {
|
||||||
|
let s = fixed.corrected.clone();
|
||||||
|
corrections.push(fixed);
|
||||||
|
corrected.push(s);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
corrected.push(arg);
|
corrected.push(arg);
|
||||||
}
|
}
|
||||||
@@ -610,12 +624,27 @@ const CLAP_BUILTINS: &[&str] = &["--help", "--version"];
|
|||||||
///
|
///
|
||||||
/// When `strict` is true, fuzzy matching is disabled — only deterministic
|
/// When `strict` is true, fuzzy matching is disabled — only deterministic
|
||||||
/// corrections (single-dash fix, case normalization) are applied.
|
/// corrections (single-dash fix, case normalization) are applied.
|
||||||
|
///
|
||||||
|
/// Special case: `--no-color` is rewritten to `--color never` by returning
|
||||||
|
/// the `--color` correction and letting the caller handle arg insertion.
|
||||||
|
/// However, since we correct one arg at a time, we use `NoColorExpansion`
|
||||||
|
/// to signal that the next phase should insert `never` after this arg.
|
||||||
fn try_correct(arg: &str, valid_flags: &[&str], strict: bool) -> Option<Correction> {
|
fn try_correct(arg: &str, valid_flags: &[&str], strict: bool) -> Option<Correction> {
|
||||||
// Only attempt correction on flag-like args (starts with `-`)
|
// Only attempt correction on flag-like args (starts with `-`)
|
||||||
if !arg.starts_with('-') {
|
if !arg.starts_with('-') {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Special case: --no-color → --color never (common agent/user expectation)
|
||||||
|
if arg.eq_ignore_ascii_case("--no-color") {
|
||||||
|
return Some(Correction {
|
||||||
|
original: arg.to_string(),
|
||||||
|
corrected: "--no-color".to_string(), // sentinel; expanded in correct_args
|
||||||
|
rule: CorrectionRule::NoColorExpansion,
|
||||||
|
confidence: 1.0,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
// B2: Never correct clap built-in flags (--help, --version)
|
// B2: Never correct clap built-in flags (--help, --version)
|
||||||
let flag_part_for_builtin = if let Some(eq_pos) = arg.find('=') {
|
let flag_part_for_builtin = if let Some(eq_pos) = arg.find('=') {
|
||||||
&arg[..eq_pos]
|
&arg[..eq_pos]
|
||||||
@@ -765,9 +794,21 @@ fn try_correct(arg: &str, valid_flags: &[&str], strict: bool) -> Option<Correcti
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Find the best fuzzy match among valid flags for a given (lowercased) input.
|
/// Find the best fuzzy match among valid flags for a given (lowercased) input.
|
||||||
|
///
|
||||||
|
/// Applies a length guard to prevent short candidates (e.g. `--for`, 5 chars
|
||||||
|
/// including dashes) from inflating Jaro-Winkler scores against long inputs.
|
||||||
|
/// When the input is more than 40% longer than a candidate, that candidate is
|
||||||
|
/// excluded from fuzzy consideration (it can still match via prefix rule).
|
||||||
fn best_fuzzy_match<'a>(input: &str, valid_flags: &[&'a str]) -> Option<(&'a str, f64)> {
|
fn best_fuzzy_match<'a>(input: &str, valid_flags: &[&'a str]) -> Option<(&'a str, f64)> {
|
||||||
valid_flags
|
valid_flags
|
||||||
.iter()
|
.iter()
|
||||||
|
.filter(|&&flag| {
|
||||||
|
// Guard: skip short candidates when input is much longer.
|
||||||
|
// e.g. "--foobar" (8 chars) should not fuzzy-match "--for" (5 chars)
|
||||||
|
// Ratio: input must be within 1.4x the candidate length.
|
||||||
|
let max_input_len = (flag.len() as f64 * 1.4) as usize;
|
||||||
|
input.len() <= max_input_len
|
||||||
|
})
|
||||||
.map(|&flag| (flag, jaro_winkler(input, flag)))
|
.map(|&flag| (flag, jaro_winkler(input, flag)))
|
||||||
.max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
|
.max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
|
||||||
}
|
}
|
||||||
@@ -845,6 +886,9 @@ pub fn format_teaching_note(correction: &Correction) -> String {
|
|||||||
correction.corrected, correction.original
|
correction.corrected, correction.original
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
CorrectionRule::NoColorExpansion => {
|
||||||
|
"Use `--color never` instead of `--no-color`".to_string()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1285,6 +1329,53 @@ mod tests {
|
|||||||
assert!(note.contains("full flag name"));
|
assert!(note.contains("full flag name"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ---- --no-color expansion ----
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn no_color_expands_to_color_never() {
|
||||||
|
let result = correct_args(args("lore --no-color health"), false);
|
||||||
|
assert_eq!(result.corrections.len(), 1);
|
||||||
|
assert_eq!(result.corrections[0].rule, CorrectionRule::NoColorExpansion);
|
||||||
|
assert_eq!(result.args, args("lore --color never health"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn no_color_case_insensitive() {
|
||||||
|
let result = correct_args(args("lore --No-Color issues"), false);
|
||||||
|
assert_eq!(result.corrections.len(), 1);
|
||||||
|
assert_eq!(result.args, args("lore --color never issues"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn no_color_with_robot_mode() {
|
||||||
|
let result = correct_args(args("lore --robot --no-color health"), true);
|
||||||
|
assert_eq!(result.corrections.len(), 1);
|
||||||
|
assert_eq!(result.args, args("lore --robot --color never health"));
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- Fuzzy matching length guard ----
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn foobar_does_not_match_for() {
|
||||||
|
// --foobar (8 chars) should NOT fuzzy-match --for (5 chars)
|
||||||
|
let result = correct_args(args("lore count --foobar issues"), false);
|
||||||
|
assert!(
|
||||||
|
!result.corrections.iter().any(|c| c.corrected == "--for"),
|
||||||
|
"expected --foobar not to match --for"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn fro_still_matches_for() {
|
||||||
|
// --fro (5 chars) is short enough to fuzzy-match --for (5 chars)
|
||||||
|
// and also qualifies as a prefix match
|
||||||
|
let result = correct_args(args("lore count --fro issues"), false);
|
||||||
|
assert!(
|
||||||
|
result.corrections.iter().any(|c| c.corrected == "--for"),
|
||||||
|
"expected --fro to match --for"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
// ---- Post-clap suggestion helpers ----
|
// ---- Post-clap suggestion helpers ----
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|||||||
@@ -257,7 +257,10 @@ pub fn print_event_count_json(counts: &EventCounts, elapsed_ms: u64) {
|
|||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta { elapsed_ms },
|
||||||
};
|
};
|
||||||
|
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_event_count(counts: &EventCounts) {
|
pub fn print_event_count(counts: &EventCounts) {
|
||||||
@@ -325,7 +328,10 @@ pub fn print_count_json(result: &CountResult, elapsed_ms: u64) {
|
|||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta { elapsed_ms },
|
||||||
};
|
};
|
||||||
|
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_count(result: &CountResult) {
|
pub fn print_count(result: &CountResult) {
|
||||||
|
|||||||
292
src/cli/commands/cron.rs
Normal file
292
src/cli/commands/cron.rs
Normal file
@@ -0,0 +1,292 @@
|
|||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
use crate::Config;
|
||||||
|
use crate::cli::render::Theme;
|
||||||
|
use crate::cli::robot::RobotMeta;
|
||||||
|
use crate::core::cron::{
|
||||||
|
CronInstallResult, CronStatusResult, CronUninstallResult, cron_status, install_cron,
|
||||||
|
uninstall_cron,
|
||||||
|
};
|
||||||
|
use crate::core::db::create_connection;
|
||||||
|
use crate::core::error::Result;
|
||||||
|
use crate::core::paths::get_db_path;
|
||||||
|
use crate::core::time::ms_to_iso;
|
||||||
|
|
||||||
|
// ── install ──
|
||||||
|
|
||||||
|
pub fn run_cron_install(interval_minutes: u32) -> Result<CronInstallResult> {
|
||||||
|
install_cron(interval_minutes)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_cron_install(result: &CronInstallResult) {
|
||||||
|
if result.replaced {
|
||||||
|
println!(
|
||||||
|
" {} cron entry updated (was already installed)",
|
||||||
|
Theme::success().render("Updated")
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
println!(
|
||||||
|
" {} cron entry installed",
|
||||||
|
Theme::success().render("Installed")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
println!();
|
||||||
|
println!(" {} {}", Theme::dim().render("entry:"), result.entry);
|
||||||
|
println!(
|
||||||
|
" {} every {} minutes",
|
||||||
|
Theme::dim().render("interval:"),
|
||||||
|
result.interval_minutes
|
||||||
|
);
|
||||||
|
println!(
|
||||||
|
" {} {}",
|
||||||
|
Theme::dim().render("log:"),
|
||||||
|
result.log_path.display()
|
||||||
|
);
|
||||||
|
|
||||||
|
if cfg!(target_os = "macos") {
|
||||||
|
println!();
|
||||||
|
println!(
|
||||||
|
" {} On macOS, the terminal running cron may need",
|
||||||
|
Theme::warning().render("Note:")
|
||||||
|
);
|
||||||
|
println!(" Full Disk Access in System Settings > Privacy & Security.");
|
||||||
|
}
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct CronInstallJson {
|
||||||
|
ok: bool,
|
||||||
|
data: CronInstallData,
|
||||||
|
meta: RobotMeta,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct CronInstallData {
|
||||||
|
action: &'static str,
|
||||||
|
entry: String,
|
||||||
|
interval_minutes: u32,
|
||||||
|
log_path: String,
|
||||||
|
replaced: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_cron_install_json(result: &CronInstallResult, elapsed_ms: u64) {
|
||||||
|
let output = CronInstallJson {
|
||||||
|
ok: true,
|
||||||
|
data: CronInstallData {
|
||||||
|
action: "install",
|
||||||
|
entry: result.entry.clone(),
|
||||||
|
interval_minutes: result.interval_minutes,
|
||||||
|
log_path: result.log_path.display().to_string(),
|
||||||
|
replaced: result.replaced,
|
||||||
|
},
|
||||||
|
meta: RobotMeta { elapsed_ms },
|
||||||
|
};
|
||||||
|
if let Ok(json) = serde_json::to_string(&output) {
|
||||||
|
println!("{json}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── uninstall ──
|
||||||
|
|
||||||
|
pub fn run_cron_uninstall() -> Result<CronUninstallResult> {
|
||||||
|
uninstall_cron()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_cron_uninstall(result: &CronUninstallResult) {
|
||||||
|
if result.was_installed {
|
||||||
|
println!(
|
||||||
|
" {} cron entry removed",
|
||||||
|
Theme::success().render("Removed")
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
println!(
|
||||||
|
" {} no lore-sync cron entry found",
|
||||||
|
Theme::dim().render("Nothing to remove:")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct CronUninstallJson {
|
||||||
|
ok: bool,
|
||||||
|
data: CronUninstallData,
|
||||||
|
meta: RobotMeta,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct CronUninstallData {
|
||||||
|
action: &'static str,
|
||||||
|
was_installed: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_cron_uninstall_json(result: &CronUninstallResult, elapsed_ms: u64) {
|
||||||
|
let output = CronUninstallJson {
|
||||||
|
ok: true,
|
||||||
|
data: CronUninstallData {
|
||||||
|
action: "uninstall",
|
||||||
|
was_installed: result.was_installed,
|
||||||
|
},
|
||||||
|
meta: RobotMeta { elapsed_ms },
|
||||||
|
};
|
||||||
|
if let Ok(json) = serde_json::to_string(&output) {
|
||||||
|
println!("{json}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── status ──
|
||||||
|
|
||||||
|
pub fn run_cron_status(config: &Config) -> Result<CronStatusInfo> {
|
||||||
|
let status = cron_status()?;
|
||||||
|
|
||||||
|
// Query last sync run from DB
|
||||||
|
let last_sync = get_last_sync_time(config).unwrap_or_default();
|
||||||
|
|
||||||
|
Ok(CronStatusInfo { status, last_sync })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct CronStatusInfo {
|
||||||
|
pub status: CronStatusResult,
|
||||||
|
pub last_sync: Option<LastSyncInfo>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct LastSyncInfo {
|
||||||
|
pub started_at_iso: String,
|
||||||
|
pub status: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_last_sync_time(config: &Config) -> Result<Option<LastSyncInfo>> {
|
||||||
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
||||||
|
if !db_path.exists() {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
let conn = create_connection(&db_path)?;
|
||||||
|
let result = conn.query_row(
|
||||||
|
"SELECT started_at, status FROM sync_runs ORDER BY started_at DESC LIMIT 1",
|
||||||
|
[],
|
||||||
|
|row| {
|
||||||
|
let started_at: i64 = row.get(0)?;
|
||||||
|
let status: String = row.get(1)?;
|
||||||
|
Ok(LastSyncInfo {
|
||||||
|
started_at_iso: ms_to_iso(started_at),
|
||||||
|
status,
|
||||||
|
})
|
||||||
|
},
|
||||||
|
);
|
||||||
|
match result {
|
||||||
|
Ok(info) => Ok(Some(info)),
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
|
||||||
|
// Table may not exist if migrations haven't run yet
|
||||||
|
Err(rusqlite::Error::SqliteFailure(_, Some(ref msg))) if msg.contains("no such table") => {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
Err(e) => Err(e.into()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_cron_status(info: &CronStatusInfo) {
|
||||||
|
if info.status.installed {
|
||||||
|
println!(
|
||||||
|
" {} lore-sync is installed in crontab",
|
||||||
|
Theme::success().render("Installed")
|
||||||
|
);
|
||||||
|
if let Some(interval) = info.status.interval_minutes {
|
||||||
|
println!(
|
||||||
|
" {} every {} minutes",
|
||||||
|
Theme::dim().render("interval:"),
|
||||||
|
interval
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if let Some(ref binary) = info.status.binary_path {
|
||||||
|
let label = if info.status.binary_mismatch {
|
||||||
|
Theme::warning().render("binary:")
|
||||||
|
} else {
|
||||||
|
Theme::dim().render("binary:")
|
||||||
|
};
|
||||||
|
println!(" {label} {binary}");
|
||||||
|
if info.status.binary_mismatch
|
||||||
|
&& let Some(ref current) = info.status.current_binary
|
||||||
|
{
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::warning().render(&format!(" current binary is {current} (mismatch!)"))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Some(ref log) = info.status.log_path {
|
||||||
|
println!(" {} {}", Theme::dim().render("log:"), log.display());
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
println!(
|
||||||
|
" {} lore-sync is not installed in crontab",
|
||||||
|
Theme::dim().render("Not installed:")
|
||||||
|
);
|
||||||
|
println!(
|
||||||
|
" {} lore cron install",
|
||||||
|
Theme::dim().render("install with:")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(ref last) = info.last_sync {
|
||||||
|
println!(
|
||||||
|
" {} {} ({})",
|
||||||
|
Theme::dim().render("last sync:"),
|
||||||
|
last.started_at_iso,
|
||||||
|
last.status
|
||||||
|
);
|
||||||
|
}
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct CronStatusJson {
|
||||||
|
ok: bool,
|
||||||
|
data: CronStatusData,
|
||||||
|
meta: RobotMeta,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct CronStatusData {
|
||||||
|
installed: bool,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
interval_minutes: Option<u32>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
binary_path: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
current_binary: Option<String>,
|
||||||
|
binary_mismatch: bool,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
log_path: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
cron_entry: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
last_sync_at: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
last_sync_status: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_cron_status_json(info: &CronStatusInfo, elapsed_ms: u64) {
|
||||||
|
let output = CronStatusJson {
|
||||||
|
ok: true,
|
||||||
|
data: CronStatusData {
|
||||||
|
installed: info.status.installed,
|
||||||
|
interval_minutes: info.status.interval_minutes,
|
||||||
|
binary_path: info.status.binary_path.clone(),
|
||||||
|
current_binary: info.status.current_binary.clone(),
|
||||||
|
binary_mismatch: info.status.binary_mismatch,
|
||||||
|
log_path: info
|
||||||
|
.status
|
||||||
|
.log_path
|
||||||
|
.as_ref()
|
||||||
|
.map(|p| p.display().to_string()),
|
||||||
|
cron_entry: info.status.cron_entry.clone(),
|
||||||
|
last_sync_at: info.last_sync.as_ref().map(|s| s.started_at_iso.clone()),
|
||||||
|
last_sync_status: info.last_sync.as_ref().map(|s| s.status.clone()),
|
||||||
|
},
|
||||||
|
meta: RobotMeta { elapsed_ms },
|
||||||
|
};
|
||||||
|
if let Ok(json) = serde_json::to_string(&output) {
|
||||||
|
println!("{json}");
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -137,5 +137,8 @@ pub fn print_embed_json(result: &EmbedCommandResult, elapsed_ms: u64) {
|
|||||||
data: result,
|
data: result,
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta { elapsed_ms },
|
||||||
};
|
};
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
|
|
||||||
|
use tracing::info;
|
||||||
|
|
||||||
use crate::Config;
|
use crate::Config;
|
||||||
use crate::cli::render::{self, Icons, Theme};
|
use crate::cli::render::{self, Icons, Theme};
|
||||||
use crate::core::db::create_connection;
|
use crate::core::db::create_connection;
|
||||||
@@ -46,6 +48,9 @@ pub struct FileHistoryResult {
|
|||||||
pub discussions: Vec<FileDiscussion>,
|
pub discussions: Vec<FileDiscussion>,
|
||||||
pub total_mrs: usize,
|
pub total_mrs: usize,
|
||||||
pub paths_searched: usize,
|
pub paths_searched: usize,
|
||||||
|
/// Diagnostic hints explaining why results may be empty.
|
||||||
|
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||||
|
pub hints: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Run the file-history query.
|
/// Run the file-history query.
|
||||||
@@ -77,6 +82,11 @@ pub fn run_file_history(
|
|||||||
|
|
||||||
let paths_searched = all_paths.len();
|
let paths_searched = all_paths.len();
|
||||||
|
|
||||||
|
info!(
|
||||||
|
paths = paths_searched,
|
||||||
|
renames_followed, "file-history: resolved {} path(s) for '{}'", paths_searched, path
|
||||||
|
);
|
||||||
|
|
||||||
// Build placeholders for IN clause
|
// Build placeholders for IN clause
|
||||||
let placeholders: Vec<String> = (0..all_paths.len())
|
let placeholders: Vec<String> = (0..all_paths.len())
|
||||||
.map(|i| format!("?{}", i + 2))
|
.map(|i| format!("?{}", i + 2))
|
||||||
@@ -135,14 +145,31 @@ pub fn run_file_history(
|
|||||||
web_url: row.get(8)?,
|
web_url: row.get(8)?,
|
||||||
})
|
})
|
||||||
})?
|
})?
|
||||||
.filter_map(std::result::Result::ok)
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
.collect();
|
|
||||||
|
|
||||||
let total_mrs = merge_requests.len();
|
let total_mrs = merge_requests.len();
|
||||||
|
|
||||||
|
info!(
|
||||||
|
mr_count = total_mrs,
|
||||||
|
"file-history: found {} MR(s) touching '{}'", total_mrs, path
|
||||||
|
);
|
||||||
|
|
||||||
// Optionally fetch DiffNote discussions on this file
|
// Optionally fetch DiffNote discussions on this file
|
||||||
let discussions = if include_discussions && !merge_requests.is_empty() {
|
let discussions = if include_discussions && !merge_requests.is_empty() {
|
||||||
fetch_file_discussions(&conn, &all_paths, project_id)?
|
let discs = fetch_file_discussions(&conn, &all_paths, project_id)?;
|
||||||
|
info!(
|
||||||
|
discussion_count = discs.len(),
|
||||||
|
"file-history: found {} discussion(s)",
|
||||||
|
discs.len()
|
||||||
|
);
|
||||||
|
discs
|
||||||
|
} else {
|
||||||
|
Vec::new()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Build diagnostic hints when no results found
|
||||||
|
let hints = if total_mrs == 0 {
|
||||||
|
build_file_history_hints(&conn, project_id, &all_paths)?
|
||||||
} else {
|
} else {
|
||||||
Vec::new()
|
Vec::new()
|
||||||
};
|
};
|
||||||
@@ -155,6 +182,7 @@ pub fn run_file_history(
|
|||||||
discussions,
|
discussions,
|
||||||
total_mrs,
|
total_mrs,
|
||||||
paths_searched,
|
paths_searched,
|
||||||
|
hints,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -179,8 +207,7 @@ fn fetch_file_discussions(
|
|||||||
JOIN discussions d ON d.id = n.discussion_id \
|
JOIN discussions d ON d.id = n.discussion_id \
|
||||||
WHERE n.position_new_path IN ({in_clause}) {project_filter} \
|
WHERE n.position_new_path IN ({in_clause}) {project_filter} \
|
||||||
AND n.is_system = 0 \
|
AND n.is_system = 0 \
|
||||||
ORDER BY n.created_at DESC \
|
ORDER BY n.created_at DESC"
|
||||||
LIMIT 50"
|
|
||||||
);
|
);
|
||||||
|
|
||||||
let mut stmt = conn.prepare(&sql)?;
|
let mut stmt = conn.prepare(&sql)?;
|
||||||
@@ -210,12 +237,57 @@ fn fetch_file_discussions(
|
|||||||
created_at_iso: ms_to_iso(created_at),
|
created_at_iso: ms_to_iso(created_at),
|
||||||
})
|
})
|
||||||
})?
|
})?
|
||||||
.filter_map(std::result::Result::ok)
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
.collect();
|
|
||||||
|
|
||||||
Ok(discussions)
|
Ok(discussions)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Build diagnostic hints explaining why a file-history query returned no results.
|
||||||
|
fn build_file_history_hints(
|
||||||
|
conn: &rusqlite::Connection,
|
||||||
|
project_id: Option<i64>,
|
||||||
|
paths: &[String],
|
||||||
|
) -> Result<Vec<String>> {
|
||||||
|
let mut hints = Vec::new();
|
||||||
|
|
||||||
|
// Check if mr_file_changes has ANY rows for this project
|
||||||
|
let has_file_changes: bool = if let Some(pid) = project_id {
|
||||||
|
conn.query_row(
|
||||||
|
"SELECT EXISTS(SELECT 1 FROM mr_file_changes WHERE project_id = ?1 LIMIT 1)",
|
||||||
|
rusqlite::params![pid],
|
||||||
|
|row| row.get(0),
|
||||||
|
)?
|
||||||
|
} else {
|
||||||
|
conn.query_row(
|
||||||
|
"SELECT EXISTS(SELECT 1 FROM mr_file_changes LIMIT 1)",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)?
|
||||||
|
};
|
||||||
|
|
||||||
|
if !has_file_changes {
|
||||||
|
hints.push(
|
||||||
|
"No MR file changes have been synced yet. Run 'lore sync' to fetch file change data."
|
||||||
|
.to_string(),
|
||||||
|
);
|
||||||
|
return Ok(hints);
|
||||||
|
}
|
||||||
|
|
||||||
|
// File changes exist but none match these paths
|
||||||
|
let path_list = paths
|
||||||
|
.iter()
|
||||||
|
.map(|p| format!("'{p}'"))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(", ");
|
||||||
|
hints.push(format!(
|
||||||
|
"Searched paths [{}] were not found in MR file changes. \
|
||||||
|
The file may predate the sync window or use a different path.",
|
||||||
|
path_list
|
||||||
|
));
|
||||||
|
|
||||||
|
Ok(hints)
|
||||||
|
}
|
||||||
|
|
||||||
// ── Human output ────────────────────────────────────────────────────────────
|
// ── Human output ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
pub fn print_file_history(result: &FileHistoryResult) {
|
pub fn print_file_history(result: &FileHistoryResult) {
|
||||||
@@ -250,10 +322,16 @@ pub fn print_file_history(result: &FileHistoryResult) {
|
|||||||
Icons::info(),
|
Icons::info(),
|
||||||
Theme::dim().render("No merge requests found touching this file.")
|
Theme::dim().render("No merge requests found touching this file.")
|
||||||
);
|
);
|
||||||
println!(
|
if !result.renames_followed && result.rename_chain.len() == 1 {
|
||||||
" {}",
|
println!(
|
||||||
Theme::dim().render("Hint: Run 'lore sync' to fetch MR file changes.")
|
" {} Searched: {}",
|
||||||
);
|
Icons::info(),
|
||||||
|
Theme::dim().render(&result.rename_chain[0])
|
||||||
|
);
|
||||||
|
}
|
||||||
|
for hint in &result.hints {
|
||||||
|
println!(" {} {}", Icons::info(), Theme::dim().render(hint));
|
||||||
|
}
|
||||||
println!();
|
println!();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -327,6 +405,7 @@ pub fn print_file_history_json(result: &FileHistoryResult, elapsed_ms: u64) {
|
|||||||
"total_mrs": result.total_mrs,
|
"total_mrs": result.total_mrs,
|
||||||
"renames_followed": result.renames_followed,
|
"renames_followed": result.renames_followed,
|
||||||
"paths_searched": result.paths_searched,
|
"paths_searched": result.paths_searched,
|
||||||
|
"hints": if result.hints.is_empty() { None } else { Some(&result.hints) },
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -259,7 +259,10 @@ pub fn print_generate_docs_json(result: &GenerateDocsResult, elapsed_ms: u64) {
|
|||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta { elapsed_ms },
|
||||||
};
|
};
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|||||||
@@ -982,7 +982,10 @@ pub fn print_ingest_summary_json(result: &IngestResult, elapsed_ms: u64) {
|
|||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta { elapsed_ms },
|
||||||
};
|
};
|
||||||
|
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_ingest_summary(result: &IngestResult) {
|
pub fn print_ingest_summary(result: &IngestResult) {
|
||||||
@@ -1109,5 +1112,8 @@ pub fn print_dry_run_preview_json(preview: &DryRunPreview) {
|
|||||||
data: preview.clone(),
|
data: preview.clone(),
|
||||||
};
|
};
|
||||||
|
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -980,59 +980,6 @@ pub fn print_list_notes_json(result: &NoteListResult, elapsed_ms: u64, fields: O
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_list_notes_jsonl(result: &NoteListResult) {
|
|
||||||
for note in &result.notes {
|
|
||||||
let json_row = NoteListRowJson::from(note);
|
|
||||||
match serde_json::to_string(&json_row) {
|
|
||||||
Ok(json) => println!("{json}"),
|
|
||||||
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Escape a field for RFC 4180 CSV: quote fields containing commas, quotes, or newlines.
|
|
||||||
fn csv_escape(field: &str) -> String {
|
|
||||||
if field.contains(',') || field.contains('"') || field.contains('\n') || field.contains('\r') {
|
|
||||||
let escaped = field.replace('"', "\"\"");
|
|
||||||
format!("\"{escaped}\"")
|
|
||||||
} else {
|
|
||||||
field.to_string()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn print_list_notes_csv(result: &NoteListResult) {
|
|
||||||
println!(
|
|
||||||
"id,gitlab_id,author_username,body,note_type,is_system,created_at,updated_at,position_new_path,position_new_line,noteable_type,parent_iid,project_path"
|
|
||||||
);
|
|
||||||
for note in &result.notes {
|
|
||||||
let body = note.body.as_deref().unwrap_or("");
|
|
||||||
let note_type = note.note_type.as_deref().unwrap_or("");
|
|
||||||
let path = note.position_new_path.as_deref().unwrap_or("");
|
|
||||||
let line = note
|
|
||||||
.position_new_line
|
|
||||||
.map_or(String::new(), |l| l.to_string());
|
|
||||||
let noteable = note.noteable_type.as_deref().unwrap_or("");
|
|
||||||
let parent_iid = note.parent_iid.map_or(String::new(), |i| i.to_string());
|
|
||||||
|
|
||||||
println!(
|
|
||||||
"{},{},{},{},{},{},{},{},{},{},{},{},{}",
|
|
||||||
note.id,
|
|
||||||
note.gitlab_id,
|
|
||||||
csv_escape(¬e.author_username),
|
|
||||||
csv_escape(body),
|
|
||||||
csv_escape(note_type),
|
|
||||||
note.is_system,
|
|
||||||
note.created_at,
|
|
||||||
note.updated_at,
|
|
||||||
csv_escape(path),
|
|
||||||
line,
|
|
||||||
csv_escape(noteable),
|
|
||||||
parent_iid,
|
|
||||||
csv_escape(¬e.project_path),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Note query layer
|
// Note query layer
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|||||||
@@ -1269,60 +1269,6 @@ fn test_truncate_note_body() {
|
|||||||
assert!(result.ends_with("..."));
|
assert!(result.ends_with("..."));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_csv_escape_basic() {
|
|
||||||
assert_eq!(csv_escape("simple"), "simple");
|
|
||||||
assert_eq!(csv_escape("has,comma"), "\"has,comma\"");
|
|
||||||
assert_eq!(csv_escape("has\"quote"), "\"has\"\"quote\"");
|
|
||||||
assert_eq!(csv_escape("has\nnewline"), "\"has\nnewline\"");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_csv_output_basic() {
|
|
||||||
let result = NoteListResult {
|
|
||||||
notes: vec![NoteListRow {
|
|
||||||
id: 1,
|
|
||||||
gitlab_id: 100,
|
|
||||||
author_username: "alice".to_string(),
|
|
||||||
body: Some("Hello, world".to_string()),
|
|
||||||
note_type: Some("DiffNote".to_string()),
|
|
||||||
is_system: false,
|
|
||||||
created_at: 1_000_000,
|
|
||||||
updated_at: 2_000_000,
|
|
||||||
position_new_path: Some("src/main.rs".to_string()),
|
|
||||||
position_new_line: Some(42),
|
|
||||||
position_old_path: None,
|
|
||||||
position_old_line: None,
|
|
||||||
resolvable: true,
|
|
||||||
resolved: false,
|
|
||||||
resolved_by: None,
|
|
||||||
noteable_type: Some("Issue".to_string()),
|
|
||||||
parent_iid: Some(7),
|
|
||||||
parent_title: Some("Test issue".to_string()),
|
|
||||||
project_path: "group/project".to_string(),
|
|
||||||
}],
|
|
||||||
total_count: 1,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Verify csv_escape handles the comma in body correctly
|
|
||||||
let body = result.notes[0].body.as_deref().unwrap();
|
|
||||||
let escaped = csv_escape(body);
|
|
||||||
assert_eq!(escaped, "\"Hello, world\"");
|
|
||||||
|
|
||||||
// Verify the formatting helpers
|
|
||||||
assert_eq!(
|
|
||||||
format_note_type(result.notes[0].note_type.as_deref()),
|
|
||||||
"Diff"
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
format_note_parent(
|
|
||||||
result.notes[0].noteable_type.as_deref(),
|
|
||||||
result.notes[0].parent_iid,
|
|
||||||
),
|
|
||||||
"Issue #7"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_jsonl_output_one_per_line() {
|
fn test_jsonl_output_one_per_line() {
|
||||||
let result = NoteListResult {
|
let result = NoteListResult {
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
pub mod auth_test;
|
pub mod auth_test;
|
||||||
pub mod count;
|
pub mod count;
|
||||||
|
#[cfg(unix)]
|
||||||
|
pub mod cron;
|
||||||
pub mod doctor;
|
pub mod doctor;
|
||||||
pub mod drift;
|
pub mod drift;
|
||||||
pub mod embed;
|
pub mod embed;
|
||||||
@@ -22,6 +24,12 @@ pub use count::{
|
|||||||
print_count, print_count_json, print_event_count, print_event_count_json, run_count,
|
print_count, print_count_json, print_event_count, print_event_count_json, run_count,
|
||||||
run_count_events,
|
run_count_events,
|
||||||
};
|
};
|
||||||
|
#[cfg(unix)]
|
||||||
|
pub use cron::{
|
||||||
|
print_cron_install, print_cron_install_json, print_cron_status, print_cron_status_json,
|
||||||
|
print_cron_uninstall, print_cron_uninstall_json, run_cron_install, run_cron_status,
|
||||||
|
run_cron_uninstall,
|
||||||
|
};
|
||||||
pub use doctor::{DoctorChecks, print_doctor_results, run_doctor};
|
pub use doctor::{DoctorChecks, print_doctor_results, run_doctor};
|
||||||
pub use drift::{DriftResponse, print_drift_human, print_drift_json, run_drift};
|
pub use drift::{DriftResponse, print_drift_human, print_drift_json, run_drift};
|
||||||
pub use embed::{print_embed, print_embed_json, run_embed};
|
pub use embed::{print_embed, print_embed_json, run_embed};
|
||||||
@@ -35,8 +43,7 @@ pub use init::{InitInputs, InitOptions, InitResult, run_init};
|
|||||||
pub use list::{
|
pub use list::{
|
||||||
ListFilters, MrListFilters, NoteListFilters, open_issue_in_browser, open_mr_in_browser,
|
ListFilters, MrListFilters, NoteListFilters, open_issue_in_browser, open_mr_in_browser,
|
||||||
print_list_issues, print_list_issues_json, print_list_mrs, print_list_mrs_json,
|
print_list_issues, print_list_issues_json, print_list_mrs, print_list_mrs_json,
|
||||||
print_list_notes, print_list_notes_csv, print_list_notes_json, print_list_notes_jsonl,
|
print_list_notes, print_list_notes_json, query_notes, run_list_issues, run_list_mrs,
|
||||||
query_notes, run_list_issues, run_list_mrs,
|
|
||||||
};
|
};
|
||||||
pub use search::{
|
pub use search::{
|
||||||
SearchCliFilters, SearchResponse, print_search_results, print_search_results_json, run_search,
|
SearchCliFilters, SearchResponse, print_search_results, print_search_results_json, run_search,
|
||||||
|
|||||||
@@ -439,5 +439,8 @@ pub fn print_search_results_json(
|
|||||||
let expanded = crate::cli::robot::expand_fields_preset(f, "search");
|
let expanded = crate::cli::robot::expand_fields_preset(f, "search");
|
||||||
crate::cli::robot::filter_fields(&mut value, "results", &expanded);
|
crate::cli::robot::filter_fields(&mut value, "results", &expanded);
|
||||||
}
|
}
|
||||||
println!("{}", serde_json::to_string(&value).unwrap());
|
match serde_json::to_string(&value) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -585,5 +585,8 @@ pub fn print_stats_json(result: &StatsResult, elapsed_ms: u64) {
|
|||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta { elapsed_ms },
|
||||||
};
|
};
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -746,7 +746,10 @@ pub fn print_sync_json(result: &SyncResult, elapsed_ms: u64, metrics: Option<&Me
|
|||||||
stages,
|
stages,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default, Serialize)]
|
#[derive(Debug, Default, Serialize)]
|
||||||
@@ -880,7 +883,10 @@ pub fn print_sync_dry_run_json(result: &SyncDryRunResult) {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|||||||
@@ -268,7 +268,10 @@ pub fn print_sync_status_json(result: &SyncStatusResult, elapsed_ms: u64) {
|
|||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta { elapsed_ms },
|
||||||
};
|
};
|
||||||
|
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
match serde_json::to_string(&output) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_sync_status(result: &SyncStatusResult) {
|
pub fn print_sync_status(result: &SyncStatusResult) {
|
||||||
|
|||||||
@@ -374,7 +374,10 @@ pub fn print_timeline_json_with_meta(
|
|||||||
let expanded = crate::cli::robot::expand_fields_preset(f, "timeline");
|
let expanded = crate::cli::robot::expand_fields_preset(f, "timeline");
|
||||||
crate::cli::robot::filter_fields(&mut value, "events", &expanded);
|
crate::cli::robot::filter_fields(&mut value, "events", &expanded);
|
||||||
}
|
}
|
||||||
println!("{}", serde_json::to_string(&value).unwrap());
|
match serde_json::to_string(&value) {
|
||||||
|
Ok(json) => println!("{json}"),
|
||||||
|
Err(e) => eprintln!("Error serializing to JSON: {e}"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
|
|||||||
@@ -50,17 +50,23 @@ pub fn print_trace(result: &TraceResult) {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Show searched paths when there are renames but no chains
|
||||||
if result.trace_chains.is_empty() {
|
if result.trace_chains.is_empty() {
|
||||||
println!(
|
println!(
|
||||||
"\n {} {}",
|
"\n {} {}",
|
||||||
Icons::info(),
|
Icons::info(),
|
||||||
Theme::dim().render("No trace chains found for this file.")
|
Theme::dim().render("No trace chains found for this file.")
|
||||||
);
|
);
|
||||||
println!(
|
if !result.renames_followed && result.resolved_paths.len() == 1 {
|
||||||
" {}",
|
println!(
|
||||||
Theme::dim()
|
" {} Searched: {}",
|
||||||
.render("Hint: Run 'lore sync' to fetch MR file changes and cross-references.")
|
Icons::info(),
|
||||||
);
|
Theme::dim().render(&result.resolved_paths[0])
|
||||||
|
);
|
||||||
|
}
|
||||||
|
for hint in &result.hints {
|
||||||
|
println!(" {} {}", Icons::info(), Theme::dim().render(hint));
|
||||||
|
}
|
||||||
println!();
|
println!();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -195,6 +201,7 @@ pub fn print_trace_json(result: &TraceResult, elapsed_ms: u64, line_requested: O
|
|||||||
"elapsed_ms": elapsed_ms,
|
"elapsed_ms": elapsed_ms,
|
||||||
"total_chains": result.total_chains,
|
"total_chains": result.total_chains,
|
||||||
"renames_followed": result.renames_followed,
|
"renames_followed": result.renames_followed,
|
||||||
|
"hints": if result.hints.is_empty() { None } else { Some(&result.hints) },
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
299
src/cli/commands/who/active.rs
Normal file
299
src/cli/commands/who/active.rs
Normal file
@@ -0,0 +1,299 @@
|
|||||||
|
use rusqlite::Connection;
|
||||||
|
|
||||||
|
use crate::cli::render::{self, Theme};
|
||||||
|
use crate::core::error::Result;
|
||||||
|
use crate::core::time::ms_to_iso;
|
||||||
|
|
||||||
|
use super::types::*;
|
||||||
|
|
||||||
|
pub(super) fn query_active(
|
||||||
|
conn: &Connection,
|
||||||
|
project_id: Option<i64>,
|
||||||
|
since_ms: i64,
|
||||||
|
limit: usize,
|
||||||
|
include_closed: bool,
|
||||||
|
) -> Result<ActiveResult> {
|
||||||
|
let limit_plus_one = (limit + 1) as i64;
|
||||||
|
|
||||||
|
// State filter for open-entities-only (default behavior)
|
||||||
|
let state_joins = if include_closed {
|
||||||
|
""
|
||||||
|
} else {
|
||||||
|
" LEFT JOIN issues i ON d.issue_id = i.id
|
||||||
|
LEFT JOIN merge_requests m ON d.merge_request_id = m.id"
|
||||||
|
};
|
||||||
|
let state_filter = if include_closed {
|
||||||
|
""
|
||||||
|
} else {
|
||||||
|
" AND (i.id IS NULL OR i.state = 'opened')
|
||||||
|
AND (m.id IS NULL OR m.state = 'opened')"
|
||||||
|
};
|
||||||
|
|
||||||
|
// Total unresolved count -- conditionally built
|
||||||
|
let total_sql_global = format!(
|
||||||
|
"SELECT COUNT(*) FROM discussions d
|
||||||
|
{state_joins}
|
||||||
|
WHERE d.resolvable = 1 AND d.resolved = 0
|
||||||
|
AND d.last_note_at >= ?1
|
||||||
|
{state_filter}"
|
||||||
|
);
|
||||||
|
let total_sql_scoped = format!(
|
||||||
|
"SELECT COUNT(*) FROM discussions d
|
||||||
|
{state_joins}
|
||||||
|
WHERE d.resolvable = 1 AND d.resolved = 0
|
||||||
|
AND d.last_note_at >= ?1
|
||||||
|
AND d.project_id = ?2
|
||||||
|
{state_filter}"
|
||||||
|
);
|
||||||
|
|
||||||
|
let total_unresolved_in_window: u32 = match project_id {
|
||||||
|
None => conn.query_row(&total_sql_global, rusqlite::params![since_ms], |row| {
|
||||||
|
row.get(0)
|
||||||
|
})?,
|
||||||
|
Some(pid) => {
|
||||||
|
conn.query_row(&total_sql_scoped, rusqlite::params![since_ms, pid], |row| {
|
||||||
|
row.get(0)
|
||||||
|
})?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Active discussions with context -- conditionally built SQL
|
||||||
|
let sql_global = format!(
|
||||||
|
"
|
||||||
|
WITH picked AS (
|
||||||
|
SELECT d.id, d.noteable_type, d.issue_id, d.merge_request_id,
|
||||||
|
d.project_id, d.last_note_at
|
||||||
|
FROM discussions d
|
||||||
|
{state_joins}
|
||||||
|
WHERE d.resolvable = 1 AND d.resolved = 0
|
||||||
|
AND d.last_note_at >= ?1
|
||||||
|
{state_filter}
|
||||||
|
ORDER BY d.last_note_at DESC
|
||||||
|
LIMIT ?2
|
||||||
|
),
|
||||||
|
note_counts AS (
|
||||||
|
SELECT
|
||||||
|
n.discussion_id,
|
||||||
|
COUNT(*) AS note_count
|
||||||
|
FROM notes n
|
||||||
|
JOIN picked p ON p.id = n.discussion_id
|
||||||
|
WHERE n.is_system = 0
|
||||||
|
GROUP BY n.discussion_id
|
||||||
|
),
|
||||||
|
participants AS (
|
||||||
|
SELECT
|
||||||
|
x.discussion_id,
|
||||||
|
GROUP_CONCAT(x.author_username, X'1F') AS participants
|
||||||
|
FROM (
|
||||||
|
SELECT DISTINCT n.discussion_id, n.author_username
|
||||||
|
FROM notes n
|
||||||
|
JOIN picked p ON p.id = n.discussion_id
|
||||||
|
WHERE n.is_system = 0 AND n.author_username IS NOT NULL
|
||||||
|
) x
|
||||||
|
GROUP BY x.discussion_id
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
p.id AS discussion_id,
|
||||||
|
p.noteable_type,
|
||||||
|
COALESCE(i.iid, m.iid) AS entity_iid,
|
||||||
|
COALESCE(i.title, m.title) AS entity_title,
|
||||||
|
proj.path_with_namespace,
|
||||||
|
p.last_note_at,
|
||||||
|
COALESCE(nc.note_count, 0) AS note_count,
|
||||||
|
COALESCE(pa.participants, '') AS participants
|
||||||
|
FROM picked p
|
||||||
|
JOIN projects proj ON p.project_id = proj.id
|
||||||
|
LEFT JOIN issues i ON p.issue_id = i.id
|
||||||
|
LEFT JOIN merge_requests m ON p.merge_request_id = m.id
|
||||||
|
LEFT JOIN note_counts nc ON nc.discussion_id = p.id
|
||||||
|
LEFT JOIN participants pa ON pa.discussion_id = p.id
|
||||||
|
ORDER BY p.last_note_at DESC
|
||||||
|
"
|
||||||
|
);
|
||||||
|
|
||||||
|
let sql_scoped = format!(
|
||||||
|
"
|
||||||
|
WITH picked AS (
|
||||||
|
SELECT d.id, d.noteable_type, d.issue_id, d.merge_request_id,
|
||||||
|
d.project_id, d.last_note_at
|
||||||
|
FROM discussions d
|
||||||
|
{state_joins}
|
||||||
|
WHERE d.resolvable = 1 AND d.resolved = 0
|
||||||
|
AND d.last_note_at >= ?1
|
||||||
|
AND d.project_id = ?2
|
||||||
|
{state_filter}
|
||||||
|
ORDER BY d.last_note_at DESC
|
||||||
|
LIMIT ?3
|
||||||
|
),
|
||||||
|
note_counts AS (
|
||||||
|
SELECT
|
||||||
|
n.discussion_id,
|
||||||
|
COUNT(*) AS note_count
|
||||||
|
FROM notes n
|
||||||
|
JOIN picked p ON p.id = n.discussion_id
|
||||||
|
WHERE n.is_system = 0
|
||||||
|
GROUP BY n.discussion_id
|
||||||
|
),
|
||||||
|
participants AS (
|
||||||
|
SELECT
|
||||||
|
x.discussion_id,
|
||||||
|
GROUP_CONCAT(x.author_username, X'1F') AS participants
|
||||||
|
FROM (
|
||||||
|
SELECT DISTINCT n.discussion_id, n.author_username
|
||||||
|
FROM notes n
|
||||||
|
JOIN picked p ON p.id = n.discussion_id
|
||||||
|
WHERE n.is_system = 0 AND n.author_username IS NOT NULL
|
||||||
|
) x
|
||||||
|
GROUP BY x.discussion_id
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
p.id AS discussion_id,
|
||||||
|
p.noteable_type,
|
||||||
|
COALESCE(i.iid, m.iid) AS entity_iid,
|
||||||
|
COALESCE(i.title, m.title) AS entity_title,
|
||||||
|
proj.path_with_namespace,
|
||||||
|
p.last_note_at,
|
||||||
|
COALESCE(nc.note_count, 0) AS note_count,
|
||||||
|
COALESCE(pa.participants, '') AS participants
|
||||||
|
FROM picked p
|
||||||
|
JOIN projects proj ON p.project_id = proj.id
|
||||||
|
LEFT JOIN issues i ON p.issue_id = i.id
|
||||||
|
LEFT JOIN merge_requests m ON p.merge_request_id = m.id
|
||||||
|
LEFT JOIN note_counts nc ON nc.discussion_id = p.id
|
||||||
|
LEFT JOIN participants pa ON pa.discussion_id = p.id
|
||||||
|
ORDER BY p.last_note_at DESC
|
||||||
|
"
|
||||||
|
);
|
||||||
|
|
||||||
|
// Row-mapping closure shared between both variants
|
||||||
|
let map_row = |row: &rusqlite::Row| -> rusqlite::Result<ActiveDiscussion> {
|
||||||
|
let noteable_type: String = row.get(1)?;
|
||||||
|
let entity_type = if noteable_type == "MergeRequest" {
|
||||||
|
"MR"
|
||||||
|
} else {
|
||||||
|
"Issue"
|
||||||
|
};
|
||||||
|
let participants_csv: Option<String> = row.get(7)?;
|
||||||
|
// Sort participants for deterministic output -- GROUP_CONCAT order is undefined
|
||||||
|
let mut participants: Vec<String> = participants_csv
|
||||||
|
.as_deref()
|
||||||
|
.filter(|s| !s.is_empty())
|
||||||
|
.map(|csv| csv.split('\x1F').map(String::from).collect())
|
||||||
|
.unwrap_or_default();
|
||||||
|
participants.sort();
|
||||||
|
|
||||||
|
const MAX_PARTICIPANTS: usize = 50;
|
||||||
|
let participants_total = participants.len() as u32;
|
||||||
|
let participants_truncated = participants.len() > MAX_PARTICIPANTS;
|
||||||
|
if participants_truncated {
|
||||||
|
participants.truncate(MAX_PARTICIPANTS);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ActiveDiscussion {
|
||||||
|
discussion_id: row.get(0)?,
|
||||||
|
entity_type: entity_type.to_string(),
|
||||||
|
entity_iid: row.get(2)?,
|
||||||
|
entity_title: row.get(3)?,
|
||||||
|
project_path: row.get(4)?,
|
||||||
|
last_note_at: row.get(5)?,
|
||||||
|
note_count: row.get(6)?,
|
||||||
|
participants,
|
||||||
|
participants_total,
|
||||||
|
participants_truncated,
|
||||||
|
})
|
||||||
|
};
|
||||||
|
|
||||||
|
// Select variant first, then prepare exactly one statement
|
||||||
|
let discussions: Vec<ActiveDiscussion> = match project_id {
|
||||||
|
None => {
|
||||||
|
let mut stmt = conn.prepare_cached(&sql_global)?;
|
||||||
|
stmt.query_map(rusqlite::params![since_ms, limit_plus_one], &map_row)?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?
|
||||||
|
}
|
||||||
|
Some(pid) => {
|
||||||
|
let mut stmt = conn.prepare_cached(&sql_scoped)?;
|
||||||
|
stmt.query_map(rusqlite::params![since_ms, pid, limit_plus_one], &map_row)?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let truncated = discussions.len() > limit;
|
||||||
|
let discussions: Vec<ActiveDiscussion> = discussions.into_iter().take(limit).collect();
|
||||||
|
|
||||||
|
Ok(ActiveResult {
|
||||||
|
discussions,
|
||||||
|
total_unresolved_in_window,
|
||||||
|
truncated,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) fn print_active_human(r: &ActiveResult, project_path: Option<&str>) {
|
||||||
|
println!();
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
Theme::bold().render(&format!(
|
||||||
|
"Active Discussions ({} unresolved in window)",
|
||||||
|
r.total_unresolved_in_window
|
||||||
|
))
|
||||||
|
);
|
||||||
|
println!("{}", "\u{2500}".repeat(60));
|
||||||
|
super::print_scope_hint(project_path);
|
||||||
|
println!();
|
||||||
|
|
||||||
|
if r.discussions.is_empty() {
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("No active unresolved discussions in this time window.")
|
||||||
|
);
|
||||||
|
println!();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
for disc in &r.discussions {
|
||||||
|
let prefix = if disc.entity_type == "MR" { "!" } else { "#" };
|
||||||
|
let participants_str = disc
|
||||||
|
.participants
|
||||||
|
.iter()
|
||||||
|
.map(|p| format!("@{p}"))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(", ");
|
||||||
|
|
||||||
|
println!(
|
||||||
|
" {} {} {} {} notes {}",
|
||||||
|
Theme::info().render(&format!("{prefix}{}", disc.entity_iid)),
|
||||||
|
render::truncate(&disc.entity_title, 40),
|
||||||
|
Theme::dim().render(&render::format_relative_time(disc.last_note_at)),
|
||||||
|
disc.note_count,
|
||||||
|
Theme::dim().render(&disc.project_path),
|
||||||
|
);
|
||||||
|
if !participants_str.is_empty() {
|
||||||
|
println!(" {}", Theme::dim().render(&participants_str));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if r.truncated {
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("(showing first -n; rerun with a higher --limit)")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) fn active_to_json(r: &ActiveResult) -> serde_json::Value {
|
||||||
|
serde_json::json!({
|
||||||
|
"total_unresolved_in_window": r.total_unresolved_in_window,
|
||||||
|
"truncated": r.truncated,
|
||||||
|
"discussions": r.discussions.iter().map(|d| serde_json::json!({
|
||||||
|
"discussion_id": d.discussion_id,
|
||||||
|
"entity_type": d.entity_type,
|
||||||
|
"entity_iid": d.entity_iid,
|
||||||
|
"entity_title": d.entity_title,
|
||||||
|
"project_path": d.project_path,
|
||||||
|
"last_note_at": ms_to_iso(d.last_note_at),
|
||||||
|
"note_count": d.note_count,
|
||||||
|
"participants": d.participants,
|
||||||
|
"participants_total": d.participants_total,
|
||||||
|
"participants_truncated": d.participants_truncated,
|
||||||
|
})).collect::<Vec<_>>(),
|
||||||
|
})
|
||||||
|
}
|
||||||
839
src/cli/commands/who/expert.rs
Normal file
839
src/cli/commands/who/expert.rs
Normal file
@@ -0,0 +1,839 @@
|
|||||||
|
use std::collections::{HashMap, HashSet};
|
||||||
|
|
||||||
|
use rusqlite::Connection;
|
||||||
|
|
||||||
|
use crate::cli::render::{self, Icons, Theme};
|
||||||
|
use crate::core::config::ScoringConfig;
|
||||||
|
use crate::core::error::Result;
|
||||||
|
use crate::core::path_resolver::{PathQuery, build_path_query};
|
||||||
|
use crate::core::time::ms_to_iso;
|
||||||
|
|
||||||
|
use super::types::*;
|
||||||
|
|
||||||
|
pub(super) fn half_life_decay(elapsed_ms: i64, half_life_days: u32) -> f64 {
|
||||||
|
let days = (elapsed_ms as f64 / 86_400_000.0).max(0.0);
|
||||||
|
let hl = f64::from(half_life_days);
|
||||||
|
if hl <= 0.0 {
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
2.0_f64.powf(-days / hl)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Query: Expert Mode ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
pub(super) fn query_expert(
|
||||||
|
conn: &Connection,
|
||||||
|
path: &str,
|
||||||
|
project_id: Option<i64>,
|
||||||
|
since_ms: i64,
|
||||||
|
as_of_ms: i64,
|
||||||
|
limit: usize,
|
||||||
|
scoring: &ScoringConfig,
|
||||||
|
detail: bool,
|
||||||
|
explain_score: bool,
|
||||||
|
include_bots: bool,
|
||||||
|
) -> Result<ExpertResult> {
|
||||||
|
let pq = build_path_query(conn, path, project_id)?;
|
||||||
|
|
||||||
|
let sql = build_expert_sql_v2(pq.is_prefix);
|
||||||
|
let mut stmt = conn.prepare_cached(&sql)?;
|
||||||
|
|
||||||
|
// Params: ?1=path, ?2=since_ms, ?3=project_id, ?4=as_of_ms,
|
||||||
|
// ?5=closed_mr_multiplier, ?6=reviewer_min_note_chars
|
||||||
|
let rows = stmt.query_map(
|
||||||
|
rusqlite::params![
|
||||||
|
pq.value,
|
||||||
|
since_ms,
|
||||||
|
project_id,
|
||||||
|
as_of_ms,
|
||||||
|
scoring.closed_mr_multiplier,
|
||||||
|
scoring.reviewer_min_note_chars,
|
||||||
|
],
|
||||||
|
|row| {
|
||||||
|
Ok(SignalRow {
|
||||||
|
username: row.get(0)?,
|
||||||
|
signal: row.get(1)?,
|
||||||
|
mr_id: row.get(2)?,
|
||||||
|
qty: row.get(3)?,
|
||||||
|
ts: row.get(4)?,
|
||||||
|
state_mult: row.get(5)?,
|
||||||
|
})
|
||||||
|
},
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Per-user accumulator keyed by username.
|
||||||
|
let mut accum: HashMap<String, UserAccum> = HashMap::new();
|
||||||
|
|
||||||
|
for row_result in rows {
|
||||||
|
let r = row_result?;
|
||||||
|
let entry = accum
|
||||||
|
.entry(r.username.clone())
|
||||||
|
.or_insert_with(|| UserAccum {
|
||||||
|
contributions: Vec::new(),
|
||||||
|
last_seen_ms: 0,
|
||||||
|
mr_ids_author: HashSet::new(),
|
||||||
|
mr_ids_reviewer: HashSet::new(),
|
||||||
|
note_count: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
if r.ts > entry.last_seen_ms {
|
||||||
|
entry.last_seen_ms = r.ts;
|
||||||
|
}
|
||||||
|
|
||||||
|
match r.signal.as_str() {
|
||||||
|
"diffnote_author" | "file_author" => {
|
||||||
|
entry.mr_ids_author.insert(r.mr_id);
|
||||||
|
}
|
||||||
|
"file_reviewer_participated" | "file_reviewer_assigned" => {
|
||||||
|
entry.mr_ids_reviewer.insert(r.mr_id);
|
||||||
|
}
|
||||||
|
"note_group" => {
|
||||||
|
entry.note_count += r.qty as u32;
|
||||||
|
// DiffNote reviewers are also reviewer activity.
|
||||||
|
entry.mr_ids_reviewer.insert(r.mr_id);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
entry.contributions.push(Contribution {
|
||||||
|
signal: r.signal,
|
||||||
|
mr_id: r.mr_id,
|
||||||
|
qty: r.qty,
|
||||||
|
ts: r.ts,
|
||||||
|
state_mult: r.state_mult,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Bot filtering: exclude configured bot usernames (case-insensitive).
|
||||||
|
if !include_bots && !scoring.excluded_usernames.is_empty() {
|
||||||
|
let excluded: HashSet<String> = scoring
|
||||||
|
.excluded_usernames
|
||||||
|
.iter()
|
||||||
|
.map(|u| u.to_lowercase())
|
||||||
|
.collect();
|
||||||
|
accum.retain(|username, _| !excluded.contains(&username.to_lowercase()));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compute decayed scores with deterministic ordering.
|
||||||
|
let mut scored: Vec<ScoredUser> = accum
|
||||||
|
.into_iter()
|
||||||
|
.map(|(username, mut ua)| {
|
||||||
|
// Sort contributions by mr_id ASC for deterministic f64 summation.
|
||||||
|
ua.contributions.sort_by_key(|c| c.mr_id);
|
||||||
|
|
||||||
|
let mut comp_author = 0.0_f64;
|
||||||
|
let mut comp_reviewer_participated = 0.0_f64;
|
||||||
|
let mut comp_reviewer_assigned = 0.0_f64;
|
||||||
|
let mut comp_notes = 0.0_f64;
|
||||||
|
|
||||||
|
for c in &ua.contributions {
|
||||||
|
let elapsed = as_of_ms - c.ts;
|
||||||
|
match c.signal.as_str() {
|
||||||
|
"diffnote_author" | "file_author" => {
|
||||||
|
let decay = half_life_decay(elapsed, scoring.author_half_life_days);
|
||||||
|
comp_author += scoring.author_weight as f64 * decay * c.state_mult;
|
||||||
|
}
|
||||||
|
"file_reviewer_participated" => {
|
||||||
|
let decay = half_life_decay(elapsed, scoring.reviewer_half_life_days);
|
||||||
|
comp_reviewer_participated +=
|
||||||
|
scoring.reviewer_weight as f64 * decay * c.state_mult;
|
||||||
|
}
|
||||||
|
"file_reviewer_assigned" => {
|
||||||
|
let decay =
|
||||||
|
half_life_decay(elapsed, scoring.reviewer_assignment_half_life_days);
|
||||||
|
comp_reviewer_assigned +=
|
||||||
|
scoring.reviewer_assignment_weight as f64 * decay * c.state_mult;
|
||||||
|
}
|
||||||
|
"note_group" => {
|
||||||
|
let decay = half_life_decay(elapsed, scoring.note_half_life_days);
|
||||||
|
// Diminishing returns: log2(1 + count) per MR.
|
||||||
|
let note_value = (1.0 + c.qty as f64).log2();
|
||||||
|
comp_notes += scoring.note_bonus as f64 * note_value * decay * c.state_mult;
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let raw_score =
|
||||||
|
comp_author + comp_reviewer_participated + comp_reviewer_assigned + comp_notes;
|
||||||
|
ScoredUser {
|
||||||
|
username,
|
||||||
|
raw_score,
|
||||||
|
components: ScoreComponents {
|
||||||
|
author: comp_author,
|
||||||
|
reviewer_participated: comp_reviewer_participated,
|
||||||
|
reviewer_assigned: comp_reviewer_assigned,
|
||||||
|
notes: comp_notes,
|
||||||
|
},
|
||||||
|
accum: ua,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Sort: raw_score DESC, last_seen DESC, username ASC (deterministic tiebreaker).
|
||||||
|
scored.sort_by(|a, b| {
|
||||||
|
b.raw_score
|
||||||
|
.partial_cmp(&a.raw_score)
|
||||||
|
.unwrap_or(std::cmp::Ordering::Equal)
|
||||||
|
.then_with(|| b.accum.last_seen_ms.cmp(&a.accum.last_seen_ms))
|
||||||
|
.then_with(|| a.username.cmp(&b.username))
|
||||||
|
});
|
||||||
|
|
||||||
|
let truncated = scored.len() > limit;
|
||||||
|
scored.truncate(limit);
|
||||||
|
|
||||||
|
// Build Expert structs with MR refs.
|
||||||
|
let mut experts: Vec<Expert> = scored
|
||||||
|
.into_iter()
|
||||||
|
.map(|su| {
|
||||||
|
let mut mr_refs = build_mr_refs_for_user(conn, &su.accum);
|
||||||
|
mr_refs.sort();
|
||||||
|
let mr_refs_total = mr_refs.len() as u32;
|
||||||
|
let mr_refs_truncated = mr_refs.len() > MAX_MR_REFS_PER_USER;
|
||||||
|
if mr_refs_truncated {
|
||||||
|
mr_refs.truncate(MAX_MR_REFS_PER_USER);
|
||||||
|
}
|
||||||
|
Expert {
|
||||||
|
username: su.username,
|
||||||
|
score: su.raw_score.round() as i64,
|
||||||
|
score_raw: if explain_score {
|
||||||
|
Some(su.raw_score)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
components: if explain_score {
|
||||||
|
Some(su.components)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
review_mr_count: su.accum.mr_ids_reviewer.len() as u32,
|
||||||
|
review_note_count: su.accum.note_count,
|
||||||
|
author_mr_count: su.accum.mr_ids_author.len() as u32,
|
||||||
|
last_seen_ms: su.accum.last_seen_ms,
|
||||||
|
mr_refs,
|
||||||
|
mr_refs_total,
|
||||||
|
mr_refs_truncated,
|
||||||
|
details: None,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Populate per-MR detail when --detail is requested
|
||||||
|
if detail && !experts.is_empty() {
|
||||||
|
let details_map = query_expert_details(conn, &pq, &experts, since_ms, project_id)?;
|
||||||
|
for expert in &mut experts {
|
||||||
|
expert.details = details_map.get(&expert.username).cloned();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ExpertResult {
|
||||||
|
path_query: if pq.is_prefix {
|
||||||
|
// Use raw input (unescaped) for display — pq.value has LIKE escaping.
|
||||||
|
path.trim_end_matches('/').to_string()
|
||||||
|
} else {
|
||||||
|
// For exact matches (including suffix-resolved), show the resolved path.
|
||||||
|
pq.value.clone()
|
||||||
|
},
|
||||||
|
path_match: if pq.is_prefix { "prefix" } else { "exact" }.to_string(),
|
||||||
|
experts,
|
||||||
|
truncated,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
struct SignalRow {
|
||||||
|
username: String,
|
||||||
|
signal: String,
|
||||||
|
mr_id: i64,
|
||||||
|
qty: i64,
|
||||||
|
ts: i64,
|
||||||
|
state_mult: f64,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Per-user signal accumulator used during Rust-side scoring.
|
||||||
|
struct UserAccum {
|
||||||
|
contributions: Vec<Contribution>,
|
||||||
|
last_seen_ms: i64,
|
||||||
|
mr_ids_author: HashSet<i64>,
|
||||||
|
mr_ids_reviewer: HashSet<i64>,
|
||||||
|
note_count: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A single contribution to a user's score (one signal row).
|
||||||
|
struct Contribution {
|
||||||
|
signal: String,
|
||||||
|
mr_id: i64,
|
||||||
|
qty: i64,
|
||||||
|
ts: i64,
|
||||||
|
state_mult: f64,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Intermediate scored user before building Expert structs.
|
||||||
|
struct ScoredUser {
|
||||||
|
username: String,
|
||||||
|
raw_score: f64,
|
||||||
|
components: ScoreComponents,
|
||||||
|
accum: UserAccum,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build MR refs (e.g. "group/project!123") for a user from their accumulated MR IDs.
|
||||||
|
fn build_mr_refs_for_user(conn: &Connection, ua: &UserAccum) -> Vec<String> {
|
||||||
|
let all_mr_ids: HashSet<i64> = ua
|
||||||
|
.mr_ids_author
|
||||||
|
.iter()
|
||||||
|
.chain(ua.mr_ids_reviewer.iter())
|
||||||
|
.copied()
|
||||||
|
.chain(ua.contributions.iter().map(|c| c.mr_id))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if all_mr_ids.is_empty() {
|
||||||
|
return Vec::new();
|
||||||
|
}
|
||||||
|
|
||||||
|
let placeholders: Vec<String> = (1..=all_mr_ids.len()).map(|i| format!("?{i}")).collect();
|
||||||
|
let sql = format!(
|
||||||
|
"SELECT p.path_with_namespace || '!' || CAST(m.iid AS TEXT)
|
||||||
|
FROM merge_requests m
|
||||||
|
JOIN projects p ON m.project_id = p.id
|
||||||
|
WHERE m.id IN ({})",
|
||||||
|
placeholders.join(",")
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut stmt = match conn.prepare(&sql) {
|
||||||
|
Ok(s) => s,
|
||||||
|
Err(_) => return Vec::new(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut mr_ids_vec: Vec<i64> = all_mr_ids.into_iter().collect();
|
||||||
|
mr_ids_vec.sort_unstable();
|
||||||
|
let params: Vec<&dyn rusqlite::types::ToSql> = mr_ids_vec
|
||||||
|
.iter()
|
||||||
|
.map(|id| id as &dyn rusqlite::types::ToSql)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
stmt.query_map(&*params, |row| row.get::<_, String>(0))
|
||||||
|
.map(|rows| rows.filter_map(|r| r.ok()).collect())
|
||||||
|
.unwrap_or_default()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build the CTE-based expert SQL for time-decay scoring (v2).
|
||||||
|
///
|
||||||
|
/// Returns raw signal rows `(username, signal, mr_id, qty, ts, state_mult)` that
|
||||||
|
/// Rust aggregates with per-signal decay and `log2(1+count)` for note groups.
|
||||||
|
///
|
||||||
|
/// Parameters: `?1` = path, `?2` = since_ms, `?3` = project_id (nullable),
|
||||||
|
/// `?4` = as_of_ms, `?5` = closed_mr_multiplier, `?6` = reviewer_min_note_chars
|
||||||
|
pub(super) fn build_expert_sql_v2(is_prefix: bool) -> String {
|
||||||
|
let path_op = if is_prefix {
|
||||||
|
"LIKE ?1 ESCAPE '\\'"
|
||||||
|
} else {
|
||||||
|
"= ?1"
|
||||||
|
};
|
||||||
|
// INDEXED BY hints for each branch:
|
||||||
|
// - new_path branch: idx_notes_diffnote_path_created (existing)
|
||||||
|
// - old_path branch: idx_notes_old_path_author (migration 026)
|
||||||
|
format!(
|
||||||
|
"
|
||||||
|
WITH matched_notes_raw AS (
|
||||||
|
-- Branch 1: match on position_new_path
|
||||||
|
SELECT n.id, n.discussion_id, n.author_username, n.created_at, n.project_id
|
||||||
|
FROM notes n INDEXED BY idx_notes_diffnote_path_created
|
||||||
|
WHERE n.note_type = 'DiffNote'
|
||||||
|
AND n.is_system = 0
|
||||||
|
AND n.author_username IS NOT NULL
|
||||||
|
AND n.created_at >= ?2
|
||||||
|
AND n.created_at < ?4
|
||||||
|
AND (?3 IS NULL OR n.project_id = ?3)
|
||||||
|
AND n.position_new_path {path_op}
|
||||||
|
UNION ALL
|
||||||
|
-- Branch 2: match on position_old_path
|
||||||
|
SELECT n.id, n.discussion_id, n.author_username, n.created_at, n.project_id
|
||||||
|
FROM notes n INDEXED BY idx_notes_old_path_author
|
||||||
|
WHERE n.note_type = 'DiffNote'
|
||||||
|
AND n.is_system = 0
|
||||||
|
AND n.author_username IS NOT NULL
|
||||||
|
AND n.created_at >= ?2
|
||||||
|
AND n.created_at < ?4
|
||||||
|
AND (?3 IS NULL OR n.project_id = ?3)
|
||||||
|
AND n.position_old_path IS NOT NULL
|
||||||
|
AND n.position_old_path {path_op}
|
||||||
|
),
|
||||||
|
matched_notes AS (
|
||||||
|
-- Dedup: prevent double-counting when old_path = new_path (no rename)
|
||||||
|
SELECT DISTINCT id, discussion_id, author_username, created_at, project_id
|
||||||
|
FROM matched_notes_raw
|
||||||
|
),
|
||||||
|
matched_file_changes_raw AS (
|
||||||
|
-- Branch 1: match on new_path
|
||||||
|
SELECT fc.merge_request_id, fc.project_id
|
||||||
|
FROM mr_file_changes fc INDEXED BY idx_mfc_new_path_project_mr
|
||||||
|
WHERE (?3 IS NULL OR fc.project_id = ?3)
|
||||||
|
AND fc.new_path {path_op}
|
||||||
|
UNION ALL
|
||||||
|
-- Branch 2: match on old_path
|
||||||
|
SELECT fc.merge_request_id, fc.project_id
|
||||||
|
FROM mr_file_changes fc INDEXED BY idx_mfc_old_path_project_mr
|
||||||
|
WHERE (?3 IS NULL OR fc.project_id = ?3)
|
||||||
|
AND fc.old_path IS NOT NULL
|
||||||
|
AND fc.old_path {path_op}
|
||||||
|
),
|
||||||
|
matched_file_changes AS (
|
||||||
|
-- Dedup: prevent double-counting when old_path = new_path (no rename)
|
||||||
|
SELECT DISTINCT merge_request_id, project_id
|
||||||
|
FROM matched_file_changes_raw
|
||||||
|
),
|
||||||
|
mr_activity AS (
|
||||||
|
-- Centralized state-aware timestamps and state multiplier.
|
||||||
|
-- Scoped to MRs matched by file changes to avoid materializing the full MR table.
|
||||||
|
SELECT DISTINCT
|
||||||
|
m.id AS mr_id,
|
||||||
|
m.author_username,
|
||||||
|
m.state,
|
||||||
|
CASE
|
||||||
|
WHEN m.state = 'merged' THEN COALESCE(m.merged_at, m.created_at)
|
||||||
|
WHEN m.state = 'closed' THEN COALESCE(m.closed_at, m.created_at)
|
||||||
|
ELSE COALESCE(m.updated_at, m.created_at)
|
||||||
|
END AS activity_ts,
|
||||||
|
CASE WHEN m.state = 'closed' THEN ?5 ELSE 1.0 END AS state_mult
|
||||||
|
FROM merge_requests m
|
||||||
|
JOIN matched_file_changes mfc ON mfc.merge_request_id = m.id
|
||||||
|
WHERE m.state IN ('opened','merged','closed')
|
||||||
|
),
|
||||||
|
reviewer_participation AS (
|
||||||
|
-- Precompute which (mr_id, username) pairs have substantive DiffNote participation.
|
||||||
|
SELECT DISTINCT d.merge_request_id AS mr_id, mn.author_username AS username
|
||||||
|
FROM matched_notes mn
|
||||||
|
JOIN discussions d ON mn.discussion_id = d.id
|
||||||
|
JOIN notes n_body ON mn.id = n_body.id
|
||||||
|
WHERE d.merge_request_id IS NOT NULL
|
||||||
|
AND LENGTH(TRIM(COALESCE(n_body.body, ''))) >= ?6
|
||||||
|
),
|
||||||
|
raw AS (
|
||||||
|
-- Signal 1: DiffNote reviewer (individual notes for note_cnt)
|
||||||
|
SELECT mn.author_username AS username, 'diffnote_reviewer' AS signal,
|
||||||
|
m.id AS mr_id, mn.id AS note_id, mn.created_at AS seen_at,
|
||||||
|
CASE WHEN m.state = 'closed' THEN ?5 ELSE 1.0 END AS state_mult
|
||||||
|
FROM matched_notes mn
|
||||||
|
JOIN discussions d ON mn.discussion_id = d.id
|
||||||
|
JOIN merge_requests m ON d.merge_request_id = m.id
|
||||||
|
WHERE (m.author_username IS NULL OR mn.author_username != m.author_username)
|
||||||
|
AND m.state IN ('opened','merged','closed')
|
||||||
|
|
||||||
|
UNION ALL
|
||||||
|
|
||||||
|
-- Signal 2: DiffNote MR author
|
||||||
|
SELECT m.author_username AS username, 'diffnote_author' AS signal,
|
||||||
|
m.id AS mr_id, NULL AS note_id, MAX(mn.created_at) AS seen_at,
|
||||||
|
CASE WHEN m.state = 'closed' THEN ?5 ELSE 1.0 END AS state_mult
|
||||||
|
FROM merge_requests m
|
||||||
|
JOIN discussions d ON d.merge_request_id = m.id
|
||||||
|
JOIN matched_notes mn ON mn.discussion_id = d.id
|
||||||
|
WHERE m.author_username IS NOT NULL
|
||||||
|
AND m.state IN ('opened','merged','closed')
|
||||||
|
GROUP BY m.author_username, m.id
|
||||||
|
|
||||||
|
UNION ALL
|
||||||
|
|
||||||
|
-- Signal 3: MR author via file changes (uses mr_activity CTE)
|
||||||
|
SELECT a.author_username AS username, 'file_author' AS signal,
|
||||||
|
a.mr_id, NULL AS note_id,
|
||||||
|
a.activity_ts AS seen_at, a.state_mult
|
||||||
|
FROM mr_activity a
|
||||||
|
WHERE a.author_username IS NOT NULL
|
||||||
|
AND a.activity_ts >= ?2
|
||||||
|
AND a.activity_ts < ?4
|
||||||
|
|
||||||
|
UNION ALL
|
||||||
|
|
||||||
|
-- Signal 4a: Reviewer participated (in mr_reviewers AND left DiffNotes on path)
|
||||||
|
SELECT r.username AS username, 'file_reviewer_participated' AS signal,
|
||||||
|
a.mr_id, NULL AS note_id,
|
||||||
|
a.activity_ts AS seen_at, a.state_mult
|
||||||
|
FROM mr_activity a
|
||||||
|
JOIN mr_reviewers r ON r.merge_request_id = a.mr_id
|
||||||
|
JOIN reviewer_participation rp ON rp.mr_id = a.mr_id AND rp.username = r.username
|
||||||
|
WHERE r.username IS NOT NULL
|
||||||
|
AND (a.author_username IS NULL OR r.username != a.author_username)
|
||||||
|
AND a.activity_ts >= ?2
|
||||||
|
AND a.activity_ts < ?4
|
||||||
|
|
||||||
|
UNION ALL
|
||||||
|
|
||||||
|
-- Signal 4b: Reviewer assigned-only (in mr_reviewers, NO DiffNotes on path)
|
||||||
|
SELECT r.username AS username, 'file_reviewer_assigned' AS signal,
|
||||||
|
a.mr_id, NULL AS note_id,
|
||||||
|
a.activity_ts AS seen_at, a.state_mult
|
||||||
|
FROM mr_activity a
|
||||||
|
JOIN mr_reviewers r ON r.merge_request_id = a.mr_id
|
||||||
|
LEFT JOIN reviewer_participation rp ON rp.mr_id = a.mr_id AND rp.username = r.username
|
||||||
|
WHERE rp.username IS NULL
|
||||||
|
AND r.username IS NOT NULL
|
||||||
|
AND (a.author_username IS NULL OR r.username != a.author_username)
|
||||||
|
AND a.activity_ts >= ?2
|
||||||
|
AND a.activity_ts < ?4
|
||||||
|
),
|
||||||
|
aggregated AS (
|
||||||
|
-- MR-level signals: 1 row per (username, signal_class, mr_id) with MAX(ts)
|
||||||
|
SELECT username, signal, mr_id, 1 AS qty, MAX(seen_at) AS ts, MAX(state_mult) AS state_mult
|
||||||
|
FROM raw WHERE signal != 'diffnote_reviewer'
|
||||||
|
GROUP BY username, signal, mr_id
|
||||||
|
UNION ALL
|
||||||
|
-- Note signals: 1 row per (username, mr_id) with note_count and max_ts
|
||||||
|
SELECT username, 'note_group' AS signal, mr_id, COUNT(*) AS qty, MAX(seen_at) AS ts,
|
||||||
|
MAX(state_mult) AS state_mult
|
||||||
|
FROM raw WHERE signal = 'diffnote_reviewer' AND note_id IS NOT NULL
|
||||||
|
GROUP BY username, mr_id
|
||||||
|
)
|
||||||
|
SELECT username, signal, mr_id, qty, ts, state_mult FROM aggregated WHERE username IS NOT NULL
|
||||||
|
"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Query per-MR detail for a set of experts. Returns a map of username -> Vec<ExpertMrDetail>.
///
/// Runs one SQL query over four signal sources (DiffNote reviewer, DiffNote MR
/// author, MR author via file changes, assigned MR reviewer via file changes),
/// then collapses rows per (username, mr_ref) with a combined role label.
/// Only usernames in `experts` are considered (injected as an IN clause).
pub(super) fn query_expert_details(
    conn: &Connection,
    pq: &PathQuery,
    experts: &[Expert],
    since_ms: i64,
    project_id: Option<i64>,
) -> Result<HashMap<String, Vec<ExpertMrDetail>>> {
    // Prefix queries (directories) use LIKE with an escaped pattern; exact
    // file queries use equality. The pattern/value is bound as ?1.
    let path_op = if pq.is_prefix {
        "LIKE ?1 ESCAPE '\\'"
    } else {
        "= ?1"
    };

    // Build IN clause for usernames.
    // Placeholders start at ?4 because ?1=path, ?2=since_ms, ?3=project_id.
    let placeholders: Vec<String> = experts
        .iter()
        .enumerate()
        .map(|(i, _)| format!("?{}", i + 4))
        .collect();
    let in_clause = placeholders.join(",");

    // Four UNION ALL branches feed one outer rollup. Branches 1-2 aggregate
    // per (username, mr), branches 3-4 emit ungrouped rows with note_count=0;
    // the outer GROUP BY (username, mr_ref) dedupes and merges roles.
    let sql = format!(
        "
        WITH signals AS (
            -- 1. DiffNote reviewer (matches both new_path and old_path for renamed files)
            SELECT
                n.author_username AS username,
                'reviewer' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                COUNT(*) AS note_count,
                MAX(n.created_at) AS last_activity
            FROM notes n
            JOIN discussions d ON n.discussion_id = d.id
            JOIN merge_requests m ON d.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE n.note_type = 'DiffNote'
              AND n.is_system = 0
              AND n.author_username IS NOT NULL
              AND (m.author_username IS NULL OR n.author_username != m.author_username)
              AND m.state IN ('opened','merged','closed')
              AND (n.position_new_path {path_op}
                OR (n.position_old_path IS NOT NULL AND n.position_old_path {path_op}))
              AND n.created_at >= ?2
              AND (?3 IS NULL OR n.project_id = ?3)
              AND n.author_username IN ({in_clause})
            GROUP BY n.author_username, m.id

            UNION ALL

            -- 2. DiffNote MR author (matches both new_path and old_path for renamed files)
            SELECT
                m.author_username AS username,
                'author' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                0 AS note_count,
                MAX(n.created_at) AS last_activity
            FROM merge_requests m
            JOIN discussions d ON d.merge_request_id = m.id
            JOIN notes n ON n.discussion_id = d.id
            JOIN projects p ON m.project_id = p.id
            WHERE n.note_type = 'DiffNote'
              AND n.is_system = 0
              AND m.author_username IS NOT NULL
              AND m.state IN ('opened','merged','closed')
              AND (n.position_new_path {path_op}
                OR (n.position_old_path IS NOT NULL AND n.position_old_path {path_op}))
              AND n.created_at >= ?2
              AND (?3 IS NULL OR n.project_id = ?3)
              AND m.author_username IN ({in_clause})
            GROUP BY m.author_username, m.id

            UNION ALL

            -- 3. MR author via file changes (matches both new_path and old_path)
            SELECT
                m.author_username AS username,
                'author' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                0 AS note_count,
                m.updated_at AS last_activity
            FROM mr_file_changes fc
            JOIN merge_requests m ON fc.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE m.author_username IS NOT NULL
              AND m.state IN ('opened','merged','closed')
              AND (fc.new_path {path_op}
                OR (fc.old_path IS NOT NULL AND fc.old_path {path_op}))
              AND m.updated_at >= ?2
              AND (?3 IS NULL OR fc.project_id = ?3)
              AND m.author_username IN ({in_clause})

            UNION ALL

            -- 4. MR reviewer via file changes + mr_reviewers (matches both new_path and old_path)
            SELECT
                r.username AS username,
                'reviewer' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                0 AS note_count,
                m.updated_at AS last_activity
            FROM mr_file_changes fc
            JOIN merge_requests m ON fc.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            JOIN mr_reviewers r ON r.merge_request_id = m.id
            WHERE r.username IS NOT NULL
              AND (m.author_username IS NULL OR r.username != m.author_username)
              AND m.state IN ('opened','merged','closed')
              AND (fc.new_path {path_op}
                OR (fc.old_path IS NOT NULL AND fc.old_path {path_op}))
              AND m.updated_at >= ?2
              AND (?3 IS NULL OR fc.project_id = ?3)
              AND r.username IN ({in_clause})
        )
        SELECT
            username,
            mr_ref,
            title,
            GROUP_CONCAT(DISTINCT role) AS roles,
            SUM(note_count) AS total_notes,
            MAX(last_activity) AS last_activity
        FROM signals
        GROUP BY username, mr_ref
        ORDER BY username ASC, last_activity DESC
        "
    );

    // prepare() not prepare_cached(): the IN clause varies by expert count,
    // so the SQL shape changes per invocation and caching wastes memory.
    let mut stmt = conn.prepare(&sql)?;

    // Build params: ?1=path, ?2=since_ms, ?3=project_id, ?4..=usernames.
    // Boxed trait objects because the parameter list is variable-length.
    let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
    params.push(Box::new(pq.value.clone()));
    params.push(Box::new(since_ms));
    params.push(Box::new(project_id));
    for expert in experts {
        params.push(Box::new(expert.username.clone()));
    }
    let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();

    // (username, mr_ref, title, roles_csv, total_notes, last_activity)
    let rows: Vec<(String, String, String, String, u32, i64)> = stmt
        .query_map(param_refs.as_slice(), |row| {
            Ok((
                row.get(0)?,
                row.get(1)?,
                row.get(2)?,
                row.get::<_, String>(3)?,
                row.get(4)?,
                row.get(5)?,
            ))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    let mut map: HashMap<String, Vec<ExpertMrDetail>> = HashMap::new();
    for (username, mr_ref, title, roles_csv, note_count, last_activity) in rows {
        // roles_csv is GROUP_CONCAT(DISTINCT role), e.g. "author,reviewer";
        // collapse it into a compact display label.
        let has_author = roles_csv.contains("author");
        let has_reviewer = roles_csv.contains("reviewer");
        let role = match (has_author, has_reviewer) {
            (true, true) => "A+R",
            (true, false) => "A",
            (false, true) => "R",
            _ => "?",
        }
        .to_string();
        map.entry(username).or_default().push(ExpertMrDetail {
            mr_ref,
            title,
            role,
            note_count,
            last_activity_ms: last_activity,
        });
    }

    Ok(map)
}
|
||||||
|
|
||||||
|
/// Render an `ExpertResult` as a human-readable table on stdout.
///
/// Layout: header line, per-expert summary rows, optional detail sub-rows
/// (when `--detail` populated `expert.details`), and a truncation hint.
pub(super) fn print_expert_human(r: &ExpertResult, project_path: Option<&str>) {
    println!();
    println!(
        "{}",
        Theme::bold().render(&format!("Experts for {}", r.path_query))
    );
    // U+2500 is a horizontal box-drawing rule.
    println!("{}", "\u{2500}".repeat(60));
    println!(
        " {}",
        Theme::dim().render(&format!(
            "(matching {} {})",
            r.path_match,
            if r.path_match == "exact" {
                "file"
            } else {
                "directory prefix"
            }
        ))
    );
    super::print_scope_hint(project_path);
    println!();

    // Empty result: print a dim notice and bail before drawing the table.
    if r.experts.is_empty() {
        println!(
            " {}",
            Theme::dim().render("No experts found for this path.")
        );
        println!();
        return;
    }

    println!(
        " {:<16} {:>6} {:>12} {:>6} {:>12} {} {}",
        Theme::bold().render("Username"),
        Theme::bold().render("Score"),
        Theme::bold().render("Reviewed(MRs)"),
        Theme::bold().render("Notes"),
        Theme::bold().render("Authored(MRs)"),
        Theme::bold().render("Last Seen"),
        Theme::bold().render("MR Refs"),
    );

    for expert in &r.experts {
        // Zero counts render as "-" so the table reads cleanly.
        let reviews = if expert.review_mr_count > 0 {
            expert.review_mr_count.to_string()
        } else {
            "-".to_string()
        };
        let notes = if expert.review_note_count > 0 {
            expert.review_note_count.to_string()
        } else {
            "-".to_string()
        };
        let authored = if expert.author_mr_count > 0 {
            expert.author_mr_count.to_string()
        } else {
            "-".to_string()
        };
        // Show at most 5 MR refs inline; the rest become a "+N" overflow tag.
        let mr_str = expert
            .mr_refs
            .iter()
            .take(5)
            .cloned()
            .collect::<Vec<_>>()
            .join(", ");
        let overflow = if expert.mr_refs_total > 5 {
            format!(" +{}", expert.mr_refs_total - 5)
        } else {
            String::new()
        };
        println!(
            " {:<16} {:>6} {:>12} {:>6} {:>12} {:<12}{}{}",
            Theme::info().render(&format!("{} {}", Icons::user(), expert.username)),
            expert.score,
            reviews,
            notes,
            authored,
            render::format_relative_time(expert.last_seen_ms),
            if mr_str.is_empty() {
                String::new()
            } else {
                format!(" {mr_str}")
            },
            overflow,
        );

        // Print detail sub-rows when populated (--detail mode only).
        if let Some(details) = &expert.details {
            const MAX_DETAIL_DISPLAY: usize = 10;
            for d in details.iter().take(MAX_DETAIL_DISPLAY) {
                let notes_str = if d.note_count > 0 {
                    format!("{} notes", d.note_count)
                } else {
                    String::new()
                };
                println!(
                    " {:<3} {:<30} {:>30} {:>10} {}",
                    Theme::dim().render(&d.role),
                    d.mr_ref,
                    render::truncate(&format!("\"{}\"", d.title), 30),
                    notes_str,
                    Theme::dim().render(&render::format_relative_time(d.last_activity_ms)),
                );
            }
            if details.len() > MAX_DETAIL_DISPLAY {
                println!(
                    " {}",
                    Theme::dim().render(&format!("+{} more", details.len() - MAX_DETAIL_DISPLAY))
                );
            }
        }
    }
    // Result set was capped by --limit; tell the user how to see more.
    if r.truncated {
        println!(
            " {}",
            Theme::dim().render("(showing first -n; rerun with a higher --limit)")
        );
    }
    println!();
}
|
||||||
|
|
||||||
|
/// Serialize an `ExpertResult` into the robot-JSON payload shape.
///
/// Optional fields (`score_raw`, `components`, `details`) are attached only
/// when populated, so the output stays compact in the default mode.
pub(super) fn expert_to_json(r: &ExpertResult) -> serde_json::Value {
    let mut expert_values = Vec::with_capacity(r.experts.len());
    for e in &r.experts {
        // Base object with the always-present fields.
        let mut expert_json = serde_json::json!({
            "username": e.username,
            "score": e.score,
            "review_mr_count": e.review_mr_count,
            "review_note_count": e.review_note_count,
            "author_mr_count": e.author_mr_count,
            "last_seen_at": ms_to_iso(e.last_seen_ms),
            "mr_refs": e.mr_refs,
            "mr_refs_total": e.mr_refs_total,
            "mr_refs_truncated": e.mr_refs_truncated,
        });
        // --explain-score extras.
        if let Some(raw) = e.score_raw {
            expert_json["score_raw"] = serde_json::json!(raw);
        }
        if let Some(comp) = &e.components {
            expert_json["components"] = serde_json::json!({
                "author": comp.author,
                "reviewer_participated": comp.reviewer_participated,
                "reviewer_assigned": comp.reviewer_assigned,
                "notes": comp.notes,
            });
        }
        // --detail extras: one object per touched MR.
        if let Some(details) = &e.details {
            let detail_values: Vec<serde_json::Value> = details
                .iter()
                .map(|d| {
                    serde_json::json!({
                        "mr_ref": d.mr_ref,
                        "title": d.title,
                        "role": d.role,
                        "note_count": d.note_count,
                        "last_activity_at": ms_to_iso(d.last_activity_ms),
                    })
                })
                .collect();
            expert_json["details"] = serde_json::Value::Array(detail_values);
        }
        expert_values.push(expert_json);
    }

    serde_json::json!({
        "path_query": r.path_query,
        "path_match": r.path_match,
        "scoring_model_version": 2,
        "truncated": r.truncated,
        "experts": expert_values,
    })
}
|
||||||
428
src/cli/commands/who/mod.rs
Normal file
428
src/cli/commands/who/mod.rs
Normal file
@@ -0,0 +1,428 @@
|
|||||||
|
mod active;
|
||||||
|
mod expert;
|
||||||
|
mod overlap;
|
||||||
|
mod reviews;
|
||||||
|
pub mod types;
|
||||||
|
mod workload;
|
||||||
|
|
||||||
|
pub use types::*;
|
||||||
|
|
||||||
|
// Re-export submodule functions for tests (tests use `use super::*`).
|
||||||
|
#[cfg(test)]
|
||||||
|
use active::query_active;
|
||||||
|
#[cfg(test)]
|
||||||
|
use expert::{build_expert_sql_v2, half_life_decay, query_expert};
|
||||||
|
#[cfg(test)]
|
||||||
|
use overlap::{format_overlap_role, query_overlap};
|
||||||
|
#[cfg(test)]
|
||||||
|
use reviews::{normalize_review_prefix, query_reviews};
|
||||||
|
#[cfg(test)]
|
||||||
|
use workload::query_workload;
|
||||||
|
|
||||||
|
use rusqlite::Connection;
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
use crate::Config;
|
||||||
|
use crate::cli::WhoArgs;
|
||||||
|
use crate::cli::render::Theme;
|
||||||
|
use crate::cli::robot::RobotMeta;
|
||||||
|
use crate::core::db::create_connection;
|
||||||
|
use crate::core::error::{LoreError, Result};
|
||||||
|
use crate::core::path_resolver::normalize_repo_path;
|
||||||
|
use crate::core::paths::get_db_path;
|
||||||
|
use crate::core::project::resolve_project;
|
||||||
|
use crate::core::time::{ms_to_iso, now_ms, parse_since, parse_since_from};
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
use crate::core::config::ScoringConfig;
|
||||||
|
#[cfg(test)]
|
||||||
|
use crate::core::path_resolver::{SuffixResult, build_path_query, escape_like, suffix_probe};
|
||||||
|
|
||||||
|
// ─── Mode Discrimination ────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Determines which query mode to run based on args.
///
/// Path variants own their strings because path normalization produces new `String`s.
/// Username variants borrow from args since no normalization is needed.
enum WhoMode<'a> {
    /// `lore who <file-path>` OR `lore who --path <path>`
    Expert { path: String },
    /// `lore who <username>`
    Workload { username: &'a str },
    /// `lore who <username> --reviews`
    Reviews { username: &'a str },
    /// `lore who --active`
    Active,
    /// `lore who --overlap <path>`
    Overlap { path: String },
}
|
||||||
|
|
||||||
|
fn resolve_mode<'a>(args: &'a WhoArgs) -> Result<WhoMode<'a>> {
|
||||||
|
// Explicit --path flag always wins (handles root files like README.md,
|
||||||
|
// LICENSE, Makefile -- anything without a / that can't be auto-detected)
|
||||||
|
if let Some(p) = &args.path {
|
||||||
|
return Ok(WhoMode::Expert {
|
||||||
|
path: normalize_repo_path(p),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if args.active {
|
||||||
|
return Ok(WhoMode::Active);
|
||||||
|
}
|
||||||
|
if let Some(path) = &args.overlap {
|
||||||
|
return Ok(WhoMode::Overlap {
|
||||||
|
path: normalize_repo_path(path),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if let Some(target) = &args.target {
|
||||||
|
let clean = target.strip_prefix('@').unwrap_or(target);
|
||||||
|
if args.reviews {
|
||||||
|
return Ok(WhoMode::Reviews { username: clean });
|
||||||
|
}
|
||||||
|
// Disambiguation: if target contains '/', it's a file path.
|
||||||
|
// GitLab usernames never contain '/'.
|
||||||
|
// Root files (no '/') require --path.
|
||||||
|
if clean.contains('/') {
|
||||||
|
return Ok(WhoMode::Expert {
|
||||||
|
path: normalize_repo_path(clean),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return Ok(WhoMode::Workload { username: clean });
|
||||||
|
}
|
||||||
|
Err(LoreError::Other(
|
||||||
|
"Provide a username, file path, --active, or --overlap <path>.\n\n\
|
||||||
|
Examples:\n \
|
||||||
|
lore who src/features/auth/\n \
|
||||||
|
lore who @username\n \
|
||||||
|
lore who --active\n \
|
||||||
|
lore who --overlap src/features/\n \
|
||||||
|
lore who --path README.md\n \
|
||||||
|
lore who --path Makefile"
|
||||||
|
.to_string(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn validate_mode_flags(mode: &WhoMode<'_>, args: &WhoArgs) -> Result<()> {
|
||||||
|
if args.detail && !matches!(mode, WhoMode::Expert { .. }) {
|
||||||
|
return Err(LoreError::Other(
|
||||||
|
"--detail is only supported in expert mode (`lore who --path <path>` or `lore who <path/with/slash>`).".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Entry Point ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Main entry point. Resolves mode + resolved inputs once, then dispatches.
///
/// Opens the DB, resolves the optional `-p` project, picks the mode from
/// args, validates mode-specific flags, then runs the matching query and
/// wraps the result plus the resolved inputs in a [`WhoRun`].
pub fn run_who(config: &Config, args: &WhoArgs) -> Result<WhoRun> {
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let conn = create_connection(&db_path)?;

    // -p/--project: resolve the user-supplied identifier to a numeric ID.
    let project_id = args
        .project
        .as_deref()
        .map(|p| resolve_project(&conn, p))
        .transpose()?;

    // Human-readable project path for output headers/JSON.
    let project_path = project_id
        .map(|id| lookup_project_path(&conn, id))
        .transpose()?;

    let mode = resolve_mode(args)?;
    validate_mode_flags(&mode, args)?;

    // since_mode semantics:
    // - expert/reviews/active/overlap: default window applies if args.since is None -> "default"
    // - workload: no default window; args.since None => "none"
    let since_mode_for_defaulted = if args.since.is_some() {
        "explicit"
    } else {
        "default"
    };
    let since_mode_for_workload = if args.since.is_some() {
        "explicit"
    } else {
        "none"
    };

    match mode {
        WhoMode::Expert { path } => {
            // Compute as_of first so --since durations are relative to it.
            let as_of_ms = match &args.as_of {
                Some(v) => parse_since(v).ok_or_else(|| {
                    LoreError::Other(format!(
                        "Invalid --as-of value: '{v}'. Use a duration (30d, 6m) or date (2024-01-15)"
                    ))
                })?,
                None => now_ms(),
            };
            // --all-history disables the window entirely; otherwise default 24m.
            let since_ms = if args.all_history {
                0
            } else {
                resolve_since_from(args.since.as_deref(), "24m", as_of_ms)?
            };
            let limit = usize::from(args.limit);
            let result = expert::query_expert(
                &conn,
                &path,
                project_id,
                since_ms,
                as_of_ms,
                limit,
                &config.scoring,
                args.detail,
                args.explain_score,
                args.include_bots,
            )?;
            Ok(WhoRun {
                resolved_input: WhoResolvedInput {
                    mode: "expert".to_string(),
                    project_id,
                    project_path,
                    since_ms: Some(since_ms),
                    since_iso: Some(ms_to_iso(since_ms)),
                    since_mode: since_mode_for_defaulted.to_string(),
                    limit: args.limit,
                },
                result: WhoResult::Expert(result),
            })
        }
        WhoMode::Workload { username } => {
            // Workload has no default window: None means "all time".
            let since_ms = args
                .since
                .as_deref()
                .map(resolve_since_required)
                .transpose()?;
            let limit = usize::from(args.limit);
            let result = workload::query_workload(
                &conn,
                username,
                project_id,
                since_ms,
                limit,
                args.include_closed,
            )?;
            Ok(WhoRun {
                resolved_input: WhoResolvedInput {
                    mode: "workload".to_string(),
                    project_id,
                    project_path,
                    since_ms,
                    since_iso: since_ms.map(ms_to_iso),
                    since_mode: since_mode_for_workload.to_string(),
                    limit: args.limit,
                },
                result: WhoResult::Workload(result),
            })
        }
        WhoMode::Reviews { username } => {
            // Default review window: 6 months.
            let since_ms = resolve_since(args.since.as_deref(), "6m")?;
            let result = reviews::query_reviews(&conn, username, project_id, since_ms)?;
            Ok(WhoRun {
                resolved_input: WhoResolvedInput {
                    mode: "reviews".to_string(),
                    project_id,
                    project_path,
                    since_ms: Some(since_ms),
                    since_iso: Some(ms_to_iso(since_ms)),
                    since_mode: since_mode_for_defaulted.to_string(),
                    limit: args.limit,
                },
                result: WhoResult::Reviews(result),
            })
        }
        WhoMode::Active => {
            // Default activity window: 7 days.
            let since_ms = resolve_since(args.since.as_deref(), "7d")?;
            let limit = usize::from(args.limit);
            let result =
                active::query_active(&conn, project_id, since_ms, limit, args.include_closed)?;
            Ok(WhoRun {
                resolved_input: WhoResolvedInput {
                    mode: "active".to_string(),
                    project_id,
                    project_path,
                    since_ms: Some(since_ms),
                    since_iso: Some(ms_to_iso(since_ms)),
                    since_mode: since_mode_for_defaulted.to_string(),
                    limit: args.limit,
                },
                result: WhoResult::Active(result),
            })
        }
        WhoMode::Overlap { path } => {
            // Default overlap window: 30 days.
            let since_ms = resolve_since(args.since.as_deref(), "30d")?;
            let limit = usize::from(args.limit);
            let result = overlap::query_overlap(&conn, &path, project_id, since_ms, limit)?;
            Ok(WhoRun {
                resolved_input: WhoResolvedInput {
                    mode: "overlap".to_string(),
                    project_id,
                    project_path,
                    since_ms: Some(since_ms),
                    since_iso: Some(ms_to_iso(since_ms)),
                    since_mode: since_mode_for_defaulted.to_string(),
                    limit: args.limit,
                },
                result: WhoResult::Overlap(result),
            })
        }
    }
}
|
||||||
|
|
||||||
|
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Look up the project path for a resolved project ID.
|
||||||
|
fn lookup_project_path(conn: &Connection, project_id: i64) -> Result<String> {
|
||||||
|
conn.query_row(
|
||||||
|
"SELECT path_with_namespace FROM projects WHERE id = ?1",
|
||||||
|
rusqlite::params![project_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.map_err(|e| LoreError::Other(format!("Failed to look up project path: {e}")))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse --since with a default fallback.
|
||||||
|
fn resolve_since(input: Option<&str>, default: &str) -> Result<i64> {
|
||||||
|
let s = input.unwrap_or(default);
|
||||||
|
parse_since(s).ok_or_else(|| {
|
||||||
|
LoreError::Other(format!(
|
||||||
|
"Invalid --since value: '{s}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
|
||||||
|
))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse --since with a default fallback, relative to a reference timestamp.
|
||||||
|
/// Durations (7d, 2w, 6m) are computed from `reference_ms` instead of now.
|
||||||
|
fn resolve_since_from(input: Option<&str>, default: &str, reference_ms: i64) -> Result<i64> {
|
||||||
|
let s = input.unwrap_or(default);
|
||||||
|
parse_since_from(s, reference_ms).ok_or_else(|| {
|
||||||
|
LoreError::Other(format!(
|
||||||
|
"Invalid --since value: '{s}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
|
||||||
|
))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse --since without a default (returns error if invalid).
|
||||||
|
fn resolve_since_required(input: &str) -> Result<i64> {
|
||||||
|
parse_since(input).ok_or_else(|| {
|
||||||
|
LoreError::Other(format!(
|
||||||
|
"Invalid --since value: '{input}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
|
||||||
|
))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Human Output ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
pub fn print_who_human(result: &WhoResult, project_path: Option<&str>) {
|
||||||
|
match result {
|
||||||
|
WhoResult::Expert(r) => expert::print_expert_human(r, project_path),
|
||||||
|
WhoResult::Workload(r) => workload::print_workload_human(r),
|
||||||
|
WhoResult::Reviews(r) => reviews::print_reviews_human(r),
|
||||||
|
WhoResult::Active(r) => active::print_active_human(r, project_path),
|
||||||
|
WhoResult::Overlap(r) => overlap::print_overlap_human(r, project_path),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print a dim hint when results aggregate across all projects.
|
||||||
|
pub(super) fn print_scope_hint(project_path: Option<&str>) {
|
||||||
|
if project_path.is_none() {
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("(aggregated across all projects; use -p to scope)")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Robot JSON Output ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Emit the robot-JSON envelope for a completed `who` run on stdout.
///
/// Shape: `{ ok, data: { mode, input, resolved_input, <flattened result> }, meta }`.
/// Honors `--fields` by filtering the per-mode result arrays after
/// serialization.
pub fn print_who_json(run: &WhoRun, args: &WhoArgs, elapsed_ms: u64) {
    // Pick the mode string and mode-specific serializer in one step.
    let (mode, data) = match &run.result {
        WhoResult::Expert(r) => ("expert", expert::expert_to_json(r)),
        WhoResult::Workload(r) => ("workload", workload::workload_to_json(r)),
        WhoResult::Reviews(r) => ("reviews", reviews::reviews_to_json(r)),
        WhoResult::Active(r) => ("active", active::active_to_json(r)),
        WhoResult::Overlap(r) => ("overlap", overlap::overlap_to_json(r)),
    };

    // Raw CLI args -- what the user typed
    let input = serde_json::json!({
        "target": args.target,
        "path": args.path,
        "project": args.project,
        "since": args.since,
        "limit": args.limit,
        "detail": args.detail,
        "as_of": args.as_of,
        "explain_score": args.explain_score,
        "include_bots": args.include_bots,
        "all_history": args.all_history,
    });

    // Resolved/computed values -- what actually ran
    let resolved_input = serde_json::json!({
        "mode": run.resolved_input.mode,
        "project_id": run.resolved_input.project_id,
        "project_path": run.resolved_input.project_path,
        "since_ms": run.resolved_input.since_ms,
        "since_iso": run.resolved_input.since_iso,
        "since_mode": run.resolved_input.since_mode,
        "limit": run.resolved_input.limit,
    });

    let output = WhoJsonEnvelope {
        ok: true,
        data: WhoJsonData {
            mode: mode.to_string(),
            input,
            resolved_input,
            result: data,
        },
        meta: RobotMeta { elapsed_ms },
    };

    // Serialization failure degrades to a machine-parseable error envelope
    // instead of panicking -- robot output must always be valid JSON.
    let mut value = serde_json::to_value(&output).unwrap_or_else(|e| {
        serde_json::json!({"ok":false,"error":{"code":"INTERNAL_ERROR","message":format!("JSON serialization failed: {e}")}})
    });

    if let Some(f) = &args.fields {
        // Preset lookup is keyed per mode, e.g. "who_expert".
        let preset_key = format!("who_{mode}");
        let expanded = crate::cli::robot::expand_fields_preset(f, &preset_key);
        // Each who mode uses a different array key; try all possible keys
        for key in &[
            "experts",
            "assigned_issues",
            "authored_mrs",
            "review_mrs",
            "categories",
            "discussions",
            "users",
        ] {
            crate::cli::robot::filter_fields(&mut value, key, &expanded);
        }
    }

    match serde_json::to_string(&value) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing to JSON: {e}"),
    }
}
|
||||||
|
|
||||||
|
/// Top-level robot-JSON envelope: `{ ok, data, meta }`.
#[derive(Serialize)]
struct WhoJsonEnvelope {
    // Always `true` on this path; serialization failures are replaced with a
    // hand-built `{"ok":false,...}` value in `print_who_json`.
    ok: bool,
    data: WhoJsonData,
    meta: RobotMeta,
}
|
||||||
|
|
||||||
|
/// Payload of the robot-JSON envelope.
#[derive(Serialize)]
struct WhoJsonData {
    // Which who-mode ran: "expert" | "workload" | "reviews" | "active" | "overlap".
    mode: String,
    // Raw CLI arguments exactly as the user supplied them.
    input: serde_json::Value,
    // Resolved/computed values that actually drove the query.
    resolved_input: serde_json::Value,
    // Mode-specific result object, flattened into this level of the JSON.
    #[serde(flatten)]
    result: serde_json::Value,
}
|
||||||
|
|
||||||
|
// ─── Tests ───────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
#[path = "../who_tests.rs"]
|
||||||
|
mod tests;
|
||||||
323
src/cli/commands/who/overlap.rs
Normal file
323
src/cli/commands/who/overlap.rs
Normal file
@@ -0,0 +1,323 @@
|
|||||||
|
use std::collections::{HashMap, HashSet};
|
||||||
|
|
||||||
|
use rusqlite::Connection;
|
||||||
|
|
||||||
|
use crate::cli::render::{self, Icons, Theme};
|
||||||
|
use crate::core::error::Result;
|
||||||
|
use crate::core::path_resolver::build_path_query;
|
||||||
|
use crate::core::time::ms_to_iso;
|
||||||
|
|
||||||
|
use super::types::*;
|
||||||
|
|
||||||
|
pub(super) fn query_overlap(
|
||||||
|
conn: &Connection,
|
||||||
|
path: &str,
|
||||||
|
project_id: Option<i64>,
|
||||||
|
since_ms: i64,
|
||||||
|
limit: usize,
|
||||||
|
) -> Result<OverlapResult> {
|
||||||
|
let pq = build_path_query(conn, path, project_id)?;
|
||||||
|
|
||||||
|
// Build SQL with 4 signal sources, matching the expert query expansion.
|
||||||
|
// Each row produces (username, role, mr_id, mr_ref, seen_at) for Rust-side accumulation.
|
||||||
|
let path_op = if pq.is_prefix {
|
||||||
|
"LIKE ?1 ESCAPE '\\'"
|
||||||
|
} else {
|
||||||
|
"= ?1"
|
||||||
|
};
|
||||||
|
// Match both new_path and old_path to capture activity on renamed files.
|
||||||
|
// INDEXED BY removed to allow OR across path columns; overlap runs once
|
||||||
|
// per command so the minor plan difference is acceptable.
|
||||||
|
let sql = format!(
|
||||||
|
"SELECT username, role, touch_count, last_seen_at, mr_refs FROM (
|
||||||
|
-- 1. DiffNote reviewer (matches both new_path and old_path)
|
||||||
|
SELECT
|
||||||
|
n.author_username AS username,
|
||||||
|
'reviewer' AS role,
|
||||||
|
COUNT(DISTINCT m.id) AS touch_count,
|
||||||
|
MAX(n.created_at) AS last_seen_at,
|
||||||
|
GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN merge_requests m ON d.merge_request_id = m.id
|
||||||
|
JOIN projects p ON m.project_id = p.id
|
||||||
|
WHERE n.note_type = 'DiffNote'
|
||||||
|
AND (n.position_new_path {path_op}
|
||||||
|
OR (n.position_old_path IS NOT NULL AND n.position_old_path {path_op}))
|
||||||
|
AND n.is_system = 0
|
||||||
|
AND n.author_username IS NOT NULL
|
||||||
|
AND (m.author_username IS NULL OR n.author_username != m.author_username)
|
||||||
|
AND m.state IN ('opened','merged','closed')
|
||||||
|
AND n.created_at >= ?2
|
||||||
|
AND (?3 IS NULL OR n.project_id = ?3)
|
||||||
|
GROUP BY n.author_username
|
||||||
|
|
||||||
|
UNION ALL
|
||||||
|
|
||||||
|
-- 2. DiffNote MR author (matches both new_path and old_path)
|
||||||
|
SELECT
|
||||||
|
m.author_username AS username,
|
||||||
|
'author' AS role,
|
||||||
|
COUNT(DISTINCT m.id) AS touch_count,
|
||||||
|
MAX(n.created_at) AS last_seen_at,
|
||||||
|
GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN merge_requests m ON d.merge_request_id = m.id
|
||||||
|
JOIN projects p ON m.project_id = p.id
|
||||||
|
WHERE n.note_type = 'DiffNote'
|
||||||
|
AND (n.position_new_path {path_op}
|
||||||
|
OR (n.position_old_path IS NOT NULL AND n.position_old_path {path_op}))
|
||||||
|
AND n.is_system = 0
|
||||||
|
AND m.state IN ('opened','merged','closed')
|
||||||
|
AND m.author_username IS NOT NULL
|
||||||
|
AND n.created_at >= ?2
|
||||||
|
AND (?3 IS NULL OR n.project_id = ?3)
|
||||||
|
GROUP BY m.author_username
|
||||||
|
|
||||||
|
UNION ALL
|
||||||
|
|
||||||
|
-- 3. MR author via file changes (matches both new_path and old_path)
|
||||||
|
SELECT
|
||||||
|
m.author_username AS username,
|
||||||
|
'author' AS role,
|
||||||
|
COUNT(DISTINCT m.id) AS touch_count,
|
||||||
|
MAX(m.updated_at) AS last_seen_at,
|
||||||
|
GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
|
||||||
|
FROM mr_file_changes fc
|
||||||
|
JOIN merge_requests m ON fc.merge_request_id = m.id
|
||||||
|
JOIN projects p ON m.project_id = p.id
|
||||||
|
WHERE m.author_username IS NOT NULL
|
||||||
|
AND m.state IN ('opened','merged','closed')
|
||||||
|
AND (fc.new_path {path_op}
|
||||||
|
OR (fc.old_path IS NOT NULL AND fc.old_path {path_op}))
|
||||||
|
AND m.updated_at >= ?2
|
||||||
|
AND (?3 IS NULL OR fc.project_id = ?3)
|
||||||
|
GROUP BY m.author_username
|
||||||
|
|
||||||
|
UNION ALL
|
||||||
|
|
||||||
|
-- 4. MR reviewer via file changes + mr_reviewers (matches both new_path and old_path)
|
||||||
|
SELECT
|
||||||
|
r.username AS username,
|
||||||
|
'reviewer' AS role,
|
||||||
|
COUNT(DISTINCT m.id) AS touch_count,
|
||||||
|
MAX(m.updated_at) AS last_seen_at,
|
||||||
|
GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
|
||||||
|
FROM mr_file_changes fc
|
||||||
|
JOIN merge_requests m ON fc.merge_request_id = m.id
|
||||||
|
JOIN projects p ON m.project_id = p.id
|
||||||
|
JOIN mr_reviewers r ON r.merge_request_id = m.id
|
||||||
|
WHERE r.username IS NOT NULL
|
||||||
|
AND (m.author_username IS NULL OR r.username != m.author_username)
|
||||||
|
AND m.state IN ('opened','merged','closed')
|
||||||
|
AND (fc.new_path {path_op}
|
||||||
|
OR (fc.old_path IS NOT NULL AND fc.old_path {path_op}))
|
||||||
|
AND m.updated_at >= ?2
|
||||||
|
AND (?3 IS NULL OR fc.project_id = ?3)
|
||||||
|
GROUP BY r.username
|
||||||
|
)"
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut stmt = conn.prepare_cached(&sql)?;
|
||||||
|
let rows: Vec<(String, String, u32, i64, Option<String>)> = stmt
|
||||||
|
.query_map(rusqlite::params![pq.value, since_ms, project_id], |row| {
|
||||||
|
Ok((
|
||||||
|
row.get(0)?,
|
||||||
|
row.get(1)?,
|
||||||
|
row.get(2)?,
|
||||||
|
row.get(3)?,
|
||||||
|
row.get(4)?,
|
||||||
|
))
|
||||||
|
})?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
// Internal accumulator uses HashSet for MR refs from the start
|
||||||
|
struct OverlapAcc {
|
||||||
|
username: String,
|
||||||
|
author_touch_count: u32,
|
||||||
|
review_touch_count: u32,
|
||||||
|
touch_count: u32,
|
||||||
|
last_seen_at: i64,
|
||||||
|
mr_refs: HashSet<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut user_map: HashMap<String, OverlapAcc> = HashMap::new();
|
||||||
|
for (username, role, count, last_seen, mr_refs_csv) in &rows {
|
||||||
|
let mr_refs: Vec<String> = mr_refs_csv
|
||||||
|
.as_deref()
|
||||||
|
.map(|csv| csv.split(',').map(|s| s.trim().to_string()).collect())
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
let entry = user_map
|
||||||
|
.entry(username.clone())
|
||||||
|
.or_insert_with(|| OverlapAcc {
|
||||||
|
username: username.clone(),
|
||||||
|
author_touch_count: 0,
|
||||||
|
review_touch_count: 0,
|
||||||
|
touch_count: 0,
|
||||||
|
last_seen_at: 0,
|
||||||
|
mr_refs: HashSet::new(),
|
||||||
|
});
|
||||||
|
entry.touch_count += count;
|
||||||
|
if role == "author" {
|
||||||
|
entry.author_touch_count += count;
|
||||||
|
} else {
|
||||||
|
entry.review_touch_count += count;
|
||||||
|
}
|
||||||
|
if *last_seen > entry.last_seen_at {
|
||||||
|
entry.last_seen_at = *last_seen;
|
||||||
|
}
|
||||||
|
for r in mr_refs {
|
||||||
|
entry.mr_refs.insert(r);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert accumulators to output structs
|
||||||
|
let mut users: Vec<OverlapUser> = user_map
|
||||||
|
.into_values()
|
||||||
|
.map(|a| {
|
||||||
|
let mut mr_refs: Vec<String> = a.mr_refs.into_iter().collect();
|
||||||
|
mr_refs.sort();
|
||||||
|
let mr_refs_total = mr_refs.len() as u32;
|
||||||
|
let mr_refs_truncated = mr_refs.len() > MAX_MR_REFS_PER_USER;
|
||||||
|
if mr_refs_truncated {
|
||||||
|
mr_refs.truncate(MAX_MR_REFS_PER_USER);
|
||||||
|
}
|
||||||
|
OverlapUser {
|
||||||
|
username: a.username,
|
||||||
|
author_touch_count: a.author_touch_count,
|
||||||
|
review_touch_count: a.review_touch_count,
|
||||||
|
touch_count: a.touch_count,
|
||||||
|
last_seen_at: a.last_seen_at,
|
||||||
|
mr_refs,
|
||||||
|
mr_refs_total,
|
||||||
|
mr_refs_truncated,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Stable sort with full tie-breakers for deterministic output
|
||||||
|
users.sort_by(|a, b| {
|
||||||
|
b.touch_count
|
||||||
|
.cmp(&a.touch_count)
|
||||||
|
.then_with(|| b.last_seen_at.cmp(&a.last_seen_at))
|
||||||
|
.then_with(|| a.username.cmp(&b.username))
|
||||||
|
});
|
||||||
|
|
||||||
|
let truncated = users.len() > limit;
|
||||||
|
users.truncate(limit);
|
||||||
|
|
||||||
|
Ok(OverlapResult {
|
||||||
|
path_query: if pq.is_prefix {
|
||||||
|
path.trim_end_matches('/').to_string()
|
||||||
|
} else {
|
||||||
|
pq.value.clone()
|
||||||
|
},
|
||||||
|
path_match: if pq.is_prefix { "prefix" } else { "exact" }.to_string(),
|
||||||
|
users,
|
||||||
|
truncated,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Format overlap role for display: "A", "R", or "A+R".
|
||||||
|
pub(super) fn format_overlap_role(user: &OverlapUser) -> &'static str {
|
||||||
|
match (user.author_touch_count > 0, user.review_touch_count > 0) {
|
||||||
|
(true, true) => "A+R",
|
||||||
|
(true, false) => "A",
|
||||||
|
(false, true) => "R",
|
||||||
|
(false, false) => "-",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) fn print_overlap_human(r: &OverlapResult, project_path: Option<&str>) {
|
||||||
|
println!();
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
Theme::bold().render(&format!("Overlap for {}", r.path_query))
|
||||||
|
);
|
||||||
|
println!("{}", "\u{2500}".repeat(60));
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render(&format!(
|
||||||
|
"(matching {} {})",
|
||||||
|
r.path_match,
|
||||||
|
if r.path_match == "exact" {
|
||||||
|
"file"
|
||||||
|
} else {
|
||||||
|
"directory prefix"
|
||||||
|
}
|
||||||
|
))
|
||||||
|
);
|
||||||
|
super::print_scope_hint(project_path);
|
||||||
|
println!();
|
||||||
|
|
||||||
|
if r.users.is_empty() {
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("No overlapping users found for this path.")
|
||||||
|
);
|
||||||
|
println!();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
println!(
|
||||||
|
" {:<16} {:<6} {:>7} {:<12} {}",
|
||||||
|
Theme::bold().render("Username"),
|
||||||
|
Theme::bold().render("Role"),
|
||||||
|
Theme::bold().render("MRs"),
|
||||||
|
Theme::bold().render("Last Seen"),
|
||||||
|
Theme::bold().render("MR Refs"),
|
||||||
|
);
|
||||||
|
|
||||||
|
for user in &r.users {
|
||||||
|
let mr_str = user
|
||||||
|
.mr_refs
|
||||||
|
.iter()
|
||||||
|
.take(5)
|
||||||
|
.cloned()
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(", ");
|
||||||
|
let overflow = if user.mr_refs.len() > 5 {
|
||||||
|
format!(" +{}", user.mr_refs.len() - 5)
|
||||||
|
} else {
|
||||||
|
String::new()
|
||||||
|
};
|
||||||
|
|
||||||
|
println!(
|
||||||
|
" {:<16} {:<6} {:>7} {:<12} {}{}",
|
||||||
|
Theme::info().render(&format!("{} {}", Icons::user(), user.username)),
|
||||||
|
format_overlap_role(user),
|
||||||
|
user.touch_count,
|
||||||
|
render::format_relative_time(user.last_seen_at),
|
||||||
|
mr_str,
|
||||||
|
overflow,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if r.truncated {
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("(showing first -n; rerun with a higher --limit)")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Serialize the overlap result for robot (JSON) output.
pub(super) fn overlap_to_json(r: &OverlapResult) -> serde_json::Value {
    // Build the per-user objects first so the top-level json! stays flat.
    let users: Vec<serde_json::Value> = r
        .users
        .iter()
        .map(|u| {
            serde_json::json!({
                "username": u.username,
                "role": format_overlap_role(u),
                "author_touch_count": u.author_touch_count,
                "review_touch_count": u.review_touch_count,
                "touch_count": u.touch_count,
                "last_seen_at": ms_to_iso(u.last_seen_at),
                "mr_refs": u.mr_refs,
                "mr_refs_total": u.mr_refs_total,
                "mr_refs_truncated": u.mr_refs_truncated,
            })
        })
        .collect();

    serde_json::json!({
        "path_query": r.path_query,
        "path_match": r.path_match,
        "truncated": r.truncated,
        "users": users,
    })
}
|
||||||
214
src/cli/commands/who/reviews.rs
Normal file
214
src/cli/commands/who/reviews.rs
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use rusqlite::Connection;
|
||||||
|
|
||||||
|
use crate::cli::render::{Icons, Theme};
|
||||||
|
use crate::core::error::Result;
|
||||||
|
|
||||||
|
use super::types::*;
|
||||||
|
|
||||||
|
// ─── Query: Reviews Mode ────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
pub(super) fn query_reviews(
|
||||||
|
conn: &Connection,
|
||||||
|
username: &str,
|
||||||
|
project_id: Option<i64>,
|
||||||
|
since_ms: i64,
|
||||||
|
) -> Result<ReviewsResult> {
|
||||||
|
// Force the partial index on DiffNote queries (same rationale as expert mode).
|
||||||
|
// COUNT + COUNT(DISTINCT) + category extraction all benefit from 26K DiffNote
|
||||||
|
// scan vs 282K notes full scan: measured 25x speedup.
|
||||||
|
let total_sql = "SELECT COUNT(*) FROM notes n
|
||||||
|
INDEXED BY idx_notes_diffnote_path_created
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN merge_requests m ON d.merge_request_id = m.id
|
||||||
|
WHERE n.author_username = ?1
|
||||||
|
AND n.note_type = 'DiffNote'
|
||||||
|
AND n.is_system = 0
|
||||||
|
AND (m.author_username IS NULL OR m.author_username != ?1)
|
||||||
|
AND n.created_at >= ?2
|
||||||
|
AND (?3 IS NULL OR n.project_id = ?3)";
|
||||||
|
|
||||||
|
let total_diffnotes: u32 = conn.query_row(
|
||||||
|
total_sql,
|
||||||
|
rusqlite::params![username, since_ms, project_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Count distinct MRs reviewed
|
||||||
|
let mrs_sql = "SELECT COUNT(DISTINCT m.id) FROM notes n
|
||||||
|
INDEXED BY idx_notes_diffnote_path_created
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN merge_requests m ON d.merge_request_id = m.id
|
||||||
|
WHERE n.author_username = ?1
|
||||||
|
AND n.note_type = 'DiffNote'
|
||||||
|
AND n.is_system = 0
|
||||||
|
AND (m.author_username IS NULL OR m.author_username != ?1)
|
||||||
|
AND n.created_at >= ?2
|
||||||
|
AND (?3 IS NULL OR n.project_id = ?3)";
|
||||||
|
|
||||||
|
let mrs_reviewed: u32 = conn.query_row(
|
||||||
|
mrs_sql,
|
||||||
|
rusqlite::params![username, since_ms, project_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Extract prefixed categories: body starts with **prefix**
|
||||||
|
let cat_sql = "SELECT
|
||||||
|
SUBSTR(ltrim(n.body), 3, INSTR(SUBSTR(ltrim(n.body), 3), '**') - 1) AS raw_prefix,
|
||||||
|
COUNT(*) AS cnt
|
||||||
|
FROM notes n INDEXED BY idx_notes_diffnote_path_created
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN merge_requests m ON d.merge_request_id = m.id
|
||||||
|
WHERE n.author_username = ?1
|
||||||
|
AND n.note_type = 'DiffNote'
|
||||||
|
AND n.is_system = 0
|
||||||
|
AND (m.author_username IS NULL OR m.author_username != ?1)
|
||||||
|
AND ltrim(n.body) LIKE '**%**%'
|
||||||
|
AND n.created_at >= ?2
|
||||||
|
AND (?3 IS NULL OR n.project_id = ?3)
|
||||||
|
GROUP BY raw_prefix
|
||||||
|
ORDER BY cnt DESC";
|
||||||
|
|
||||||
|
let mut stmt = conn.prepare_cached(cat_sql)?;
|
||||||
|
let raw_categories: Vec<(String, u32)> = stmt
|
||||||
|
.query_map(rusqlite::params![username, since_ms, project_id], |row| {
|
||||||
|
Ok((row.get::<_, String>(0)?, row.get(1)?))
|
||||||
|
})?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
// Normalize categories: lowercase, strip trailing colon/space,
|
||||||
|
// merge nit/nitpick variants, merge (non-blocking) variants
|
||||||
|
let mut merged: HashMap<String, u32> = HashMap::new();
|
||||||
|
for (raw, count) in &raw_categories {
|
||||||
|
let normalized = normalize_review_prefix(raw);
|
||||||
|
if !normalized.is_empty() {
|
||||||
|
*merged.entry(normalized).or_insert(0) += count;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let categorized_count: u32 = merged.values().sum();
|
||||||
|
|
||||||
|
let mut categories: Vec<ReviewCategory> = merged
|
||||||
|
.into_iter()
|
||||||
|
.map(|(name, count)| {
|
||||||
|
let percentage = if categorized_count > 0 {
|
||||||
|
f64::from(count) / f64::from(categorized_count) * 100.0
|
||||||
|
} else {
|
||||||
|
0.0
|
||||||
|
};
|
||||||
|
ReviewCategory {
|
||||||
|
name,
|
||||||
|
count,
|
||||||
|
percentage,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
categories.sort_by(|a, b| b.count.cmp(&a.count));
|
||||||
|
|
||||||
|
Ok(ReviewsResult {
|
||||||
|
username: username.to_string(),
|
||||||
|
total_diffnotes,
|
||||||
|
categorized_count,
|
||||||
|
mrs_reviewed,
|
||||||
|
categories,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Normalize a raw review prefix like "Suggestion (non-blocking):" into "suggestion".
|
||||||
|
pub(super) fn normalize_review_prefix(raw: &str) -> String {
|
||||||
|
let s = raw.trim().trim_end_matches(':').trim().to_lowercase();
|
||||||
|
|
||||||
|
// Strip "(non-blocking)" and similar parentheticals
|
||||||
|
let s = if let Some(idx) = s.find('(') {
|
||||||
|
s[..idx].trim().to_string()
|
||||||
|
} else {
|
||||||
|
s
|
||||||
|
};
|
||||||
|
|
||||||
|
// Merge nit/nitpick variants
|
||||||
|
match s.as_str() {
|
||||||
|
"nitpick" | "nit" => "nit".to_string(),
|
||||||
|
other => other.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Human Renderer ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
pub(super) fn print_reviews_human(r: &ReviewsResult) {
|
||||||
|
println!();
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
Theme::bold().render(&format!(
|
||||||
|
"{} {} -- Review Patterns",
|
||||||
|
Icons::user(),
|
||||||
|
r.username
|
||||||
|
))
|
||||||
|
);
|
||||||
|
println!("{}", "\u{2500}".repeat(60));
|
||||||
|
println!();
|
||||||
|
|
||||||
|
if r.total_diffnotes == 0 {
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("No review comments found for this user.")
|
||||||
|
);
|
||||||
|
println!();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
println!(
|
||||||
|
" {} DiffNotes across {} MRs ({} categorized)",
|
||||||
|
Theme::bold().render(&r.total_diffnotes.to_string()),
|
||||||
|
Theme::bold().render(&r.mrs_reviewed.to_string()),
|
||||||
|
Theme::bold().render(&r.categorized_count.to_string()),
|
||||||
|
);
|
||||||
|
println!();
|
||||||
|
|
||||||
|
if !r.categories.is_empty() {
|
||||||
|
println!(
|
||||||
|
" {:<16} {:>6} {:>6}",
|
||||||
|
Theme::bold().render("Category"),
|
||||||
|
Theme::bold().render("Count"),
|
||||||
|
Theme::bold().render("%"),
|
||||||
|
);
|
||||||
|
|
||||||
|
for cat in &r.categories {
|
||||||
|
println!(
|
||||||
|
" {:<16} {:>6} {:>5.1}%",
|
||||||
|
Theme::info().render(&cat.name),
|
||||||
|
cat.count,
|
||||||
|
cat.percentage,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let uncategorized = r.total_diffnotes - r.categorized_count;
|
||||||
|
if uncategorized > 0 {
|
||||||
|
println!();
|
||||||
|
println!(
|
||||||
|
" {} {} uncategorized (no **prefix** convention)",
|
||||||
|
Theme::dim().render("Note:"),
|
||||||
|
uncategorized,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Robot Renderer ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Serialize the review-pattern summary for robot (JSON) output.
pub(super) fn reviews_to_json(r: &ReviewsResult) -> serde_json::Value {
    // Percentages are rounded to one decimal place for stable machine output.
    let categories: Vec<serde_json::Value> = r
        .categories
        .iter()
        .map(|c| {
            serde_json::json!({
                "name": c.name,
                "count": c.count,
                "percentage": (c.percentage * 10.0).round() / 10.0,
            })
        })
        .collect();

    serde_json::json!({
        "username": r.username,
        "total_diffnotes": r.total_diffnotes,
        "categorized_count": r.categorized_count,
        "mrs_reviewed": r.mrs_reviewed,
        "categories": categories,
    })
}
|
||||||
185
src/cli/commands/who/types.rs
Normal file
185
src/cli/commands/who/types.rs
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
// ─── Result Types ────────────────────────────────────────────────────────────
//
// All pub result structs and enums for the `who` command family.
// Zero logic — pure data definitions.

/// Top-level run result: carries resolved inputs + the mode-specific result.
pub struct WhoRun {
    // Echo of the query parameters after defaults/resolution were applied.
    pub resolved_input: WhoResolvedInput,
    // Mode-specific payload; exactly one WhoResult variant per mode.
    pub result: WhoResult,
}

/// Resolved query parameters -- computed once, used for robot JSON reproducibility.
pub struct WhoResolvedInput {
    // Mode name as text (matches the WhoResult variant actually produced).
    pub mode: String,
    // Optional project scope: numeric id plus (when resolved) its path.
    pub project_id: Option<i64>,
    pub project_path: Option<String>,
    // Time-window lower bound, carried both as epoch ms and as ISO text.
    pub since_ms: Option<i64>,
    pub since_iso: Option<String>,
    /// "default" (mode default applied), "explicit" (user provided --since), "none" (no window)
    pub since_mode: String,
    // Row limit applied to the mode's query.
    pub limit: u16,
}

/// Top-level result enum -- one variant per mode.
pub enum WhoResult {
    Expert(ExpertResult),
    Workload(WorkloadResult),
    Reviews(ReviewsResult),
    Active(ActiveResult),
    Overlap(OverlapResult),
}
|
||||||
|
|
||||||
|
// --- Expert ---

/// Expert-mode result: users ranked by scored history on a path.
#[derive(Debug, Clone)]
pub struct ExpertResult {
    pub path_query: String,
    /// "exact" or "prefix" -- how the path was matched in SQL.
    pub path_match: String,
    pub experts: Vec<Expert>,
    pub truncated: bool,
}

/// One ranked user in expert-mode output.
#[derive(Debug, Clone)]
pub struct Expert {
    pub username: String,
    pub score: i64,
    /// Unrounded f64 score (only populated when explain_score is set).
    pub score_raw: Option<f64>,
    /// Per-component score breakdown (only populated when explain_score is set).
    pub components: Option<ScoreComponents>,
    pub review_mr_count: u32,
    pub review_note_count: u32,
    pub author_mr_count: u32,
    pub last_seen_ms: i64,
    /// Stable MR references like "group/project!123"
    pub mr_refs: Vec<String>,
    pub mr_refs_total: u32,
    pub mr_refs_truncated: bool,
    /// Per-MR detail breakdown (only populated when --detail is set)
    pub details: Option<Vec<ExpertMrDetail>>,
}

/// Per-component score breakdown for explain mode.
#[derive(Debug, Clone)]
pub struct ScoreComponents {
    pub author: f64,
    pub reviewer_participated: f64,
    pub reviewer_assigned: f64,
    pub notes: f64,
}

/// One MR contributing to an expert's score (detail mode).
#[derive(Debug, Clone)]
pub struct ExpertMrDetail {
    pub mr_ref: String,
    pub title: String,
    /// "R", "A", or "A+R"
    pub role: String,
    pub note_count: u32,
    pub last_activity_ms: i64,
}
|
||||||
|
|
||||||
|
// --- Workload ---

/// Workload-mode result: everything currently on one user's plate,
/// with a per-section truncation flag (true when the query hit the limit).
#[derive(Debug, Clone)]
pub struct WorkloadResult {
    pub username: String,
    pub assigned_issues: Vec<WorkloadIssue>,
    pub authored_mrs: Vec<WorkloadMr>,
    pub reviewing_mrs: Vec<WorkloadMr>,
    pub unresolved_discussions: Vec<WorkloadDiscussion>,
    pub assigned_issues_truncated: bool,
    pub authored_mrs_truncated: bool,
    pub reviewing_mrs_truncated: bool,
    pub unresolved_discussions_truncated: bool,
}

/// An open issue assigned to the user.
#[derive(Debug, Clone)]
pub struct WorkloadIssue {
    pub iid: i64,
    /// Canonical reference: `group/project#iid`
    pub ref_: String,
    pub title: String,
    pub project_path: String,
    pub updated_at: i64,
}

/// An open MR the user authored or is reviewing.
#[derive(Debug, Clone)]
pub struct WorkloadMr {
    pub iid: i64,
    /// Canonical reference: `group/project!iid`
    pub ref_: String,
    pub title: String,
    pub draft: bool,
    pub project_path: String,
    pub author_username: Option<String>,
    pub updated_at: i64,
}

/// An unresolved discussion the user participated in.
#[derive(Debug, Clone)]
pub struct WorkloadDiscussion {
    pub entity_type: String,
    pub entity_iid: i64,
    /// Canonical reference: `group/project!iid` or `group/project#iid`
    pub ref_: String,
    pub entity_title: String,
    pub project_path: String,
    pub last_note_at: i64,
}
|
||||||
|
|
||||||
|
// --- Reviews ---

/// Reviews-mode result: DiffNote volume plus category distribution.
#[derive(Debug, Clone)]
pub struct ReviewsResult {
    pub username: String,
    pub total_diffnotes: u32,
    pub categorized_count: u32,
    pub mrs_reviewed: u32,
    pub categories: Vec<ReviewCategory>,
}

/// One normalized `**prefix**` category with its share of categorized notes.
#[derive(Debug, Clone)]
pub struct ReviewCategory {
    pub name: String,
    pub count: u32,
    pub percentage: f64,
}
|
||||||
|
|
||||||
|
// --- Active ---

/// Active-mode result: recently active unresolved discussions.
#[derive(Debug, Clone)]
pub struct ActiveResult {
    pub discussions: Vec<ActiveDiscussion>,
    /// Count of unresolved discussions *within the time window*, not total across all time.
    pub total_unresolved_in_window: u32,
    pub truncated: bool,
}

/// One active discussion with its participant roster.
#[derive(Debug, Clone)]
pub struct ActiveDiscussion {
    pub discussion_id: i64,
    pub entity_type: String,
    pub entity_iid: i64,
    pub entity_title: String,
    pub project_path: String,
    pub last_note_at: i64,
    pub note_count: u32,
    pub participants: Vec<String>,
    pub participants_total: u32,
    pub participants_truncated: bool,
}
|
||||||
|
|
||||||
|
// --- Overlap ---

/// Overlap-mode result: users who touched the same path.
#[derive(Debug, Clone)]
pub struct OverlapResult {
    pub path_query: String,
    /// "exact" or "prefix" -- how the path was matched in SQL.
    pub path_match: String,
    pub users: Vec<OverlapUser>,
    pub truncated: bool,
}

/// One overlapping user with author/reviewer touch counts.
#[derive(Debug, Clone)]
pub struct OverlapUser {
    pub username: String,
    pub author_touch_count: u32,
    pub review_touch_count: u32,
    pub touch_count: u32,
    pub last_seen_at: i64,
    /// Stable MR references like "group/project!123"
    pub mr_refs: Vec<String>,
    pub mr_refs_total: u32,
    pub mr_refs_truncated: bool,
}

/// Maximum MR references to retain per user in output (shared across modes).
pub const MAX_MR_REFS_PER_USER: usize = 50;
|
||||||
370
src/cli/commands/who/workload.rs
Normal file
370
src/cli/commands/who/workload.rs
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
use rusqlite::Connection;
|
||||||
|
|
||||||
|
use crate::cli::render::{self, Icons, Theme};
|
||||||
|
use crate::core::error::Result;
|
||||||
|
use crate::core::time::ms_to_iso;
|
||||||
|
|
||||||
|
use super::types::*;
|
||||||
|
|
||||||
|
// ─── Query: Workload Mode ───────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Gather a user's current workload in four independent queries:
/// open issues assigned to them, open MRs they authored, open MRs they are
/// reviewing, and unresolved discussions they participated in.
///
/// Each query fetches `limit + 1` rows so truncation can be detected without
/// a separate COUNT; the probe row is dropped before returning.
/// `project_id` and `since_ms` are optional filters (NULL disables them in
/// SQL via the `?n IS NULL OR ...` pattern). `include_closed` affects only
/// the discussions query: when false, discussions whose parent issue/MR is
/// no longer open are filtered out.
pub(super) fn query_workload(
    conn: &Connection,
    username: &str,
    project_id: Option<i64>,
    since_ms: Option<i64>,
    limit: usize,
    include_closed: bool,
) -> Result<WorkloadResult> {
    // Over-fetch by one row per section to detect truncation.
    let limit_plus_one = (limit + 1) as i64;

    // Query 1: Open issues assigned to user
    let issues_sql = "SELECT i.iid,
            (p.path_with_namespace || '#' || i.iid) AS ref,
            i.title, p.path_with_namespace, i.updated_at
        FROM issues i
        JOIN issue_assignees ia ON ia.issue_id = i.id
        JOIN projects p ON i.project_id = p.id
        WHERE ia.username = ?1
          AND i.state = 'opened'
          AND (?2 IS NULL OR i.project_id = ?2)
          AND (?3 IS NULL OR i.updated_at >= ?3)
        ORDER BY i.updated_at DESC
        LIMIT ?4";

    let mut stmt = conn.prepare_cached(issues_sql)?;
    let assigned_issues: Vec<WorkloadIssue> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                Ok(WorkloadIssue {
                    iid: row.get(0)?,
                    ref_: row.get(1)?,
                    title: row.get(2)?,
                    project_path: row.get(3)?,
                    updated_at: row.get(4)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Query 2: Open MRs authored
    let authored_sql = "SELECT m.iid,
            (p.path_with_namespace || '!' || m.iid) AS ref,
            m.title, m.draft, p.path_with_namespace, m.updated_at
        FROM merge_requests m
        JOIN projects p ON m.project_id = p.id
        WHERE m.author_username = ?1
          AND m.state = 'opened'
          AND (?2 IS NULL OR m.project_id = ?2)
          AND (?3 IS NULL OR m.updated_at >= ?3)
        ORDER BY m.updated_at DESC
        LIMIT ?4";
    let mut stmt = conn.prepare_cached(authored_sql)?;
    let authored_mrs: Vec<WorkloadMr> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                Ok(WorkloadMr {
                    iid: row.get(0)?,
                    ref_: row.get(1)?,
                    title: row.get(2)?,
                    // Stored as an integer in SQLite; nonzero means draft.
                    draft: row.get::<_, i32>(3)? != 0,
                    project_path: row.get(4)?,
                    // Author is the queried user by definition, so not repeated.
                    author_username: None,
                    updated_at: row.get(5)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Query 3: Open MRs where user is reviewer
    let reviewing_sql = "SELECT m.iid,
            (p.path_with_namespace || '!' || m.iid) AS ref,
            m.title, m.draft, p.path_with_namespace,
            m.author_username, m.updated_at
        FROM merge_requests m
        JOIN mr_reviewers r ON r.merge_request_id = m.id
        JOIN projects p ON m.project_id = p.id
        WHERE r.username = ?1
          AND m.state = 'opened'
          AND (?2 IS NULL OR m.project_id = ?2)
          AND (?3 IS NULL OR m.updated_at >= ?3)
        ORDER BY m.updated_at DESC
        LIMIT ?4";
    let mut stmt = conn.prepare_cached(reviewing_sql)?;
    let reviewing_mrs: Vec<WorkloadMr> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                Ok(WorkloadMr {
                    iid: row.get(0)?,
                    ref_: row.get(1)?,
                    title: row.get(2)?,
                    draft: row.get::<_, i32>(3)? != 0,
                    project_path: row.get(4)?,
                    author_username: row.get(5)?,
                    updated_at: row.get(6)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Query 4: Unresolved discussions where user participated
    // (participation = at least one non-system note authored by the user).
    // The LEFT JOINs make the i.*/m.* columns NULL for the other entity kind,
    // hence the `IS NULL OR` guards in the optional state filter below.
    let state_filter = if include_closed {
        ""
    } else {
        " AND (i.id IS NULL OR i.state = 'opened')
          AND (m.id IS NULL OR m.state = 'opened')"
    };
    let disc_sql = format!(
        "SELECT d.noteable_type,
                COALESCE(i.iid, m.iid) AS entity_iid,
                (p.path_with_namespace ||
                 CASE WHEN d.noteable_type = 'MergeRequest' THEN '!' ELSE '#' END ||
                 COALESCE(i.iid, m.iid)) AS ref,
                COALESCE(i.title, m.title) AS entity_title,
                p.path_with_namespace,
                d.last_note_at
            FROM discussions d
            JOIN projects p ON d.project_id = p.id
            LEFT JOIN issues i ON d.issue_id = i.id
            LEFT JOIN merge_requests m ON d.merge_request_id = m.id
            WHERE d.resolvable = 1 AND d.resolved = 0
              AND EXISTS (
                  SELECT 1 FROM notes n
                  WHERE n.discussion_id = d.id
                    AND n.author_username = ?1
                    AND n.is_system = 0
              )
              AND (?2 IS NULL OR d.project_id = ?2)
              AND (?3 IS NULL OR d.last_note_at >= ?3)
              {state_filter}
            ORDER BY d.last_note_at DESC
            LIMIT ?4"
    );

    let mut stmt = conn.prepare_cached(&disc_sql)?;
    let unresolved_discussions: Vec<WorkloadDiscussion> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                let noteable_type: String = row.get(0)?;
                // Map the DB noteable_type to the short display label.
                let entity_type = if noteable_type == "MergeRequest" {
                    "MR"
                } else {
                    "Issue"
                };
                Ok(WorkloadDiscussion {
                    entity_type: entity_type.to_string(),
                    entity_iid: row.get(1)?,
                    ref_: row.get(2)?,
                    entity_title: row.get(3)?,
                    project_path: row.get(4)?,
                    last_note_at: row.get(5)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Truncation detection: more than `limit` rows means the +1 probe hit.
    let assigned_issues_truncated = assigned_issues.len() > limit;
    let authored_mrs_truncated = authored_mrs.len() > limit;
    let reviewing_mrs_truncated = reviewing_mrs.len() > limit;
    let unresolved_discussions_truncated = unresolved_discussions.len() > limit;

    // Drop the probe row (if present) so each section holds at most `limit`.
    let assigned_issues: Vec<WorkloadIssue> = assigned_issues.into_iter().take(limit).collect();
    let authored_mrs: Vec<WorkloadMr> = authored_mrs.into_iter().take(limit).collect();
    let reviewing_mrs: Vec<WorkloadMr> = reviewing_mrs.into_iter().take(limit).collect();
    let unresolved_discussions: Vec<WorkloadDiscussion> =
        unresolved_discussions.into_iter().take(limit).collect();

    Ok(WorkloadResult {
        username: username.to_string(),
        assigned_issues,
        authored_mrs,
        reviewing_mrs,
        unresolved_discussions,
        assigned_issues_truncated,
        authored_mrs_truncated,
        reviewing_mrs_truncated,
        unresolved_discussions_truncated,
    })
}
|
||||||
|
|
||||||
|
// ─── Human Renderer: Workload ───────────────────────────────────────────────
|
||||||
|
|
||||||
|
pub(super) fn print_workload_human(r: &WorkloadResult) {
|
||||||
|
println!();
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
Theme::bold().render(&format!(
|
||||||
|
"{} {} -- Workload Summary",
|
||||||
|
Icons::user(),
|
||||||
|
r.username
|
||||||
|
))
|
||||||
|
);
|
||||||
|
println!("{}", "\u{2500}".repeat(60));
|
||||||
|
|
||||||
|
if !r.assigned_issues.is_empty() {
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
render::section_divider(&format!("Assigned Issues ({})", r.assigned_issues.len()))
|
||||||
|
);
|
||||||
|
for item in &r.assigned_issues {
|
||||||
|
println!(
|
||||||
|
" {} {} {}",
|
||||||
|
Theme::info().render(&item.ref_),
|
||||||
|
render::truncate(&item.title, 40),
|
||||||
|
Theme::dim().render(&render::format_relative_time(item.updated_at)),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if r.assigned_issues_truncated {
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("(truncated; rerun with a higher --limit)")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !r.authored_mrs.is_empty() {
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
render::section_divider(&format!("Authored MRs ({})", r.authored_mrs.len()))
|
||||||
|
);
|
||||||
|
for mr in &r.authored_mrs {
|
||||||
|
let draft = if mr.draft { " [draft]" } else { "" };
|
||||||
|
println!(
|
||||||
|
" {} {}{} {}",
|
||||||
|
Theme::info().render(&mr.ref_),
|
||||||
|
render::truncate(&mr.title, 35),
|
||||||
|
Theme::dim().render(draft),
|
||||||
|
Theme::dim().render(&render::format_relative_time(mr.updated_at)),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if r.authored_mrs_truncated {
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("(truncated; rerun with a higher --limit)")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !r.reviewing_mrs.is_empty() {
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
render::section_divider(&format!("Reviewing MRs ({})", r.reviewing_mrs.len()))
|
||||||
|
);
|
||||||
|
for mr in &r.reviewing_mrs {
|
||||||
|
let author = mr
|
||||||
|
.author_username
|
||||||
|
.as_deref()
|
||||||
|
.map(|a| format!(" by @{a}"))
|
||||||
|
.unwrap_or_default();
|
||||||
|
println!(
|
||||||
|
" {} {}{} {}",
|
||||||
|
Theme::info().render(&mr.ref_),
|
||||||
|
render::truncate(&mr.title, 30),
|
||||||
|
Theme::dim().render(&author),
|
||||||
|
Theme::dim().render(&render::format_relative_time(mr.updated_at)),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if r.reviewing_mrs_truncated {
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("(truncated; rerun with a higher --limit)")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !r.unresolved_discussions.is_empty() {
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
render::section_divider(&format!(
|
||||||
|
"Unresolved Discussions ({})",
|
||||||
|
r.unresolved_discussions.len()
|
||||||
|
))
|
||||||
|
);
|
||||||
|
for disc in &r.unresolved_discussions {
|
||||||
|
println!(
|
||||||
|
" {} {} {} {}",
|
||||||
|
Theme::dim().render(&disc.entity_type),
|
||||||
|
Theme::info().render(&disc.ref_),
|
||||||
|
render::truncate(&disc.entity_title, 35),
|
||||||
|
Theme::dim().render(&render::format_relative_time(disc.last_note_at)),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if r.unresolved_discussions_truncated {
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("(truncated; rerun with a higher --limit)")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if r.assigned_issues.is_empty()
|
||||||
|
&& r.authored_mrs.is_empty()
|
||||||
|
&& r.reviewing_mrs.is_empty()
|
||||||
|
&& r.unresolved_discussions.is_empty()
|
||||||
|
{
|
||||||
|
println!();
|
||||||
|
println!(
|
||||||
|
" {}",
|
||||||
|
Theme::dim().render("No open work items found for this user.")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── JSON Renderer: Workload ────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Build the machine-readable JSON document for a [`WorkloadResult`].
///
/// Mirrors the sections of the human renderer and appends `summary`
/// counts plus per-section `truncation` flags for automation consumers.
/// Timestamps are converted from epoch milliseconds to ISO-8601 strings.
pub(super) fn workload_to_json(r: &WorkloadResult) -> serde_json::Value {
    let assigned_issues: Vec<_> = r
        .assigned_issues
        .iter()
        .map(|i| {
            serde_json::json!({
                "iid": i.iid,
                "ref": i.ref_,
                "title": i.title,
                "project_path": i.project_path,
                "updated_at": ms_to_iso(i.updated_at),
            })
        })
        .collect();

    let authored_mrs: Vec<_> = r
        .authored_mrs
        .iter()
        .map(|m| {
            serde_json::json!({
                "iid": m.iid,
                "ref": m.ref_,
                "title": m.title,
                "draft": m.draft,
                "project_path": m.project_path,
                "updated_at": ms_to_iso(m.updated_at),
            })
        })
        .collect();

    let reviewing_mrs: Vec<_> = r
        .reviewing_mrs
        .iter()
        .map(|m| {
            serde_json::json!({
                "iid": m.iid,
                "ref": m.ref_,
                "title": m.title,
                "draft": m.draft,
                "project_path": m.project_path,
                "author_username": m.author_username,
                "updated_at": ms_to_iso(m.updated_at),
            })
        })
        .collect();

    let unresolved_discussions: Vec<_> = r
        .unresolved_discussions
        .iter()
        .map(|d| {
            serde_json::json!({
                "entity_type": d.entity_type,
                "entity_iid": d.entity_iid,
                "ref": d.ref_,
                "entity_title": d.entity_title,
                "project_path": d.project_path,
                "last_note_at": ms_to_iso(d.last_note_at),
            })
        })
        .collect();

    serde_json::json!({
        "username": r.username,
        "assigned_issues": assigned_issues,
        "authored_mrs": authored_mrs,
        "reviewing_mrs": reviewing_mrs,
        "unresolved_discussions": unresolved_discussions,
        "summary": {
            "assigned_issue_count": r.assigned_issues.len(),
            "authored_mr_count": r.authored_mrs.len(),
            "reviewing_mr_count": r.reviewing_mrs.len(),
            "unresolved_discussion_count": r.unresolved_discussions.len(),
        },
        "truncation": {
            "assigned_issues_truncated": r.assigned_issues_truncated,
            "authored_mrs_truncated": r.authored_mrs_truncated,
            "reviewing_mrs_truncated": r.reviewing_mrs_truncated,
            "unresolved_discussions_truncated": r.unresolved_discussions_truncated,
        }
    })
}
|
||||||
@@ -54,15 +54,27 @@ fn insert_mr(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn insert_issue(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str) {
|
fn insert_issue(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str) {
|
||||||
|
insert_issue_with_state(conn, id, project_id, iid, author, "opened");
|
||||||
|
}
|
||||||
|
|
||||||
|
fn insert_issue_with_state(
|
||||||
|
conn: &Connection,
|
||||||
|
id: i64,
|
||||||
|
project_id: i64,
|
||||||
|
iid: i64,
|
||||||
|
author: &str,
|
||||||
|
state: &str,
|
||||||
|
) {
|
||||||
conn.execute(
|
conn.execute(
|
||||||
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at)
|
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at)
|
||||||
VALUES (?1, ?2, ?3, ?4, ?5, 'opened', ?6, ?7, ?8, ?9)",
|
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)",
|
||||||
rusqlite::params![
|
rusqlite::params![
|
||||||
id,
|
id,
|
||||||
id * 10,
|
id * 10,
|
||||||
project_id,
|
project_id,
|
||||||
iid,
|
iid,
|
||||||
format!("Issue {iid}"),
|
format!("Issue {iid}"),
|
||||||
|
state,
|
||||||
author,
|
author,
|
||||||
now_ms(),
|
now_ms(),
|
||||||
now_ms(),
|
now_ms(),
|
||||||
@@ -134,6 +146,24 @@ fn insert_diffnote(
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn insert_note(conn: &Connection, id: i64, discussion_id: i64, project_id: i64, author: &str) {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (id, gitlab_id, discussion_id, project_id, note_type, is_system, author_username, body, created_at, updated_at, last_seen_at)
|
||||||
|
VALUES (?1, ?2, ?3, ?4, 'DiscussionNote', 0, ?5, 'comment', ?6, ?7, ?8)",
|
||||||
|
rusqlite::params![
|
||||||
|
id,
|
||||||
|
id * 10,
|
||||||
|
discussion_id,
|
||||||
|
project_id,
|
||||||
|
author,
|
||||||
|
now_ms(),
|
||||||
|
now_ms(),
|
||||||
|
now_ms()
|
||||||
|
],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
fn insert_assignee(conn: &Connection, issue_id: i64, username: &str) {
|
fn insert_assignee(conn: &Connection, issue_id: i64, username: &str) {
|
||||||
conn.execute(
|
conn.execute(
|
||||||
"INSERT INTO issue_assignees (issue_id, username) VALUES (?1, ?2)",
|
"INSERT INTO issue_assignees (issue_id, username) VALUES (?1, ?2)",
|
||||||
@@ -263,6 +293,7 @@ fn test_is_file_path_discrimination() {
|
|||||||
as_of: None,
|
as_of: None,
|
||||||
explain_score: false,
|
explain_score: false,
|
||||||
include_bots: false,
|
include_bots: false,
|
||||||
|
include_closed: false,
|
||||||
all_history: false,
|
all_history: false,
|
||||||
})
|
})
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
@@ -286,6 +317,7 @@ fn test_is_file_path_discrimination() {
|
|||||||
as_of: None,
|
as_of: None,
|
||||||
explain_score: false,
|
explain_score: false,
|
||||||
include_bots: false,
|
include_bots: false,
|
||||||
|
include_closed: false,
|
||||||
all_history: false,
|
all_history: false,
|
||||||
})
|
})
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
@@ -309,6 +341,7 @@ fn test_is_file_path_discrimination() {
|
|||||||
as_of: None,
|
as_of: None,
|
||||||
explain_score: false,
|
explain_score: false,
|
||||||
include_bots: false,
|
include_bots: false,
|
||||||
|
include_closed: false,
|
||||||
all_history: false,
|
all_history: false,
|
||||||
})
|
})
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
@@ -332,6 +365,7 @@ fn test_is_file_path_discrimination() {
|
|||||||
as_of: None,
|
as_of: None,
|
||||||
explain_score: false,
|
explain_score: false,
|
||||||
include_bots: false,
|
include_bots: false,
|
||||||
|
include_closed: false,
|
||||||
all_history: false,
|
all_history: false,
|
||||||
})
|
})
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
@@ -355,6 +389,7 @@ fn test_is_file_path_discrimination() {
|
|||||||
as_of: None,
|
as_of: None,
|
||||||
explain_score: false,
|
explain_score: false,
|
||||||
include_bots: false,
|
include_bots: false,
|
||||||
|
include_closed: false,
|
||||||
all_history: false,
|
all_history: false,
|
||||||
})
|
})
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
@@ -378,6 +413,7 @@ fn test_is_file_path_discrimination() {
|
|||||||
as_of: None,
|
as_of: None,
|
||||||
explain_score: false,
|
explain_score: false,
|
||||||
include_bots: false,
|
include_bots: false,
|
||||||
|
include_closed: false,
|
||||||
all_history: false,
|
all_history: false,
|
||||||
})
|
})
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
@@ -402,6 +438,7 @@ fn test_detail_rejected_outside_expert_mode() {
|
|||||||
as_of: None,
|
as_of: None,
|
||||||
explain_score: false,
|
explain_score: false,
|
||||||
include_bots: false,
|
include_bots: false,
|
||||||
|
include_closed: false,
|
||||||
all_history: false,
|
all_history: false,
|
||||||
};
|
};
|
||||||
let mode = resolve_mode(&args).unwrap();
|
let mode = resolve_mode(&args).unwrap();
|
||||||
@@ -430,6 +467,7 @@ fn test_detail_allowed_in_expert_mode() {
|
|||||||
as_of: None,
|
as_of: None,
|
||||||
explain_score: false,
|
explain_score: false,
|
||||||
include_bots: false,
|
include_bots: false,
|
||||||
|
include_closed: false,
|
||||||
all_history: false,
|
all_history: false,
|
||||||
};
|
};
|
||||||
let mode = resolve_mode(&args).unwrap();
|
let mode = resolve_mode(&args).unwrap();
|
||||||
@@ -579,7 +617,7 @@ fn test_workload_query() {
|
|||||||
insert_assignee(&conn, 1, "dev_a");
|
insert_assignee(&conn, 1, "dev_a");
|
||||||
insert_mr(&conn, 1, 1, 100, "dev_a", "opened");
|
insert_mr(&conn, 1, 1, 100, "dev_a", "opened");
|
||||||
|
|
||||||
let result = query_workload(&conn, "dev_a", None, None, 20).unwrap();
|
let result = query_workload(&conn, "dev_a", None, None, 20, true).unwrap();
|
||||||
assert_eq!(result.assigned_issues.len(), 1);
|
assert_eq!(result.assigned_issues.len(), 1);
|
||||||
assert_eq!(result.authored_mrs.len(), 1);
|
assert_eq!(result.authored_mrs.len(), 1);
|
||||||
}
|
}
|
||||||
@@ -626,7 +664,7 @@ fn test_active_query() {
|
|||||||
// Second note by same participant -- note_count should be 2, participants still ["reviewer_b"]
|
// Second note by same participant -- note_count should be 2, participants still ["reviewer_b"]
|
||||||
insert_diffnote(&conn, 2, 1, 1, "reviewer_b", "src/foo.rs", "follow-up");
|
insert_diffnote(&conn, 2, 1, 1, "reviewer_b", "src/foo.rs", "follow-up");
|
||||||
|
|
||||||
let result = query_active(&conn, None, 0, 20).unwrap();
|
let result = query_active(&conn, None, 0, 20, true).unwrap();
|
||||||
assert_eq!(result.total_unresolved_in_window, 1);
|
assert_eq!(result.total_unresolved_in_window, 1);
|
||||||
assert_eq!(result.discussions.len(), 1);
|
assert_eq!(result.discussions.len(), 1);
|
||||||
assert_eq!(result.discussions[0].participants, vec!["reviewer_b"]);
|
assert_eq!(result.discussions[0].participants, vec!["reviewer_b"]);
|
||||||
@@ -878,7 +916,7 @@ fn test_active_participants_sorted() {
|
|||||||
insert_diffnote(&conn, 1, 1, 1, "zebra_user", "src/foo.rs", "note 1");
|
insert_diffnote(&conn, 1, 1, 1, "zebra_user", "src/foo.rs", "note 1");
|
||||||
insert_diffnote(&conn, 2, 1, 1, "alpha_user", "src/foo.rs", "note 2");
|
insert_diffnote(&conn, 2, 1, 1, "alpha_user", "src/foo.rs", "note 2");
|
||||||
|
|
||||||
let result = query_active(&conn, None, 0, 20).unwrap();
|
let result = query_active(&conn, None, 0, 20, true).unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.discussions[0].participants,
|
result.discussions[0].participants,
|
||||||
vec!["alpha_user", "zebra_user"]
|
vec!["alpha_user", "zebra_user"]
|
||||||
@@ -3265,3 +3303,94 @@ fn test_deterministic_accumulation_order() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ─── Tests: include_closed filter ────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn workload_excludes_closed_entity_discussions() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_project(&conn, 1, "group/repo");
|
||||||
|
|
||||||
|
// Open issue with unresolved discussion
|
||||||
|
insert_issue_with_state(&conn, 10, 1, 10, "someone", "opened");
|
||||||
|
insert_discussion(&conn, 100, 1, None, Some(10), true, false);
|
||||||
|
insert_note(&conn, 1000, 100, 1, "alice");
|
||||||
|
|
||||||
|
// Closed issue with unresolved discussion
|
||||||
|
insert_issue_with_state(&conn, 20, 1, 20, "someone", "closed");
|
||||||
|
insert_discussion(&conn, 200, 1, None, Some(20), true, false);
|
||||||
|
insert_note(&conn, 2000, 200, 1, "alice");
|
||||||
|
|
||||||
|
// Default: exclude closed
|
||||||
|
let result = query_workload(&conn, "alice", None, None, 50, false).unwrap();
|
||||||
|
assert_eq!(result.unresolved_discussions.len(), 1);
|
||||||
|
assert_eq!(result.unresolved_discussions[0].entity_iid, 10);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn workload_include_closed_flag_shows_all() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_project(&conn, 1, "group/repo");
|
||||||
|
|
||||||
|
insert_issue_with_state(&conn, 10, 1, 10, "someone", "opened");
|
||||||
|
insert_discussion(&conn, 100, 1, None, Some(10), true, false);
|
||||||
|
insert_note(&conn, 1000, 100, 1, "alice");
|
||||||
|
|
||||||
|
insert_issue_with_state(&conn, 20, 1, 20, "someone", "closed");
|
||||||
|
insert_discussion(&conn, 200, 1, None, Some(20), true, false);
|
||||||
|
insert_note(&conn, 2000, 200, 1, "alice");
|
||||||
|
|
||||||
|
let result = query_workload(&conn, "alice", None, None, 50, true).unwrap();
|
||||||
|
assert_eq!(result.unresolved_discussions.len(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn workload_excludes_merged_mr_discussions() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_project(&conn, 1, "group/repo");
|
||||||
|
|
||||||
|
// Open MR with unresolved discussion
|
||||||
|
insert_mr(&conn, 10, 1, 10, "someone", "opened");
|
||||||
|
insert_discussion(&conn, 100, 1, Some(10), None, true, false);
|
||||||
|
insert_note(&conn, 1000, 100, 1, "alice");
|
||||||
|
|
||||||
|
// Merged MR with unresolved discussion
|
||||||
|
insert_mr(&conn, 20, 1, 20, "someone", "merged");
|
||||||
|
insert_discussion(&conn, 200, 1, Some(20), None, true, false);
|
||||||
|
insert_note(&conn, 2000, 200, 1, "alice");
|
||||||
|
|
||||||
|
let result = query_workload(&conn, "alice", None, None, 50, false).unwrap();
|
||||||
|
assert_eq!(result.unresolved_discussions.len(), 1);
|
||||||
|
assert_eq!(result.unresolved_discussions[0].entity_iid, 10);
|
||||||
|
|
||||||
|
// include_closed shows both
|
||||||
|
let result = query_workload(&conn, "alice", None, None, 50, true).unwrap();
|
||||||
|
assert_eq!(result.unresolved_discussions.len(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn active_excludes_closed_entity_discussions() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
insert_project(&conn, 1, "group/repo");
|
||||||
|
|
||||||
|
// Open issue with unresolved discussion
|
||||||
|
insert_issue_with_state(&conn, 10, 1, 10, "someone", "opened");
|
||||||
|
insert_discussion(&conn, 100, 1, None, Some(10), true, false);
|
||||||
|
insert_note(&conn, 1000, 100, 1, "alice");
|
||||||
|
|
||||||
|
// Closed issue with unresolved discussion
|
||||||
|
insert_issue_with_state(&conn, 20, 1, 20, "someone", "closed");
|
||||||
|
insert_discussion(&conn, 200, 1, None, Some(20), true, false);
|
||||||
|
insert_note(&conn, 2000, 200, 1, "alice");
|
||||||
|
|
||||||
|
// Default: exclude closed
|
||||||
|
let result = query_active(&conn, None, 0, 50, false).unwrap();
|
||||||
|
assert_eq!(result.discussions.len(), 1);
|
||||||
|
assert_eq!(result.discussions[0].entity_iid, 10);
|
||||||
|
assert_eq!(result.total_unresolved_in_window, 1);
|
||||||
|
|
||||||
|
// include_closed shows both
|
||||||
|
let result = query_active(&conn, None, 0, 50, true).unwrap();
|
||||||
|
assert_eq!(result.discussions.len(), 2);
|
||||||
|
assert_eq!(result.total_unresolved_in_window, 2);
|
||||||
|
}
|
||||||
|
|||||||
103
src/cli/mod.rs
103
src/cli/mod.rs
@@ -16,7 +16,9 @@ use std::io::IsTerminal;
|
|||||||
GITLAB_TOKEN GitLab personal access token (or name set in config)
|
GITLAB_TOKEN GitLab personal access token (or name set in config)
|
||||||
LORE_ROBOT Enable robot/JSON mode (non-empty, non-zero value)
|
LORE_ROBOT Enable robot/JSON mode (non-empty, non-zero value)
|
||||||
LORE_CONFIG_PATH Override config file location
|
LORE_CONFIG_PATH Override config file location
|
||||||
NO_COLOR Disable color output (any non-empty value)")]
|
NO_COLOR Disable color output (any non-empty value)
|
||||||
|
LORE_ICONS Override icon set: nerd, unicode, or ascii
|
||||||
|
NERD_FONTS Enable Nerd Font icons when set to a non-empty value")]
|
||||||
pub struct Cli {
|
pub struct Cli {
|
||||||
/// Path to config file
|
/// Path to config file
|
||||||
#[arg(short = 'c', long, global = true, help = "Path to config file")]
|
#[arg(short = 'c', long, global = true, help = "Path to config file")]
|
||||||
@@ -135,19 +137,35 @@ pub enum Commands {
|
|||||||
Count(CountArgs),
|
Count(CountArgs),
|
||||||
|
|
||||||
/// Show sync state
|
/// Show sync state
|
||||||
#[command(visible_alias = "st")]
|
#[command(
|
||||||
|
visible_alias = "st",
|
||||||
|
after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore status # Show last sync times per project
|
||||||
|
lore --robot status # JSON output for automation"
|
||||||
|
)]
|
||||||
Status,
|
Status,
|
||||||
|
|
||||||
/// Verify GitLab authentication
|
/// Verify GitLab authentication
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore auth # Verify token and show user info
|
||||||
|
lore --robot auth # JSON output for automation")]
|
||||||
Auth,
|
Auth,
|
||||||
|
|
||||||
/// Check environment health
|
/// Check environment health
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore doctor # Check config, token, database, Ollama
|
||||||
|
lore --robot doctor # JSON output for automation")]
|
||||||
Doctor,
|
Doctor,
|
||||||
|
|
||||||
/// Show version information
|
/// Show version information
|
||||||
Version,
|
Version,
|
||||||
|
|
||||||
/// Initialize configuration and database
|
/// Initialize configuration and database
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore init # Interactive setup
|
||||||
|
lore init --force # Overwrite existing config
|
||||||
|
lore --robot init --gitlab-url https://gitlab.com \\
|
||||||
|
--token-env-var GITLAB_TOKEN --projects group/repo # Non-interactive setup")]
|
||||||
Init {
|
Init {
|
||||||
/// Skip overwrite confirmation
|
/// Skip overwrite confirmation
|
||||||
#[arg(short = 'f', long)]
|
#[arg(short = 'f', long)]
|
||||||
@@ -174,11 +192,14 @@ pub enum Commands {
|
|||||||
default_project: Option<String>,
|
default_project: Option<String>,
|
||||||
},
|
},
|
||||||
|
|
||||||
|
/// Back up local database (not yet implemented)
|
||||||
#[command(hide = true)]
|
#[command(hide = true)]
|
||||||
Backup,
|
Backup,
|
||||||
|
|
||||||
|
/// Reset local database (not yet implemented)
|
||||||
#[command(hide = true)]
|
#[command(hide = true)]
|
||||||
Reset {
|
Reset {
|
||||||
|
/// Skip confirmation prompt
|
||||||
#[arg(short = 'y', long)]
|
#[arg(short = 'y', long)]
|
||||||
yes: bool,
|
yes: bool,
|
||||||
},
|
},
|
||||||
@@ -202,9 +223,15 @@ pub enum Commands {
|
|||||||
Sync(SyncArgs),
|
Sync(SyncArgs),
|
||||||
|
|
||||||
/// Run pending database migrations
|
/// Run pending database migrations
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore migrate # Apply pending migrations
|
||||||
|
lore --robot migrate # JSON output for automation")]
|
||||||
Migrate,
|
Migrate,
|
||||||
|
|
||||||
/// Quick health check: config, database, schema version
|
/// Quick health check: config, database, schema version
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore health # Quick pre-flight check (exit 0 = healthy)
|
||||||
|
lore --robot health # JSON output for automation")]
|
||||||
Health,
|
Health,
|
||||||
|
|
||||||
/// Machine-readable command manifest for agent self-discovery
|
/// Machine-readable command manifest for agent self-discovery
|
||||||
@@ -242,6 +269,10 @@ pub enum Commands {
|
|||||||
Trace(TraceArgs),
|
Trace(TraceArgs),
|
||||||
|
|
||||||
/// Detect discussion divergence from original intent
|
/// Detect discussion divergence from original intent
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore drift issues 42 # Check drift on issue #42
|
||||||
|
lore drift issues 42 --threshold 0.3 # Custom similarity threshold
|
||||||
|
lore --robot drift issues 42 -p group/repo # JSON output, scoped to project")]
|
||||||
Drift {
|
Drift {
|
||||||
/// Entity type (currently only "issues" supported)
|
/// Entity type (currently only "issues" supported)
|
||||||
#[arg(value_parser = ["issues"])]
|
#[arg(value_parser = ["issues"])]
|
||||||
@@ -259,6 +290,14 @@ pub enum Commands {
|
|||||||
project: Option<String>,
|
project: Option<String>,
|
||||||
},
|
},
|
||||||
|
|
||||||
|
/// Manage cron-based automatic syncing
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore cron install # Install cron job (every 8 minutes)
|
||||||
|
lore cron install --interval 15 # Custom interval
|
||||||
|
lore cron status # Check if cron is installed
|
||||||
|
lore cron uninstall # Remove cron job")]
|
||||||
|
Cron(CronArgs),
|
||||||
|
|
||||||
#[command(hide = true)]
|
#[command(hide = true)]
|
||||||
List {
|
List {
|
||||||
#[arg(value_parser = ["issues", "mrs"])]
|
#[arg(value_parser = ["issues", "mrs"])]
|
||||||
@@ -344,7 +383,7 @@ pub struct IssuesArgs {
|
|||||||
pub fields: Option<Vec<String>>,
|
pub fields: Option<Vec<String>>,
|
||||||
|
|
||||||
/// Filter by state (opened, closed, all)
|
/// Filter by state (opened, closed, all)
|
||||||
#[arg(short = 's', long, help_heading = "Filters")]
|
#[arg(short = 's', long, help_heading = "Filters", value_parser = ["opened", "closed", "all"])]
|
||||||
pub state: Option<String>,
|
pub state: Option<String>,
|
||||||
|
|
||||||
/// Filter by project path
|
/// Filter by project path
|
||||||
@@ -438,7 +477,7 @@ pub struct MrsArgs {
|
|||||||
pub fields: Option<Vec<String>>,
|
pub fields: Option<Vec<String>>,
|
||||||
|
|
||||||
/// Filter by state (opened, merged, closed, locked, all)
|
/// Filter by state (opened, merged, closed, locked, all)
|
||||||
#[arg(short = 's', long, help_heading = "Filters")]
|
#[arg(short = 's', long, help_heading = "Filters", value_parser = ["opened", "merged", "closed", "locked", "all"])]
|
||||||
pub state: Option<String>,
|
pub state: Option<String>,
|
||||||
|
|
||||||
/// Filter by project path
|
/// Filter by project path
|
||||||
@@ -535,15 +574,6 @@ pub struct NotesArgs {
|
|||||||
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
||||||
pub fields: Option<Vec<String>>,
|
pub fields: Option<Vec<String>>,
|
||||||
|
|
||||||
/// Output format (table, json, jsonl, csv)
|
|
||||||
#[arg(
|
|
||||||
long,
|
|
||||||
default_value = "table",
|
|
||||||
value_parser = ["table", "json", "jsonl", "csv"],
|
|
||||||
help_heading = "Output"
|
|
||||||
)]
|
|
||||||
pub format: String,
|
|
||||||
|
|
||||||
/// Filter by author username
|
/// Filter by author username
|
||||||
#[arg(short = 'a', long, help_heading = "Filters")]
|
#[arg(short = 'a', long, help_heading = "Filters")]
|
||||||
pub author: Option<String>,
|
pub author: Option<String>,
|
||||||
@@ -655,6 +685,11 @@ pub struct IngestArgs {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Parser)]
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore stats # Show document and index statistics
|
||||||
|
lore stats --check # Run integrity checks
|
||||||
|
lore stats --repair --dry-run # Preview what repair would fix
|
||||||
|
lore --robot stats # JSON output for automation")]
|
||||||
pub struct StatsArgs {
|
pub struct StatsArgs {
|
||||||
/// Run integrity checks
|
/// Run integrity checks
|
||||||
#[arg(long, overrides_with = "no_check")]
|
#[arg(long, overrides_with = "no_check")]
|
||||||
@@ -743,6 +778,10 @@ pub struct SearchArgs {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Parser)]
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore generate-docs # Generate docs for dirty entities
|
||||||
|
lore generate-docs --full # Full rebuild of all documents
|
||||||
|
lore generate-docs --full -p group/repo # Full rebuild for one project")]
|
||||||
pub struct GenerateDocsArgs {
|
pub struct GenerateDocsArgs {
|
||||||
/// Full rebuild: seed all entities into dirty queue, then drain
|
/// Full rebuild: seed all entities into dirty queue, then drain
|
||||||
#[arg(long)]
|
#[arg(long)]
|
||||||
@@ -805,9 +844,17 @@ pub struct SyncArgs {
|
|||||||
/// Show detailed timing breakdown for sync stages
|
/// Show detailed timing breakdown for sync stages
|
||||||
#[arg(short = 't', long = "timings")]
|
#[arg(short = 't', long = "timings")]
|
||||||
pub timings: bool,
|
pub timings: bool,
|
||||||
|
|
||||||
|
/// Acquire file lock before syncing (skip if another sync is running)
|
||||||
|
#[arg(long)]
|
||||||
|
pub lock: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Parser)]
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore embed # Embed new/changed documents
|
||||||
|
lore embed --full # Re-embed all documents from scratch
|
||||||
|
lore embed --retry-failed # Retry previously failed embeddings")]
|
||||||
pub struct EmbedArgs {
|
pub struct EmbedArgs {
|
||||||
/// Re-embed all documents (clears existing embeddings first)
|
/// Re-embed all documents (clears existing embeddings first)
|
||||||
#[arg(long, overrides_with = "no_full")]
|
#[arg(long, overrides_with = "no_full")]
|
||||||
@@ -964,6 +1011,10 @@ pub struct WhoArgs {
|
|||||||
#[arg(long = "include-bots", help_heading = "Scoring")]
|
#[arg(long = "include-bots", help_heading = "Scoring")]
|
||||||
pub include_bots: bool,
|
pub include_bots: bool,
|
||||||
|
|
||||||
|
/// Include discussions on closed issues and merged/closed MRs
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub include_closed: bool,
|
||||||
|
|
||||||
/// Remove the default time window (query all history). Conflicts with --since.
|
/// Remove the default time window (query all history). Conflicts with --since.
|
||||||
#[arg(
|
#[arg(
|
||||||
long = "all-history",
|
long = "all-history",
|
||||||
@@ -1042,6 +1093,10 @@ pub struct TraceArgs {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Parser)]
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore count issues # Total issues in local database
|
||||||
|
lore count notes --for mr # Notes on merge requests only
|
||||||
|
lore count discussions --for issue # Discussions on issues only")]
|
||||||
pub struct CountArgs {
|
pub struct CountArgs {
|
||||||
/// Entity type to count (issues, mrs, discussions, notes, events)
|
/// Entity type to count (issues, mrs, discussions, notes, events)
|
||||||
#[arg(value_parser = ["issues", "mrs", "discussions", "notes", "events"])]
|
#[arg(value_parser = ["issues", "mrs", "discussions", "notes", "events"])]
|
||||||
@@ -1051,3 +1106,25 @@ pub struct CountArgs {
|
|||||||
#[arg(short = 'f', long = "for", value_parser = ["issue", "mr"])]
|
#[arg(short = 'f', long = "for", value_parser = ["issue", "mr"])]
|
||||||
pub for_entity: Option<String>,
|
pub for_entity: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
pub struct CronArgs {
|
||||||
|
#[command(subcommand)]
|
||||||
|
pub action: CronAction,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Subcommand)]
|
||||||
|
pub enum CronAction {
|
||||||
|
/// Install cron job for automatic syncing
|
||||||
|
Install {
|
||||||
|
/// Sync interval in minutes (default: 8)
|
||||||
|
#[arg(long, default_value = "8")]
|
||||||
|
interval: u32,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Remove cron job
|
||||||
|
Uninstall,
|
||||||
|
|
||||||
|
/// Show current cron configuration
|
||||||
|
Status,
|
||||||
|
}
|
||||||
|
|||||||
369
src/core/cron.rs
Normal file
369
src/core/cron.rs
Normal file
@@ -0,0 +1,369 @@
|
|||||||
|
use std::fs::{self, File};
|
||||||
|
use std::io::{self, Write};
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::process::Command;
|
||||||
|
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
use super::error::{LoreError, Result};
|
||||||
|
use super::paths::get_data_dir;
|
||||||
|
|
||||||
|
const CRON_TAG: &str = "# lore-sync";
|
||||||
|
|
||||||
|
// ── File-based sync lock (fcntl F_SETLK) ──
|
||||||
|
|
||||||
|
/// RAII guard that holds an `fcntl` write lock on a file.
|
||||||
|
/// The lock is released when the guard is dropped.
|
||||||
|
pub struct SyncLockGuard {
|
||||||
|
_file: File,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Try to acquire an exclusive file lock (non-blocking).
|
||||||
|
///
|
||||||
|
/// Returns `Ok(Some(guard))` if the lock was acquired, `Ok(None)` if another
|
||||||
|
/// process holds it, or `Err` on I/O failure.
|
||||||
|
#[cfg(unix)]
|
||||||
|
pub fn acquire_sync_lock() -> Result<Option<SyncLockGuard>> {
|
||||||
|
acquire_sync_lock_at(&lock_path())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn lock_path() -> PathBuf {
|
||||||
|
get_data_dir().join("sync.lock")
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
fn acquire_sync_lock_at(path: &Path) -> Result<Option<SyncLockGuard>> {
|
||||||
|
use std::os::unix::io::AsRawFd;
|
||||||
|
|
||||||
|
if let Some(parent) = path.parent() {
|
||||||
|
fs::create_dir_all(parent)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let file = File::options()
|
||||||
|
.create(true)
|
||||||
|
.truncate(false)
|
||||||
|
.write(true)
|
||||||
|
.open(path)?;
|
||||||
|
|
||||||
|
let fd = file.as_raw_fd();
|
||||||
|
|
||||||
|
// SAFETY: zeroed memory is valid for libc::flock (all-zero is a valid
|
||||||
|
// representation on every Unix platform). We then set only the fields we need.
|
||||||
|
let mut flock = unsafe { std::mem::zeroed::<libc::flock>() };
|
||||||
|
flock.l_type = libc::F_WRLCK as libc::c_short;
|
||||||
|
flock.l_whence = libc::SEEK_SET as libc::c_short;
|
||||||
|
|
||||||
|
// SAFETY: fd is a valid open file descriptor; flock is stack-allocated.
|
||||||
|
let rc = unsafe { libc::fcntl(fd, libc::F_SETLK, &mut flock) };
|
||||||
|
if rc == -1 {
|
||||||
|
let err = io::Error::last_os_error();
|
||||||
|
if err.kind() == io::ErrorKind::WouldBlock
|
||||||
|
|| err.raw_os_error() == Some(libc::EAGAIN)
|
||||||
|
|| err.raw_os_error() == Some(libc::EACCES)
|
||||||
|
{
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
return Err(LoreError::Io(err));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Some(SyncLockGuard { _file: file }))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Crontab management ──
|
||||||
|
|
||||||
|
/// The crontab entry that `lore cron install` writes.
|
||||||
|
///
|
||||||
|
/// Paths are single-quoted so spaces in binary or log paths don't break
|
||||||
|
/// the cron expression.
|
||||||
|
pub fn build_cron_entry(interval_minutes: u32) -> String {
|
||||||
|
let binary = std::env::current_exe()
|
||||||
|
.unwrap_or_else(|_| PathBuf::from("lore"))
|
||||||
|
.display()
|
||||||
|
.to_string();
|
||||||
|
let log_path = sync_log_path();
|
||||||
|
format!(
|
||||||
|
"*/{interval_minutes} * * * * '{binary}' sync -q --lock >> '{log}' 2>&1 {CRON_TAG}",
|
||||||
|
log = log_path.display(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Path where cron-triggered sync output is appended.
|
||||||
|
pub fn sync_log_path() -> PathBuf {
|
||||||
|
get_data_dir().join("sync.log")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read the current user crontab. Returns empty string when no crontab exists.
|
||||||
|
fn read_crontab() -> Result<String> {
|
||||||
|
let output = Command::new("crontab").arg("-l").output()?;
|
||||||
|
if output.status.success() {
|
||||||
|
Ok(String::from_utf8_lossy(&output.stdout).into_owned())
|
||||||
|
} else {
|
||||||
|
// exit 1 with "no crontab for <user>" is normal — treat as empty
|
||||||
|
Ok(String::new())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a full crontab string. Replaces the current crontab entirely.
|
||||||
|
fn write_crontab(content: &str) -> Result<()> {
|
||||||
|
let mut child = Command::new("crontab")
|
||||||
|
.arg("-")
|
||||||
|
.stdin(std::process::Stdio::piped())
|
||||||
|
.spawn()?;
|
||||||
|
if let Some(ref mut stdin) = child.stdin {
|
||||||
|
stdin.write_all(content.as_bytes())?;
|
||||||
|
}
|
||||||
|
let status = child.wait()?;
|
||||||
|
if !status.success() {
|
||||||
|
return Err(LoreError::Other(format!(
|
||||||
|
"crontab exited with status {status}"
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Install (or update) the lore-sync crontab entry.
|
||||||
|
pub fn install_cron(interval_minutes: u32) -> Result<CronInstallResult> {
|
||||||
|
let entry = build_cron_entry(interval_minutes);
|
||||||
|
|
||||||
|
let existing = read_crontab()?;
|
||||||
|
let replaced = existing.contains(CRON_TAG);
|
||||||
|
|
||||||
|
// Strip ALL old lore-sync lines first, then append one new entry.
|
||||||
|
// This is idempotent even if the crontab somehow has duplicate tagged lines.
|
||||||
|
let mut filtered: String = existing
|
||||||
|
.lines()
|
||||||
|
.filter(|line| !line.contains(CRON_TAG))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join("\n");
|
||||||
|
if !filtered.is_empty() && !filtered.ends_with('\n') {
|
||||||
|
filtered.push('\n');
|
||||||
|
}
|
||||||
|
filtered.push_str(&entry);
|
||||||
|
filtered.push('\n');
|
||||||
|
|
||||||
|
write_crontab(&filtered)?;
|
||||||
|
|
||||||
|
Ok(CronInstallResult {
|
||||||
|
entry,
|
||||||
|
interval_minutes,
|
||||||
|
log_path: sync_log_path(),
|
||||||
|
replaced,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove the lore-sync crontab entry.
|
||||||
|
pub fn uninstall_cron() -> Result<CronUninstallResult> {
|
||||||
|
let existing = read_crontab()?;
|
||||||
|
if !existing.contains(CRON_TAG) {
|
||||||
|
return Ok(CronUninstallResult {
|
||||||
|
was_installed: false,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let new_crontab: String = existing
|
||||||
|
.lines()
|
||||||
|
.filter(|line| !line.contains(CRON_TAG))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join("\n")
|
||||||
|
+ "\n";
|
||||||
|
|
||||||
|
// If the crontab would be empty (only whitespace), remove it entirely
|
||||||
|
if new_crontab.trim().is_empty() {
|
||||||
|
let status = Command::new("crontab").arg("-r").status()?;
|
||||||
|
if !status.success() {
|
||||||
|
return Err(LoreError::Other("crontab -r failed".to_string()));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
write_crontab(&new_crontab)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(CronUninstallResult {
|
||||||
|
was_installed: true,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inspect the current crontab for a lore-sync entry.
|
||||||
|
pub fn cron_status() -> Result<CronStatusResult> {
|
||||||
|
let existing = read_crontab()?;
|
||||||
|
let lore_line = existing.lines().find(|l| l.contains(CRON_TAG));
|
||||||
|
|
||||||
|
match lore_line {
|
||||||
|
Some(line) => {
|
||||||
|
let interval = parse_interval(line);
|
||||||
|
let binary_path = parse_binary_path(line);
|
||||||
|
|
||||||
|
let current_exe = std::env::current_exe()
|
||||||
|
.ok()
|
||||||
|
.map(|p| p.display().to_string());
|
||||||
|
let binary_mismatch = current_exe
|
||||||
|
.as_ref()
|
||||||
|
.zip(binary_path.as_ref())
|
||||||
|
.is_some_and(|(current, cron)| current != cron);
|
||||||
|
|
||||||
|
Ok(CronStatusResult {
|
||||||
|
installed: true,
|
||||||
|
interval_minutes: interval,
|
||||||
|
binary_path,
|
||||||
|
current_binary: current_exe,
|
||||||
|
binary_mismatch,
|
||||||
|
log_path: Some(sync_log_path()),
|
||||||
|
cron_entry: Some(line.to_string()),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
None => Ok(CronStatusResult {
|
||||||
|
installed: false,
|
||||||
|
interval_minutes: None,
|
||||||
|
binary_path: None,
|
||||||
|
current_binary: std::env::current_exe()
|
||||||
|
.ok()
|
||||||
|
.map(|p| p.display().to_string()),
|
||||||
|
binary_mismatch: false,
|
||||||
|
log_path: None,
|
||||||
|
cron_entry: None,
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse the interval from a cron expression like `*/8 * * * * ...`
///
/// Returns `None` when the minute field is missing or not of the `*/N` form.
fn parse_interval(line: &str) -> Option<u32> {
    line.split_whitespace()
        .next()
        .and_then(|field| field.strip_prefix("*/"))
        .and_then(|n| n.parse().ok())
}
|
||||||
|
|
||||||
|
/// Parse the binary path from the cron entry after the 5 time fields.
///
/// Handles both quoted (`'/path with spaces/lore'`) and unquoted paths.
/// We skip the time fields manually to avoid `split_whitespace` breaking
/// on spaces inside single-quoted paths.
fn parse_binary_path(line: &str) -> Option<String> {
    // Advance past the five cron time fields (min hour dom month dow).
    // These never contain spaces, so whitespace-splitting is safe here.
    let mut remainder = line;
    for _ in 0..5 {
        let field = remainder.trim_start();
        let split_at = field.find(char::is_whitespace)?;
        remainder = &field[split_at..];
    }
    let command = remainder.trim_start();

    // The command starts here — it may be single-quoted.
    match command.strip_prefix('\'') {
        Some(quoted) => {
            let close = quoted.find('\'')?;
            Some(quoted[..close].to_string())
        }
        None => {
            let end = command.find(char::is_whitespace).unwrap_or(command.len());
            Some(command[..end].to_string())
        }
    }
}
|
||||||
|
|
||||||
|
// ── Result types ──
|
||||||
|
|
||||||
|
/// Outcome of `lore cron install`, serialized for machine-readable output.
#[derive(Serialize)]
pub struct CronInstallResult {
    /// The exact crontab line that was written.
    pub entry: String,
    /// Sync interval encoded in the entry's `*/N` minute field.
    pub interval_minutes: u32,
    /// File that cron-triggered sync output is appended to.
    pub log_path: PathBuf,
    /// True when an existing tagged entry was replaced rather than added.
    pub replaced: bool,
}
|
||||||
|
|
||||||
|
/// Outcome of `lore cron uninstall`.
#[derive(Serialize)]
pub struct CronUninstallResult {
    /// True when a tagged lore-sync entry existed and was removed.
    pub was_installed: bool,
}
|
||||||
|
|
||||||
|
/// Snapshot of the crontab state reported by `lore cron status`.
#[derive(Serialize)]
pub struct CronStatusResult {
    /// Whether a tagged lore-sync entry is present in the crontab.
    pub installed: bool,
    /// Interval parsed from the entry's `*/N` minute field, if present.
    pub interval_minutes: Option<u32>,
    /// Binary path parsed from the cron entry, when it could be extracted.
    pub binary_path: Option<String>,
    /// Path of the currently running executable, when resolvable.
    pub current_binary: Option<String>,
    /// True when both binary paths are known and differ (stale entry).
    pub binary_mismatch: bool,
    /// Log path; populated only when an entry is installed.
    pub log_path: Option<PathBuf>,
    /// The raw crontab line, when installed.
    pub cron_entry: Option<String>,
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Verifies the rendered cron line: interval prefix, quiet locked sync
    // invocation, and the trailing tag used to find the entry later.
    #[test]
    fn build_cron_entry_formats_correctly() {
        let entry = build_cron_entry(8);
        assert!(entry.starts_with("*/8 * * * * "));
        assert!(entry.contains("sync -q --lock"));
        assert!(entry.ends_with(CRON_TAG));
    }

    // Only the `*/N` minute-field form yields an interval; anything else
    // (e.g. a fixed minute like "0") parses as None.
    #[test]
    fn parse_interval_extracts_number() {
        assert_eq!(parse_interval("*/8 * * * * /usr/bin/lore sync"), Some(8));
        assert_eq!(parse_interval("*/15 * * * * /usr/bin/lore sync"), Some(15));
        assert_eq!(parse_interval("0 * * * * /usr/bin/lore sync"), None);
    }

    // Covers the three path shapes: unquoted, quoted, and quoted-with-spaces.
    #[test]
    fn parse_binary_path_extracts_sixth_field() {
        // Unquoted path
        assert_eq!(
            parse_binary_path(
                "*/8 * * * * /usr/local/bin/lore sync -q --lock >> /tmp/log 2>&1 # lore-sync"
            ),
            Some("/usr/local/bin/lore".to_string())
        );
        // Single-quoted path without spaces
        assert_eq!(
            parse_binary_path(
                "*/8 * * * * '/usr/local/bin/lore' sync -q --lock >> '/tmp/log' 2>&1 # lore-sync"
            ),
            Some("/usr/local/bin/lore".to_string())
        );
        // Single-quoted path WITH spaces (common on macOS)
        assert_eq!(
            parse_binary_path(
                "*/8 * * * * '/Users/Taylor Eernisse/.cargo/bin/lore' sync -q --lock >> '/tmp/log' 2>&1 # lore-sync"
            ),
            Some("/Users/Taylor Eernisse/.cargo/bin/lore".to_string())
        );
    }

    // Lock acquisition must create missing parent directories rather than
    // fail when the data dir has never been initialized.
    #[test]
    fn sync_lock_at_nonexistent_dir_creates_parents() {
        let dir = tempfile::tempdir().unwrap();
        let lock_file = dir.path().join("nested").join("deep").join("sync.lock");
        let guard = acquire_sync_lock_at(&lock_file).unwrap();
        assert!(guard.is_some());
        assert!(lock_file.exists());
    }

    #[test]
    fn sync_lock_is_exclusive_across_processes() {
        // POSIX fcntl locks are per-process, so same-process re-lock always
        // succeeds. We verify cross-process exclusion using a Python child
        // that attempts the same fcntl F_SETLK.
        let dir = tempfile::tempdir().unwrap();
        let lock_file = dir.path().join("sync.lock");
        let _guard = acquire_sync_lock_at(&lock_file).unwrap().unwrap();

        // NOTE(review): the struct.pack layout assumes a particular flock
        // field order (type, whence, start, len, ...) — confirm it matches
        // every target platform before relying on this test in CI there.
        let script = r#"
import fcntl, struct, sys
fd = open(sys.argv[1], "w")
try:
    fcntl.fcntl(fd, fcntl.F_SETLK, struct.pack("hhllhh", fcntl.F_WRLCK, 0, 0, 0, 0, 0))
    sys.exit(0)
except (IOError, OSError):
    sys.exit(1)
"#;
        let status = std::process::Command::new("python3")
            .args(["-c", script, &lock_file.display().to_string()])
            .status()
            .unwrap();
        assert!(
            !status.success(),
            "child process should fail to acquire fcntl lock held by parent"
        );
    }
}
|
||||||
@@ -44,15 +44,13 @@ pub fn resolve_rename_chain(
|
|||||||
let mut fwd_stmt = conn.prepare_cached(forward_sql)?;
|
let mut fwd_stmt = conn.prepare_cached(forward_sql)?;
|
||||||
let forward: Vec<String> = fwd_stmt
|
let forward: Vec<String> = fwd_stmt
|
||||||
.query_map(rusqlite::params![project_id, ¤t], |row| row.get(0))?
|
.query_map(rusqlite::params![project_id, ¤t], |row| row.get(0))?
|
||||||
.filter_map(std::result::Result::ok)
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
.collect();
|
|
||||||
|
|
||||||
// Backward: current was the new name -> discover old names
|
// Backward: current was the new name -> discover old names
|
||||||
let mut bwd_stmt = conn.prepare_cached(backward_sql)?;
|
let mut bwd_stmt = conn.prepare_cached(backward_sql)?;
|
||||||
let backward: Vec<String> = bwd_stmt
|
let backward: Vec<String> = bwd_stmt
|
||||||
.query_map(rusqlite::params![project_id, ¤t], |row| row.get(0))?
|
.query_map(rusqlite::params![project_id, ¤t], |row| row.get(0))?
|
||||||
.filter_map(std::result::Result::ok)
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
.collect();
|
|
||||||
|
|
||||||
for discovered in forward.into_iter().chain(backward) {
|
for discovered in forward.into_iter().chain(backward) {
|
||||||
if visited.insert(discovered.clone()) {
|
if visited.insert(discovered.clone()) {
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
pub mod backoff;
|
pub mod backoff;
|
||||||
pub mod config;
|
pub mod config;
|
||||||
|
#[cfg(unix)]
|
||||||
|
pub mod cron;
|
||||||
pub mod db;
|
pub mod db;
|
||||||
pub mod dependent_queue;
|
pub mod dependent_queue;
|
||||||
pub mod error;
|
pub mod error;
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
use rusqlite::Connection;
|
use rusqlite::Connection;
|
||||||
|
|
||||||
use super::error::{LoreError, Result};
|
use super::error::{LoreError, Result};
|
||||||
|
use super::file_history::resolve_rename_chain;
|
||||||
|
|
||||||
// ─── SQL Helpers ─────────────────────────────────────────────────────────────
|
// ─── SQL Helpers ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
@@ -149,6 +150,16 @@ pub fn build_path_query(
|
|||||||
is_prefix: false,
|
is_prefix: false,
|
||||||
}),
|
}),
|
||||||
SuffixResult::Ambiguous(candidates) => {
|
SuffixResult::Ambiguous(candidates) => {
|
||||||
|
// Check if all candidates are the same file connected by renames.
|
||||||
|
// resolve_rename_chain requires a concrete project_id.
|
||||||
|
if let Some(pid) = project_id
|
||||||
|
&& let Some(resolved) = try_resolve_rename_ambiguity(conn, pid, &candidates)?
|
||||||
|
{
|
||||||
|
return Ok(PathQuery {
|
||||||
|
value: resolved,
|
||||||
|
is_prefix: false,
|
||||||
|
});
|
||||||
|
}
|
||||||
let list = candidates
|
let list = candidates
|
||||||
.iter()
|
.iter()
|
||||||
.map(|p| format!(" {p}"))
|
.map(|p| format!(" {p}"))
|
||||||
@@ -239,6 +250,58 @@ pub fn suffix_probe(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Maximum rename hops when resolving ambiguity.
|
||||||
|
const AMBIGUITY_MAX_RENAME_HOPS: usize = 10;
|
||||||
|
|
||||||
|
/// When suffix probe returns multiple paths, check if they are all the same file
|
||||||
|
/// connected by renames. If so, return the "newest" path (the leaf of the chain
|
||||||
|
/// that is never renamed away from). Returns `None` if truly ambiguous.
|
||||||
|
fn try_resolve_rename_ambiguity(
|
||||||
|
conn: &Connection,
|
||||||
|
project_id: i64,
|
||||||
|
candidates: &[String],
|
||||||
|
) -> Result<Option<String>> {
|
||||||
|
// BFS from the first candidate to discover the full rename chain.
|
||||||
|
let chain = resolve_rename_chain(conn, project_id, &candidates[0], AMBIGUITY_MAX_RENAME_HOPS)?;
|
||||||
|
|
||||||
|
// If any candidate is NOT in the chain, these are genuinely different files.
|
||||||
|
if !candidates.iter().all(|c| chain.contains(c)) {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
// All candidates are the same file. Find the "newest" path: the one that
|
||||||
|
// appears as new_path in a rename but is never old_path in a subsequent rename
|
||||||
|
// (within the chain). This is the leaf of the rename DAG.
|
||||||
|
let placeholders: Vec<String> = (0..chain.len()).map(|i| format!("?{}", i + 2)).collect();
|
||||||
|
let in_clause = placeholders.join(", ");
|
||||||
|
|
||||||
|
// Find paths that are old_path in a rename where new_path is also in the chain.
|
||||||
|
let sql = format!(
|
||||||
|
"SELECT DISTINCT old_path FROM mr_file_changes \
|
||||||
|
WHERE project_id = ?1 \
|
||||||
|
AND change_type = 'renamed' \
|
||||||
|
AND old_path IN ({in_clause}) \
|
||||||
|
AND new_path IN ({in_clause})"
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut stmt = conn.prepare(&sql)?;
|
||||||
|
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
|
||||||
|
params.push(Box::new(project_id));
|
||||||
|
for p in &chain {
|
||||||
|
params.push(Box::new(p.clone()));
|
||||||
|
}
|
||||||
|
let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();
|
||||||
|
|
||||||
|
let old_paths: Vec<String> = stmt
|
||||||
|
.query_map(param_refs.as_slice(), |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
// The newest path is a candidate that is NOT an old_path in any intra-chain rename.
|
||||||
|
let newest = candidates.iter().find(|c| !old_paths.contains(c));
|
||||||
|
|
||||||
|
Ok(newest.cloned().or_else(|| Some(candidates[0].clone())))
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
#[path = "path_resolver_tests.rs"]
|
#[path = "path_resolver_tests.rs"]
|
||||||
mod tests;
|
mod tests;
|
||||||
|
|||||||
@@ -288,3 +288,80 @@ fn test_exact_match_preferred_over_suffix() {
|
|||||||
assert_eq!(pq.value, "README.md");
|
assert_eq!(pq.value, "README.md");
|
||||||
assert!(!pq.is_prefix);
|
assert!(!pq.is_prefix);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn seed_rename(conn: &Connection, mr_id: i64, project_id: i64, old_path: &str, new_path: &str) {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO mr_file_changes (merge_request_id, project_id, old_path, new_path, change_type)
|
||||||
|
VALUES (?1, ?2, ?3, ?4, 'renamed')",
|
||||||
|
rusqlite::params![mr_id, project_id, old_path, new_path],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── rename-aware ambiguity resolution ──────────────────────────────────────
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ambiguity_resolved_by_rename_chain() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
seed_project(&conn, 1);
|
||||||
|
seed_mr(&conn, 1, 1);
|
||||||
|
seed_mr(&conn, 2, 1);
|
||||||
|
|
||||||
|
// File was at src/old/operators.ts, then renamed to src/new/operators.ts
|
||||||
|
seed_file_change(&conn, 1, 1, "src/old/operators.ts");
|
||||||
|
seed_rename(&conn, 2, 1, "src/old/operators.ts", "src/new/operators.ts");
|
||||||
|
|
||||||
|
// Bare "operators.ts" matches both paths via suffix probe, but they're
|
||||||
|
// connected by a rename — should auto-resolve to the newest path.
|
||||||
|
let pq = build_path_query(&conn, "operators.ts", Some(1)).unwrap();
|
||||||
|
assert_eq!(pq.value, "src/new/operators.ts");
|
||||||
|
assert!(!pq.is_prefix);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ambiguity_not_resolved_when_genuinely_different_files() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
seed_project(&conn, 1);
|
||||||
|
seed_mr(&conn, 1, 1);
|
||||||
|
|
||||||
|
// Two genuinely different files with the same name (no rename connecting them)
|
||||||
|
seed_file_change(&conn, 1, 1, "src/utils/helpers.ts");
|
||||||
|
seed_file_change(&conn, 1, 1, "tests/utils/helpers.ts");
|
||||||
|
|
||||||
|
let err = build_path_query(&conn, "helpers.ts", Some(1)).unwrap_err();
|
||||||
|
assert!(err.to_string().contains("matches multiple paths"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ambiguity_rename_chain_with_three_hops() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
seed_project(&conn, 1);
|
||||||
|
seed_mr(&conn, 1, 1);
|
||||||
|
seed_mr(&conn, 2, 1);
|
||||||
|
seed_mr(&conn, 3, 1);
|
||||||
|
|
||||||
|
// File named "config.ts" moved twice: lib/ -> src/ -> src/core/
|
||||||
|
seed_file_change(&conn, 1, 1, "lib/config.ts");
|
||||||
|
seed_rename(&conn, 2, 1, "lib/config.ts", "src/config.ts");
|
||||||
|
seed_rename(&conn, 3, 1, "src/config.ts", "src/core/config.ts");
|
||||||
|
|
||||||
|
// "config.ts" matches lib/config.ts, src/config.ts, src/core/config.ts via suffix
|
||||||
|
let pq = build_path_query(&conn, "config.ts", Some(1)).unwrap();
|
||||||
|
assert_eq!(pq.value, "src/core/config.ts");
|
||||||
|
assert!(!pq.is_prefix);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ambiguity_rename_without_project_id_stays_ambiguous() {
|
||||||
|
let conn = setup_test_db();
|
||||||
|
seed_project(&conn, 1);
|
||||||
|
seed_mr(&conn, 1, 1);
|
||||||
|
seed_mr(&conn, 2, 1);
|
||||||
|
|
||||||
|
seed_file_change(&conn, 1, 1, "src/old/utils.ts");
|
||||||
|
seed_rename(&conn, 2, 1, "src/old/utils.ts", "src/new/utils.ts");
|
||||||
|
|
||||||
|
// Without project_id, rename resolution is skipped → stays ambiguous
|
||||||
|
let err = build_path_query(&conn, "utils.ts", None).unwrap_err();
|
||||||
|
assert!(err.to_string().contains("matches multiple paths"));
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
|
use tracing::info;
|
||||||
|
|
||||||
use super::error::Result;
|
use super::error::Result;
|
||||||
use super::file_history::resolve_rename_chain;
|
use super::file_history::resolve_rename_chain;
|
||||||
@@ -51,6 +52,9 @@ pub struct TraceResult {
|
|||||||
pub renames_followed: bool,
|
pub renames_followed: bool,
|
||||||
pub trace_chains: Vec<TraceChain>,
|
pub trace_chains: Vec<TraceChain>,
|
||||||
pub total_chains: usize,
|
pub total_chains: usize,
|
||||||
|
/// Diagnostic hints explaining why results may be empty.
|
||||||
|
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||||
|
pub hints: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Run the trace query: file -> MR -> issue chain.
|
/// Run the trace query: file -> MR -> issue chain.
|
||||||
@@ -75,6 +79,14 @@ pub fn run_trace(
|
|||||||
(vec![path.to_string()], false)
|
(vec![path.to_string()], false)
|
||||||
};
|
};
|
||||||
|
|
||||||
|
info!(
|
||||||
|
paths = all_paths.len(),
|
||||||
|
renames_followed,
|
||||||
|
"trace: resolved {} path(s) for '{}'",
|
||||||
|
all_paths.len(),
|
||||||
|
path
|
||||||
|
);
|
||||||
|
|
||||||
// Build placeholders for IN clause
|
// Build placeholders for IN clause
|
||||||
let placeholders: Vec<String> = (0..all_paths.len())
|
let placeholders: Vec<String> = (0..all_paths.len())
|
||||||
.map(|i| format!("?{}", i + 2))
|
.map(|i| format!("?{}", i + 2))
|
||||||
@@ -100,7 +112,7 @@ pub fn run_trace(
|
|||||||
all_paths.len() + 2
|
all_paths.len() + 2
|
||||||
);
|
);
|
||||||
|
|
||||||
let mut stmt = conn.prepare(&mr_sql)?;
|
let mut stmt = conn.prepare_cached(&mr_sql)?;
|
||||||
|
|
||||||
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
|
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
|
||||||
params.push(Box::new(project_id.unwrap_or(0)));
|
params.push(Box::new(project_id.unwrap_or(0)));
|
||||||
@@ -137,8 +149,14 @@ pub fn run_trace(
|
|||||||
web_url: row.get(8)?,
|
web_url: row.get(8)?,
|
||||||
})
|
})
|
||||||
})?
|
})?
|
||||||
.filter_map(std::result::Result::ok)
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
.collect();
|
|
||||||
|
info!(
|
||||||
|
mr_count = mr_rows.len(),
|
||||||
|
"trace: found {} MR(s) touching '{}'",
|
||||||
|
mr_rows.len(),
|
||||||
|
path
|
||||||
|
);
|
||||||
|
|
||||||
// Step 2: For each MR, find linked issues + optional discussions
|
// Step 2: For each MR, find linked issues + optional discussions
|
||||||
let mut trace_chains = Vec::with_capacity(mr_rows.len());
|
let mut trace_chains = Vec::with_capacity(mr_rows.len());
|
||||||
@@ -152,6 +170,16 @@ pub fn run_trace(
|
|||||||
Vec::new()
|
Vec::new()
|
||||||
};
|
};
|
||||||
|
|
||||||
|
info!(
|
||||||
|
mr_iid = mr.iid,
|
||||||
|
issues = issues.len(),
|
||||||
|
discussions = discussions.len(),
|
||||||
|
"trace: MR !{}: {} issue(s), {} discussion(s)",
|
||||||
|
mr.iid,
|
||||||
|
issues.len(),
|
||||||
|
discussions.len()
|
||||||
|
);
|
||||||
|
|
||||||
trace_chains.push(TraceChain {
|
trace_chains.push(TraceChain {
|
||||||
mr_iid: mr.iid,
|
mr_iid: mr.iid,
|
||||||
mr_title: mr.title.clone(),
|
mr_title: mr.title.clone(),
|
||||||
@@ -168,12 +196,20 @@ pub fn run_trace(
|
|||||||
|
|
||||||
let total_chains = trace_chains.len();
|
let total_chains = trace_chains.len();
|
||||||
|
|
||||||
|
// Build diagnostic hints when no results found
|
||||||
|
let hints = if total_chains == 0 {
|
||||||
|
build_trace_hints(conn, project_id, &all_paths)?
|
||||||
|
} else {
|
||||||
|
Vec::new()
|
||||||
|
};
|
||||||
|
|
||||||
Ok(TraceResult {
|
Ok(TraceResult {
|
||||||
path: path.to_string(),
|
path: path.to_string(),
|
||||||
resolved_paths: all_paths,
|
resolved_paths: all_paths,
|
||||||
renames_followed,
|
renames_followed,
|
||||||
trace_chains,
|
trace_chains,
|
||||||
total_chains,
|
total_chains,
|
||||||
|
hints,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -191,7 +227,7 @@ fn fetch_linked_issues(conn: &rusqlite::Connection, mr_id: i64) -> Result<Vec<Tr
|
|||||||
CASE er.reference_type WHEN 'closes' THEN 0 WHEN 'related' THEN 1 ELSE 2 END, \
|
CASE er.reference_type WHEN 'closes' THEN 0 WHEN 'related' THEN 1 ELSE 2 END, \
|
||||||
i.iid";
|
i.iid";
|
||||||
|
|
||||||
let mut stmt = conn.prepare(sql)?;
|
let mut stmt = conn.prepare_cached(sql)?;
|
||||||
let issues: Vec<TraceIssue> = stmt
|
let issues: Vec<TraceIssue> = stmt
|
||||||
.query_map(rusqlite::params![mr_id], |row| {
|
.query_map(rusqlite::params![mr_id], |row| {
|
||||||
Ok(TraceIssue {
|
Ok(TraceIssue {
|
||||||
@@ -202,8 +238,7 @@ fn fetch_linked_issues(conn: &rusqlite::Connection, mr_id: i64) -> Result<Vec<Tr
|
|||||||
web_url: row.get(4)?,
|
web_url: row.get(4)?,
|
||||||
})
|
})
|
||||||
})?
|
})?
|
||||||
.filter_map(std::result::Result::ok)
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
.collect();
|
|
||||||
|
|
||||||
Ok(issues)
|
Ok(issues)
|
||||||
}
|
}
|
||||||
@@ -225,11 +260,10 @@ fn fetch_trace_discussions(
|
|||||||
WHERE d.merge_request_id = ?1 \
|
WHERE d.merge_request_id = ?1 \
|
||||||
AND n.position_new_path IN ({in_clause}) \
|
AND n.position_new_path IN ({in_clause}) \
|
||||||
AND n.is_system = 0 \
|
AND n.is_system = 0 \
|
||||||
ORDER BY n.created_at DESC \
|
ORDER BY n.created_at DESC"
|
||||||
LIMIT 20"
|
|
||||||
);
|
);
|
||||||
|
|
||||||
let mut stmt = conn.prepare(&sql)?;
|
let mut stmt = conn.prepare_cached(&sql)?;
|
||||||
|
|
||||||
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
|
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
|
||||||
params.push(Box::new(mr_id));
|
params.push(Box::new(mr_id));
|
||||||
@@ -251,12 +285,57 @@ fn fetch_trace_discussions(
|
|||||||
created_at_iso: ms_to_iso(created_at),
|
created_at_iso: ms_to_iso(created_at),
|
||||||
})
|
})
|
||||||
})?
|
})?
|
||||||
.filter_map(std::result::Result::ok)
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
.collect();
|
|
||||||
|
|
||||||
Ok(discussions)
|
Ok(discussions)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Build diagnostic hints explaining why a trace query returned no results.
|
||||||
|
fn build_trace_hints(
|
||||||
|
conn: &rusqlite::Connection,
|
||||||
|
project_id: Option<i64>,
|
||||||
|
paths: &[String],
|
||||||
|
) -> Result<Vec<String>> {
|
||||||
|
let mut hints = Vec::new();
|
||||||
|
|
||||||
|
// Check if mr_file_changes has ANY rows for this project
|
||||||
|
let has_file_changes: bool = if let Some(pid) = project_id {
|
||||||
|
conn.query_row(
|
||||||
|
"SELECT EXISTS(SELECT 1 FROM mr_file_changes WHERE project_id = ?1 LIMIT 1)",
|
||||||
|
rusqlite::params![pid],
|
||||||
|
|row| row.get(0),
|
||||||
|
)?
|
||||||
|
} else {
|
||||||
|
conn.query_row(
|
||||||
|
"SELECT EXISTS(SELECT 1 FROM mr_file_changes LIMIT 1)",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)?
|
||||||
|
};
|
||||||
|
|
||||||
|
if !has_file_changes {
|
||||||
|
hints.push(
|
||||||
|
"No MR file changes have been synced yet. Run 'lore sync' to fetch file change data."
|
||||||
|
.to_string(),
|
||||||
|
);
|
||||||
|
return Ok(hints);
|
||||||
|
}
|
||||||
|
|
||||||
|
// File changes exist but none match these paths
|
||||||
|
let path_list = paths
|
||||||
|
.iter()
|
||||||
|
.map(|p| format!("'{p}'"))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(", ");
|
||||||
|
hints.push(format!(
|
||||||
|
"Searched paths [{}] were not found in MR file changes. \
|
||||||
|
The file may predate the sync window or use a different path.",
|
||||||
|
path_list
|
||||||
|
));
|
||||||
|
|
||||||
|
Ok(hints)
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
#[path = "trace_tests.rs"]
|
#[path = "trace_tests.rs"]
|
||||||
mod tests;
|
mod tests;
|
||||||
|
|||||||
148
src/main.rs
148
src/main.rs
@@ -11,26 +11,29 @@ use lore::cli::autocorrect::{self, CorrectionResult};
|
|||||||
use lore::cli::commands::{
|
use lore::cli::commands::{
|
||||||
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
||||||
NoteListFilters, SearchCliFilters, SyncOptions, TimelineParams, open_issue_in_browser,
|
NoteListFilters, SearchCliFilters, SyncOptions, TimelineParams, open_issue_in_browser,
|
||||||
open_mr_in_browser, parse_trace_path, print_count, print_count_json, print_doctor_results,
|
open_mr_in_browser, parse_trace_path, print_count, print_count_json, print_cron_install,
|
||||||
print_drift_human, print_drift_json, print_dry_run_preview, print_dry_run_preview_json,
|
print_cron_install_json, print_cron_status, print_cron_status_json, print_cron_uninstall,
|
||||||
print_embed, print_embed_json, print_event_count, print_event_count_json, print_file_history,
|
print_cron_uninstall_json, print_doctor_results, print_drift_human, print_drift_json,
|
||||||
print_file_history_json, print_generate_docs, print_generate_docs_json, print_ingest_summary,
|
print_dry_run_preview, print_dry_run_preview_json, print_embed, print_embed_json,
|
||||||
print_ingest_summary_json, print_list_issues, print_list_issues_json, print_list_mrs,
|
print_event_count, print_event_count_json, print_file_history, print_file_history_json,
|
||||||
print_list_mrs_json, print_list_notes, print_list_notes_csv, print_list_notes_json,
|
print_generate_docs, print_generate_docs_json, print_ingest_summary, print_ingest_summary_json,
|
||||||
print_list_notes_jsonl, print_search_results, print_search_results_json, print_show_issue,
|
print_list_issues, print_list_issues_json, print_list_mrs, print_list_mrs_json,
|
||||||
print_show_issue_json, print_show_mr, print_show_mr_json, print_stats, print_stats_json,
|
print_list_notes, print_list_notes_json, print_search_results, print_search_results_json,
|
||||||
print_sync, print_sync_json, print_sync_status, print_sync_status_json, print_timeline,
|
print_show_issue, print_show_issue_json, print_show_mr, print_show_mr_json, print_stats,
|
||||||
print_timeline_json_with_meta, print_trace, print_trace_json, print_who_human, print_who_json,
|
print_stats_json, print_sync, print_sync_json, print_sync_status, print_sync_status_json,
|
||||||
query_notes, run_auth_test, run_count, run_count_events, run_doctor, run_drift, run_embed,
|
print_timeline, print_timeline_json_with_meta, print_trace, print_trace_json, print_who_human,
|
||||||
run_file_history, run_generate_docs, run_ingest, run_ingest_dry_run, run_init, run_list_issues,
|
print_who_json, query_notes, run_auth_test, run_count, run_count_events, run_cron_install,
|
||||||
run_list_mrs, run_search, run_show_issue, run_show_mr, run_stats, run_sync, run_sync_status,
|
run_cron_status, run_cron_uninstall, run_doctor, run_drift, run_embed, run_file_history,
|
||||||
run_timeline, run_who,
|
run_generate_docs, run_ingest, run_ingest_dry_run, run_init, run_list_issues, run_list_mrs,
|
||||||
|
run_search, run_show_issue, run_show_mr, run_stats, run_sync, run_sync_status, run_timeline,
|
||||||
|
run_who,
|
||||||
};
|
};
|
||||||
use lore::cli::render::{ColorMode, GlyphMode, Icons, LoreRenderer, Theme};
|
use lore::cli::render::{ColorMode, GlyphMode, Icons, LoreRenderer, Theme};
|
||||||
use lore::cli::robot::{RobotMeta, strip_schemas};
|
use lore::cli::robot::{RobotMeta, strip_schemas};
|
||||||
use lore::cli::{
|
use lore::cli::{
|
||||||
Cli, Commands, CountArgs, EmbedArgs, FileHistoryArgs, GenerateDocsArgs, IngestArgs, IssuesArgs,
|
Cli, Commands, CountArgs, CronAction, CronArgs, EmbedArgs, FileHistoryArgs, GenerateDocsArgs,
|
||||||
MrsArgs, NotesArgs, SearchArgs, StatsArgs, SyncArgs, TimelineArgs, TraceArgs, WhoArgs,
|
IngestArgs, IssuesArgs, MrsArgs, NotesArgs, SearchArgs, StatsArgs, SyncArgs, TimelineArgs,
|
||||||
|
TraceArgs, WhoArgs,
|
||||||
};
|
};
|
||||||
use lore::core::db::{
|
use lore::core::db::{
|
||||||
LATEST_SCHEMA_VERSION, create_connection, get_schema_version, run_migrations,
|
LATEST_SCHEMA_VERSION, create_connection, get_schema_version, run_migrations,
|
||||||
@@ -39,6 +42,7 @@ use lore::core::dependent_queue::release_all_locked_jobs;
|
|||||||
use lore::core::error::{LoreError, RobotErrorOutput};
|
use lore::core::error::{LoreError, RobotErrorOutput};
|
||||||
use lore::core::logging;
|
use lore::core::logging;
|
||||||
use lore::core::metrics::MetricsLayer;
|
use lore::core::metrics::MetricsLayer;
|
||||||
|
use lore::core::path_resolver::{build_path_query, normalize_repo_path};
|
||||||
use lore::core::paths::{get_config_path, get_db_path, get_log_dir};
|
use lore::core::paths::{get_config_path, get_db_path, get_log_dir};
|
||||||
use lore::core::project::resolve_project;
|
use lore::core::project::resolve_project;
|
||||||
use lore::core::shutdown::ShutdownSignal;
|
use lore::core::shutdown::ShutdownSignal;
|
||||||
@@ -202,6 +206,7 @@ async fn main() {
|
|||||||
handle_file_history(cli.config.as_deref(), args, robot_mode)
|
handle_file_history(cli.config.as_deref(), args, robot_mode)
|
||||||
}
|
}
|
||||||
Some(Commands::Trace(args)) => handle_trace(cli.config.as_deref(), args, robot_mode),
|
Some(Commands::Trace(args)) => handle_trace(cli.config.as_deref(), args, robot_mode),
|
||||||
|
Some(Commands::Cron(args)) => handle_cron(cli.config.as_deref(), args, robot_mode),
|
||||||
Some(Commands::Drift {
|
Some(Commands::Drift {
|
||||||
entity_type,
|
entity_type,
|
||||||
iid,
|
iid,
|
||||||
@@ -921,21 +926,14 @@ fn handle_notes(
|
|||||||
|
|
||||||
let result = query_notes(&conn, &filters, &config)?;
|
let result = query_notes(&conn, &filters, &config)?;
|
||||||
|
|
||||||
let format = if robot_mode && args.format == "table" {
|
if robot_mode {
|
||||||
"json"
|
print_list_notes_json(
|
||||||
} else {
|
|
||||||
&args.format
|
|
||||||
};
|
|
||||||
|
|
||||||
match format {
|
|
||||||
"json" => print_list_notes_json(
|
|
||||||
&result,
|
&result,
|
||||||
start.elapsed().as_millis() as u64,
|
start.elapsed().as_millis() as u64,
|
||||||
args.fields.as_deref(),
|
args.fields.as_deref(),
|
||||||
),
|
);
|
||||||
"jsonl" => print_list_notes_jsonl(&result),
|
} else {
|
||||||
"csv" => print_list_notes_csv(&result),
|
print_list_notes(&result);
|
||||||
_ => print_list_notes(&result),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
@@ -1641,6 +1639,7 @@ struct VersionOutput {
|
|||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
struct VersionData {
|
struct VersionData {
|
||||||
|
name: &'static str,
|
||||||
version: String,
|
version: String,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
git_hash: Option<String>,
|
git_hash: Option<String>,
|
||||||
@@ -1654,6 +1653,7 @@ fn handle_version(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
|
|||||||
let output = VersionOutput {
|
let output = VersionOutput {
|
||||||
ok: true,
|
ok: true,
|
||||||
data: VersionData {
|
data: VersionData {
|
||||||
|
name: "lore",
|
||||||
version,
|
version,
|
||||||
git_hash: if git_hash.is_empty() {
|
git_hash: if git_hash.is_empty() {
|
||||||
None
|
None
|
||||||
@@ -1874,9 +1874,27 @@ fn handle_file_history(
|
|||||||
.effective_project(args.project.as_deref())
|
.effective_project(args.project.as_deref())
|
||||||
.map(String::from);
|
.map(String::from);
|
||||||
|
|
||||||
|
let normalized = normalize_repo_path(&args.path);
|
||||||
|
|
||||||
|
// Resolve bare filenames before querying (same path resolution as trace/who)
|
||||||
|
let db_path_tmp = get_db_path(config.storage.db_path.as_deref());
|
||||||
|
let conn_tmp = create_connection(&db_path_tmp)?;
|
||||||
|
let project_id_tmp = project
|
||||||
|
.as_deref()
|
||||||
|
.map(|p| resolve_project(&conn_tmp, p))
|
||||||
|
.transpose()?;
|
||||||
|
let pq = build_path_query(&conn_tmp, &normalized, project_id_tmp)?;
|
||||||
|
let resolved_path = if pq.is_prefix {
|
||||||
|
// Directory prefix — file-history is file-oriented, pass the raw path.
|
||||||
|
// Don't use pq.value which contains LIKE-escaped metacharacters.
|
||||||
|
normalized.trim_end_matches('/').to_string()
|
||||||
|
} else {
|
||||||
|
pq.value
|
||||||
|
};
|
||||||
|
|
||||||
let result = run_file_history(
|
let result = run_file_history(
|
||||||
&config,
|
&config,
|
||||||
&args.path,
|
&resolved_path,
|
||||||
project.as_deref(),
|
project.as_deref(),
|
||||||
args.no_follow_renames,
|
args.no_follow_renames,
|
||||||
args.merged,
|
args.merged,
|
||||||
@@ -1901,7 +1919,8 @@ fn handle_trace(
|
|||||||
let start = std::time::Instant::now();
|
let start = std::time::Instant::now();
|
||||||
let config = Config::load(config_override)?;
|
let config = Config::load(config_override)?;
|
||||||
|
|
||||||
let (path, line_requested) = parse_trace_path(&args.path);
|
let (raw_path, line_requested) = parse_trace_path(&args.path);
|
||||||
|
let normalized = normalize_repo_path(&raw_path);
|
||||||
|
|
||||||
if line_requested.is_some() && !robot_mode {
|
if line_requested.is_some() && !robot_mode {
|
||||||
eprintln!(
|
eprintln!(
|
||||||
@@ -1920,6 +1939,16 @@ fn handle_trace(
|
|||||||
.map(|p| resolve_project(&conn, p))
|
.map(|p| resolve_project(&conn, p))
|
||||||
.transpose()?;
|
.transpose()?;
|
||||||
|
|
||||||
|
// Resolve bare filenames (e.g. "operators.ts" -> "src/utils/operators.ts")
|
||||||
|
let pq = build_path_query(&conn, &normalized, project_id)?;
|
||||||
|
let path = if pq.is_prefix {
|
||||||
|
// Directory prefix — trace is file-oriented, pass the raw path.
|
||||||
|
// Don't use pq.value which contains LIKE-escaped metacharacters.
|
||||||
|
normalized.trim_end_matches('/').to_string()
|
||||||
|
} else {
|
||||||
|
pq.value
|
||||||
|
};
|
||||||
|
|
||||||
let result = run_trace(
|
let result = run_trace(
|
||||||
&conn,
|
&conn,
|
||||||
project_id,
|
project_id,
|
||||||
@@ -2152,6 +2181,24 @@ async fn handle_sync_cmd(
|
|||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Acquire file lock if --lock was passed (used by cron to skip overlapping runs)
|
||||||
|
let _sync_lock = if args.lock {
|
||||||
|
match lore::core::cron::acquire_sync_lock() {
|
||||||
|
Ok(Some(guard)) => Some(guard),
|
||||||
|
Ok(None) => {
|
||||||
|
// Another sync is running — silently exit (expected for cron)
|
||||||
|
tracing::debug!("--lock: another sync is running, skipping");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
tracing::warn!(error = %e, "--lock: failed to acquire file lock, skipping sync");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
let db_path = get_db_path(config.storage.db_path.as_deref());
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
||||||
let recorder_conn = create_connection(&db_path)?;
|
let recorder_conn = create_connection(&db_path)?;
|
||||||
let run_id = uuid::Uuid::new_v4().simple().to_string();
|
let run_id = uuid::Uuid::new_v4().simple().to_string();
|
||||||
@@ -2224,6 +2271,47 @@ async fn handle_sync_cmd(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn handle_cron(
|
||||||
|
config_override: Option<&str>,
|
||||||
|
args: CronArgs,
|
||||||
|
robot_mode: bool,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let start = std::time::Instant::now();
|
||||||
|
|
||||||
|
match args.action {
|
||||||
|
CronAction::Install { interval } => {
|
||||||
|
let result = run_cron_install(interval)?;
|
||||||
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||||
|
if robot_mode {
|
||||||
|
print_cron_install_json(&result, elapsed_ms);
|
||||||
|
} else {
|
||||||
|
print_cron_install(&result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
CronAction::Uninstall => {
|
||||||
|
let result = run_cron_uninstall()?;
|
||||||
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||||
|
if robot_mode {
|
||||||
|
print_cron_uninstall_json(&result, elapsed_ms);
|
||||||
|
} else {
|
||||||
|
print_cron_uninstall(&result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
CronAction::Status => {
|
||||||
|
let config = Config::load(config_override)?;
|
||||||
|
let info = run_cron_status(&config)?;
|
||||||
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||||
|
if robot_mode {
|
||||||
|
print_cron_status_json(&info, elapsed_ms);
|
||||||
|
} else {
|
||||||
|
print_cron_status(&info);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
struct HealthOutput {
|
struct HealthOutput {
|
||||||
ok: bool,
|
ok: bool,
|
||||||
|
|||||||
Reference in New Issue
Block a user