Compare commits
10 Commits
59088af2ab
...
v0.9.4
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
60075cd400 | ||
|
|
ddab186315 | ||
|
|
d6d1686f8e | ||
|
|
5c44ee91fb | ||
|
|
6aff96d32f | ||
|
|
06889ec85a | ||
|
|
08bda08934 | ||
|
|
32134ea933 | ||
|
|
16cc58b17f | ||
|
|
a10d870863 |
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
||||
bd-23xb
|
||||
bd-9lbr
|
||||
|
||||
2
Cargo.lock
generated
2
Cargo.lock
generated
@@ -1324,7 +1324,7 @@ checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
|
||||
|
||||
[[package]]
|
||||
name = "lore"
|
||||
version = "0.9.3"
|
||||
version = "0.9.4"
|
||||
dependencies = [
|
||||
"asupersync",
|
||||
"async-stream",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "lore"
|
||||
version = "0.9.3"
|
||||
version = "0.9.4"
|
||||
edition = "2024"
|
||||
description = "Gitlore - Local GitLab data management with semantic search"
|
||||
authors = ["Taylor Eernisse"]
|
||||
|
||||
@@ -37,11 +37,10 @@
|
||||
| 29 | *help* | — | — | — | (clap built-in) |
|
||||
| | **Hidden/deprecated:** | | | | |
|
||||
| 30 | `list` | — | `<ENTITY>` | 14 | deprecated, use issues/mrs |
|
||||
| 31 | `show` | — | `<ENTITY> <IID>` | 1 | deprecated, use issues/mrs |
|
||||
| 32 | `auth-test` | — | — | 0 | deprecated, use auth |
|
||||
| 33 | `sync-status` | — | — | 0 | deprecated, use status |
|
||||
| 34 | `backup` | — | — | 0 | Stub (not implemented) |
|
||||
| 35 | `reset` | — | — | 1 | Stub (not implemented) |
|
||||
| 31 | `auth-test` | — | — | 0 | deprecated, use auth |
|
||||
| 32 | `sync-status` | — | — | 0 | deprecated, use status |
|
||||
| 33 | `backup` | — | — | 0 | Stub (not implemented) |
|
||||
| 34 | `reset` | — | — | 1 | Stub (not implemented) |
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
1. **Make `gitlab_note_id` explicit in all note-level payloads without breaking existing consumers**
|
||||
Rationale: Your Bridge Contract already requires `gitlab_note_id`, but current plan keeps `gitlab_id` only in `notes` list while adding `gitlab_note_id` only in `show`. That forces agents to special-case commands. Add `gitlab_note_id` as an alias field everywhere note-level data appears, while keeping `gitlab_id` for compatibility.
|
||||
Rationale: Your Bridge Contract already requires `gitlab_note_id`, but current plan keeps `gitlab_id` only in `notes` list while adding `gitlab_note_id` only in detail views. That forces agents to special-case commands. Add `gitlab_note_id` as an alias field everywhere note-level data appears, while keeping `gitlab_id` for compatibility.
|
||||
|
||||
```diff
|
||||
@@ Bridge Contract (Cross-Cutting)
|
||||
|
||||
@@ -43,7 +43,7 @@ construct API calls without a separate project-ID lookup, even after path change
|
||||
**Back-compat rule**: Note payloads in the `notes` list command continue exposing `gitlab_id`
|
||||
for existing consumers, but **MUST also** expose `gitlab_note_id` with the same value. This
|
||||
ensures agents can use a single field name (`gitlab_note_id`) across all commands — `notes`,
|
||||
`show`, and `discussions --include-notes` — without special-casing by command.
|
||||
`issues <IID>`/`mrs <IID>`, and `discussions --include-notes` — without special-casing by command.
|
||||
|
||||
This contract exists so agents can deterministically construct `glab api` write calls without
|
||||
cross-referencing multiple commands. Each workstream below must satisfy these fields in its
|
||||
|
||||
@@ -107,12 +107,12 @@ Each criterion is independently testable. Implementation is complete when ALL pa
|
||||
|
||||
### AC-7: Show Issue Display (E2E)
|
||||
|
||||
**Human (`lore show issue 123`):**
|
||||
**Human (`lore issues 123`):**
|
||||
- [ ] New line after "State": `Status: In progress` (colored by `status_color` hex → nearest terminal color)
|
||||
- [ ] Status line only shown when `status_name IS NOT NULL`
|
||||
- [ ] Category shown in parens when available, lowercased: `Status: In progress (in_progress)`
|
||||
|
||||
**Robot (`lore --robot show issue 123`):**
|
||||
**Robot (`lore --robot issues 123`):**
|
||||
- [ ] JSON includes `status_name`, `status_category`, `status_color`, `status_icon_name`, `status_synced_at` fields
|
||||
- [ ] Fields are `null` (not absent) when status not available
|
||||
- [ ] `status_synced_at` is integer (ms epoch UTC) or `null` — enables freshness checks by consumers
|
||||
|
||||
@@ -7,6 +7,10 @@ struct FallbackErrorOutput {
|
||||
struct FallbackError {
|
||||
code: String,
|
||||
message: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
suggestion: Option<String>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
actions: Vec<String>,
|
||||
}
|
||||
|
||||
fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
|
||||
@@ -20,6 +24,8 @@ fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
|
||||
error: FallbackError {
|
||||
code: "INTERNAL_ERROR".to_string(),
|
||||
message: gi_error.to_string(),
|
||||
suggestion: None,
|
||||
actions: Vec::new(),
|
||||
},
|
||||
};
|
||||
serde_json::to_string(&fallback)
|
||||
@@ -59,6 +65,8 @@ fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
|
||||
error: FallbackError {
|
||||
code: "INTERNAL_ERROR".to_string(),
|
||||
message: e.to_string(),
|
||||
suggestion: None,
|
||||
actions: Vec::new(),
|
||||
},
|
||||
};
|
||||
eprintln!(
|
||||
|
||||
@@ -361,7 +361,7 @@ fn print_combined_ingest_json(
|
||||
notes_upserted: mrs.notes_upserted,
|
||||
},
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
|
||||
println!(
|
||||
@@ -735,7 +735,7 @@ async fn handle_init(
|
||||
}
|
||||
|
||||
let project_paths: Vec<String> = projects_flag
|
||||
.unwrap()
|
||||
.expect("validated: checked for None at lines 714-721")
|
||||
.split(',')
|
||||
.map(|p| p.trim().to_string())
|
||||
.filter(|p| !p.is_empty())
|
||||
@@ -743,8 +743,10 @@ async fn handle_init(
|
||||
|
||||
let result = run_init(
|
||||
InitInputs {
|
||||
gitlab_url: gitlab_url_flag.unwrap(),
|
||||
token_env_var: token_env_var_flag.unwrap(),
|
||||
gitlab_url: gitlab_url_flag
|
||||
.expect("validated: checked for None at lines 714-721"),
|
||||
token_env_var: token_env_var_flag
|
||||
.expect("validated: checked for None at lines 714-721"),
|
||||
project_paths,
|
||||
default_project: default_project_flag.clone(),
|
||||
},
|
||||
@@ -973,9 +975,7 @@ async fn handle_auth_test(
|
||||
name: result.name.clone(),
|
||||
gitlab_url: result.base_url.clone(),
|
||||
},
|
||||
meta: RobotMeta {
|
||||
elapsed_ms: start.elapsed().as_millis() as u64,
|
||||
},
|
||||
meta: RobotMeta::new(start.elapsed().as_millis() as u64),
|
||||
};
|
||||
println!("{}", serde_json::to_string(&output)?);
|
||||
} else {
|
||||
@@ -1036,9 +1036,7 @@ async fn handle_doctor(
|
||||
success: result.success,
|
||||
checks: result.checks,
|
||||
},
|
||||
meta: RobotMeta {
|
||||
elapsed_ms: start.elapsed().as_millis() as u64,
|
||||
},
|
||||
meta: RobotMeta::new(start.elapsed().as_millis() as u64),
|
||||
};
|
||||
println!("{}", serde_json::to_string(&output)?);
|
||||
} else {
|
||||
@@ -1083,9 +1081,7 @@ fn handle_version(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
|
||||
Some(git_hash)
|
||||
},
|
||||
},
|
||||
meta: RobotMeta {
|
||||
elapsed_ms: start.elapsed().as_millis() as u64,
|
||||
},
|
||||
meta: RobotMeta::new(start.elapsed().as_millis() as u64),
|
||||
};
|
||||
println!("{}", serde_json::to_string(&output)?);
|
||||
} else if git_hash.is_empty() {
|
||||
@@ -1243,9 +1239,7 @@ async fn handle_migrate(
|
||||
after_version,
|
||||
migrated: after_version > before_version,
|
||||
},
|
||||
meta: RobotMeta {
|
||||
elapsed_ms: start.elapsed().as_millis() as u64,
|
||||
},
|
||||
meta: RobotMeta::new(start.elapsed().as_millis() as u64),
|
||||
};
|
||||
println!("{}", serde_json::to_string(&output)?);
|
||||
} else if after_version > before_version {
|
||||
@@ -1326,7 +1320,7 @@ fn handle_file_history(
|
||||
|
||||
if robot_mode {
|
||||
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||
print_file_history_json(&result, elapsed_ms);
|
||||
print_file_history_json(&result, elapsed_ms)?;
|
||||
} else {
|
||||
print_file_history(&result);
|
||||
}
|
||||
@@ -1382,7 +1376,7 @@ fn handle_trace(
|
||||
|
||||
if robot_mode {
|
||||
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||
print_trace_json(&result, elapsed_ms, line_requested);
|
||||
print_trace_json(&result, elapsed_ms, line_requested)?;
|
||||
} else {
|
||||
print_trace(&result);
|
||||
}
|
||||
@@ -1960,9 +1954,7 @@ async fn handle_health(
|
||||
schema_version,
|
||||
actions,
|
||||
},
|
||||
meta: RobotMeta {
|
||||
elapsed_ms: start.elapsed().as_millis() as u64,
|
||||
},
|
||||
meta: RobotMeta::new(start.elapsed().as_millis() as u64),
|
||||
};
|
||||
println!("{}", serde_json::to_string(&output)?);
|
||||
} else {
|
||||
|
||||
@@ -115,7 +115,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"description": "List or show issues",
|
||||
"description": "List issues, or view detail with <IID>",
|
||||
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "--status <name>", "-p/--project", "-a/--author", "-A/--assignee", "-l/--label", "-m/--milestone", "--since", "--due-before", "--has-due", "--no-has-due", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
|
||||
"example": "lore --robot issues --state opened --limit 10",
|
||||
"notes": {
|
||||
@@ -128,7 +128,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
||||
"data": {"issues": "[{iid:int, title:string, state:string, author_username:string, labels:[string], assignees:[string], discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, status_name:string?}]", "total_count": "int", "showing": "int"},
|
||||
"meta": {"elapsed_ms": "int", "available_statuses": "[string] — all distinct status names in the database, for use with --status filter"}
|
||||
},
|
||||
"show": {
|
||||
"detail": {
|
||||
"ok": "bool",
|
||||
"data": "IssueDetail (full entity with description, discussions, notes, events)",
|
||||
"meta": {"elapsed_ms": "int"}
|
||||
@@ -138,7 +138,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
||||
"fields_presets": {"minimal": ["iid", "title", "state", "updated_at_iso"]}
|
||||
},
|
||||
"mrs": {
|
||||
"description": "List or show merge requests",
|
||||
"description": "List merge requests, or view detail with <IID>",
|
||||
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "-p/--project", "-a/--author", "-A/--assignee", "-r/--reviewer", "-l/--label", "--since", "-d/--draft", "-D/--no-draft", "--target", "--source", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
|
||||
"example": "lore --robot mrs --state opened",
|
||||
"response_schema": {
|
||||
@@ -147,7 +147,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
||||
"data": {"mrs": "[{iid:int, title:string, state:string, author_username:string, labels:[string], draft:bool, target_branch:string, source_branch:string, discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, reviewers:[string]}]", "total_count": "int", "showing": "int"},
|
||||
"meta": {"elapsed_ms": "int"}
|
||||
},
|
||||
"show": {
|
||||
"detail": {
|
||||
"ok": "bool",
|
||||
"data": "MrDetail (full entity with description, discussions, notes, events)",
|
||||
"meta": {"elapsed_ms": "int"}
|
||||
@@ -316,6 +316,17 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
||||
"meta": {"elapsed_ms": "int"}
|
||||
}
|
||||
},
|
||||
"explain": {
|
||||
"description": "Auto-generate a structured narrative of an issue or MR",
|
||||
"flags": ["<entity_type: issues|mrs>", "<IID>", "-p/--project <path>", "--sections <comma-list>", "--no-timeline", "--max-decisions <N>", "--since <period>"],
|
||||
"valid_sections": ["entity", "description", "key_decisions", "activity", "open_threads", "related", "timeline"],
|
||||
"example": "lore --robot explain issues 42 --sections key_decisions,activity --since 30d",
|
||||
"response_schema": {
|
||||
"ok": "bool",
|
||||
"data": {"entity": "{type:string, iid:int, title:string, state:string, author:string, assignees:[string], labels:[string], created_at:string, updated_at:string, url:string?, status_name:string?}", "description_excerpt": "string?", "key_decisions": "[{timestamp:string, actor:string, action:string, context_note:string}]?", "activity": "{state_changes:int, label_changes:int, notes:int, first_event:string?, last_event:string?}?", "open_threads": "[{discussion_id:string, started_by:string, started_at:string, note_count:int, last_note_at:string}]?", "related": "{closing_mrs:[{iid:int, title:string, state:string, web_url:string?}], related_issues:[{entity_type:string, iid:int, title:string?, reference_type:string}]}?", "timeline_excerpt": "[{timestamp:string, event_type:string, actor:string?, summary:string}]?"},
|
||||
"meta": {"elapsed_ms": "int"}
|
||||
}
|
||||
},
|
||||
"notes": {
|
||||
"description": "List notes from discussions with rich filtering",
|
||||
"flags": ["--limit/-n <N>", "--author/-a <username>", "--note-type <type>", "--contains <text>", "--for-issue <iid>", "--for-mr <iid>", "-p/--project <path>", "--since <period>", "--until <period>", "--path <filepath>", "--resolution <any|unresolved|resolved>", "--sort <created|updated>", "--asc", "--include-system", "--note-id <id>", "--gitlab-note-id <id>", "--discussion-id <id>", "--fields <list|minimal>", "--open"],
|
||||
@@ -371,7 +382,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
||||
"mentioned_in": "[{entity_type:string, project:string, iid:int, title:string, state:string, attention_state:string, attention_reason:string, updated_at_iso:string, web_url:string?}]",
|
||||
"activity": "[{timestamp_iso:string, event_type:string, entity_type:string, entity_iid:int, project:string, actor:string?, is_own:bool, summary:string, body_preview:string?}]"
|
||||
},
|
||||
"meta": {"elapsed_ms": "int"}
|
||||
"meta": {"elapsed_ms": "int", "gitlab_base_url": "string (GitLab instance URL for constructing entity links: {base_url}/{project}/-/issues/{iid})"}
|
||||
},
|
||||
"fields_presets": {
|
||||
"me_items_minimal": ["iid", "title", "attention_state", "attention_reason", "updated_at_iso"],
|
||||
@@ -385,7 +396,8 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
||||
"since_default": "1d for activity feed",
|
||||
"issue_filter": "Only In Progress / In Review status issues shown",
|
||||
"since_last_check": "Cursor-based inbox showing events since last run. Null on first run (no cursor yet). Groups events by entity (issue/MR). Sources: others' comments on your items, @mentions, assignment/review-request notes. Cursor auto-advances after each run. Use --reset-cursor to clear.",
|
||||
"cursor_persistence": "Stored per user in ~/.local/share/lore/me_cursor_<username>.json. --project filters display only for since-last-check; cursor still advances for all projects for that user."
|
||||
"cursor_persistence": "Stored per user in ~/.local/share/lore/me_cursor_<username>.json. --project filters display only for since-last-check; cursor still advances for all projects for that user.",
|
||||
"url_construction": "Use meta.gitlab_base_url + project + entity_type + iid to build links: {gitlab_base_url}/{project}/-/{issues|merge_requests}/{iid}"
|
||||
}
|
||||
},
|
||||
"robot-docs": {
|
||||
@@ -449,7 +461,8 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
||||
"17": "Not found",
|
||||
"18": "Ambiguous match",
|
||||
"19": "Health check failed",
|
||||
"20": "Config not found"
|
||||
"20": "Config not found",
|
||||
"21": "Embeddings not built"
|
||||
});
|
||||
|
||||
let workflows = serde_json::json!({
|
||||
@@ -780,42 +793,3 @@ async fn handle_list_compat(
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_show_compat(
|
||||
config_override: Option<&str>,
|
||||
entity: &str,
|
||||
iid: i64,
|
||||
project_filter: Option<&str>,
|
||||
robot_mode: bool,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let start = std::time::Instant::now();
|
||||
let config = Config::load(config_override)?;
|
||||
let project_filter = config.effective_project(project_filter);
|
||||
|
||||
match entity {
|
||||
"issue" => {
|
||||
let result = run_show_issue(&config, iid, project_filter)?;
|
||||
if robot_mode {
|
||||
print_show_issue_json(&result, start.elapsed().as_millis() as u64);
|
||||
} else {
|
||||
print_show_issue(&result);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
"mr" => {
|
||||
let result = run_show_mr(&config, iid, project_filter)?;
|
||||
if robot_mode {
|
||||
print_show_mr_json(&result, start.elapsed().as_millis() as u64);
|
||||
} else {
|
||||
print_show_mr(&result);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
_ => {
|
||||
eprintln!(
|
||||
"{}",
|
||||
Theme::error().render(&format!("Unknown entity: {entity}"))
|
||||
);
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -209,6 +209,16 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
||||
],
|
||||
),
|
||||
("drift", &["--threshold", "--project"]),
|
||||
(
|
||||
"explain",
|
||||
&[
|
||||
"--project",
|
||||
"--sections",
|
||||
"--no-timeline",
|
||||
"--max-decisions",
|
||||
"--since",
|
||||
],
|
||||
),
|
||||
(
|
||||
"notes",
|
||||
&[
|
||||
@@ -290,7 +300,6 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
||||
"--source-branch",
|
||||
],
|
||||
),
|
||||
("show", &["--project"]),
|
||||
("reset", &["--yes"]),
|
||||
(
|
||||
"me",
|
||||
@@ -389,6 +398,7 @@ const CANONICAL_SUBCOMMANDS: &[&str] = &[
|
||||
"file-history",
|
||||
"trace",
|
||||
"drift",
|
||||
"explain",
|
||||
"related",
|
||||
"cron",
|
||||
"token",
|
||||
@@ -396,7 +406,6 @@ const CANONICAL_SUBCOMMANDS: &[&str] = &[
|
||||
"backup",
|
||||
"reset",
|
||||
"list",
|
||||
"show",
|
||||
"auth-test",
|
||||
"sync-status",
|
||||
];
|
||||
|
||||
@@ -254,7 +254,7 @@ pub fn print_event_count_json(counts: &EventCounts, elapsed_ms: u64) {
|
||||
},
|
||||
total: counts.total(),
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
|
||||
match serde_json::to_string(&output) {
|
||||
@@ -325,7 +325,7 @@ pub fn print_count_json(result: &CountResult, elapsed_ms: u64) {
|
||||
system_excluded: result.system_count,
|
||||
breakdown,
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
|
||||
match serde_json::to_string(&output) {
|
||||
|
||||
@@ -80,7 +80,7 @@ pub fn print_cron_install_json(result: &CronInstallResult, elapsed_ms: u64) {
|
||||
log_path: result.log_path.display().to_string(),
|
||||
replaced: result.replaced,
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
if let Ok(json) = serde_json::to_string(&output) {
|
||||
println!("{json}");
|
||||
@@ -128,7 +128,7 @@ pub fn print_cron_uninstall_json(result: &CronUninstallResult, elapsed_ms: u64)
|
||||
action: "uninstall",
|
||||
was_installed: result.was_installed,
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
if let Ok(json) = serde_json::to_string(&output) {
|
||||
println!("{json}");
|
||||
@@ -284,7 +284,7 @@ pub fn print_cron_status_json(info: &CronStatusInfo, elapsed_ms: u64) {
|
||||
last_sync_at: info.last_sync.as_ref().map(|s| s.started_at_iso.clone()),
|
||||
last_sync_status: info.last_sync.as_ref().map(|s| s.status.clone()),
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
if let Ok(json) = serde_json::to_string(&output) {
|
||||
println!("{json}");
|
||||
|
||||
@@ -468,7 +468,7 @@ pub fn print_drift_human(response: &DriftResponse) {
|
||||
}
|
||||
|
||||
pub fn print_drift_json(response: &DriftResponse, elapsed_ms: u64) {
|
||||
let meta = RobotMeta { elapsed_ms };
|
||||
let meta = RobotMeta::new(elapsed_ms);
|
||||
let output = serde_json::json!({
|
||||
"ok": true,
|
||||
"data": response,
|
||||
|
||||
@@ -135,7 +135,7 @@ pub fn print_embed_json(result: &EmbedCommandResult, elapsed_ms: u64) {
|
||||
let output = EmbedJsonOutput {
|
||||
ok: true,
|
||||
data: result,
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
match serde_json::to_string(&output) {
|
||||
Ok(json) => println!("{json}"),
|
||||
|
||||
1977
src/cli/commands/explain.rs
Normal file
1977
src/cli/commands/explain.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -5,7 +5,7 @@ use tracing::info;
|
||||
use crate::Config;
|
||||
use crate::cli::render::{self, Icons, Theme};
|
||||
use crate::core::db::create_connection;
|
||||
use crate::core::error::Result;
|
||||
use crate::core::error::{LoreError, Result};
|
||||
use crate::core::file_history::resolve_rename_chain;
|
||||
use crate::core::paths::get_db_path;
|
||||
use crate::core::project::resolve_project;
|
||||
@@ -391,7 +391,7 @@ pub fn print_file_history(result: &FileHistoryResult) {
|
||||
|
||||
// ── Robot (JSON) output ─────────────────────────────────────────────────────
|
||||
|
||||
pub fn print_file_history_json(result: &FileHistoryResult, elapsed_ms: u64) {
|
||||
pub fn print_file_history_json(result: &FileHistoryResult, elapsed_ms: u64) -> Result<()> {
|
||||
let output = serde_json::json!({
|
||||
"ok": true,
|
||||
"data": {
|
||||
@@ -409,5 +409,10 @@ pub fn print_file_history_json(result: &FileHistoryResult, elapsed_ms: u64) {
|
||||
}
|
||||
});
|
||||
|
||||
println!("{}", serde_json::to_string(&output).unwrap_or_default());
|
||||
println!(
|
||||
"{}",
|
||||
serde_json::to_string(&output)
|
||||
.map_err(|e| LoreError::Other(format!("JSON serialization failed: {e}")))?
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -257,7 +257,7 @@ pub fn print_generate_docs_json(result: &GenerateDocsResult, elapsed_ms: u64) {
|
||||
unchanged: result.unchanged,
|
||||
errored: result.errored,
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
match serde_json::to_string(&output) {
|
||||
Ok(json) => println!("{json}"),
|
||||
|
||||
@@ -191,7 +191,7 @@ pub fn print_ingest_summary_json(result: &IngestResult, elapsed_ms: u64) {
|
||||
status_enrichment,
|
||||
status_enrichment_errors: result.status_enrichment_errors,
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
|
||||
match serde_json::to_string(&output) {
|
||||
|
||||
@@ -370,7 +370,7 @@ pub fn print_list_mrs(result: &MrListResult) {
|
||||
|
||||
pub fn print_list_mrs_json(result: &MrListResult, elapsed_ms: u64, fields: Option<&[String]>) {
|
||||
let json_result = MrListResultJson::from(result);
|
||||
let meta = RobotMeta { elapsed_ms };
|
||||
let meta = RobotMeta::new(elapsed_ms);
|
||||
let output = serde_json::json!({
|
||||
"ok": true,
|
||||
"data": json_result,
|
||||
|
||||
@@ -193,7 +193,7 @@ pub fn print_list_notes(result: &NoteListResult) {
|
||||
|
||||
pub fn print_list_notes_json(result: &NoteListResult, elapsed_ms: u64, fields: Option<&[String]>) {
|
||||
let json_result = NoteListResultJson::from(result);
|
||||
let meta = RobotMeta { elapsed_ms };
|
||||
let meta = RobotMeta::new(elapsed_ms);
|
||||
let output = serde_json::json!({
|
||||
"ok": true,
|
||||
"data": json_result,
|
||||
|
||||
@@ -247,7 +247,7 @@ pub fn run_me(config: &Config, args: &MeArgs, robot_mode: bool) -> Result<()> {
|
||||
|
||||
if robot_mode {
|
||||
let fields = args.fields.as_deref();
|
||||
render_robot::print_me_json(&dashboard, elapsed_ms, fields)?;
|
||||
render_robot::print_me_json(&dashboard, elapsed_ms, fields, &config.gitlab.base_url)?;
|
||||
} else if show_all {
|
||||
render_human::print_me_dashboard(&dashboard, single_project);
|
||||
} else {
|
||||
|
||||
@@ -15,11 +15,12 @@ pub fn print_me_json(
|
||||
dashboard: &MeDashboard,
|
||||
elapsed_ms: u64,
|
||||
fields: Option<&[String]>,
|
||||
gitlab_base_url: &str,
|
||||
) -> crate::core::error::Result<()> {
|
||||
let envelope = MeJsonEnvelope {
|
||||
ok: true,
|
||||
data: MeDataJson::from_dashboard(dashboard),
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::with_base_url(elapsed_ms, gitlab_base_url),
|
||||
};
|
||||
|
||||
let mut value = serde_json::to_value(&envelope)
|
||||
|
||||
@@ -5,6 +5,7 @@ pub mod cron;
|
||||
pub mod doctor;
|
||||
pub mod drift;
|
||||
pub mod embed;
|
||||
pub mod explain;
|
||||
pub mod file_history;
|
||||
pub mod generate_docs;
|
||||
pub mod ingest;
|
||||
@@ -35,6 +36,7 @@ pub use cron::{
|
||||
pub use doctor::{DoctorChecks, print_doctor_results, run_doctor};
|
||||
pub use drift::{DriftResponse, print_drift_human, print_drift_json, run_drift};
|
||||
pub use embed::{print_embed, print_embed_json, run_embed};
|
||||
pub use explain::{handle_explain, print_explain, print_explain_json, run_explain};
|
||||
pub use file_history::{print_file_history, print_file_history_json, run_file_history};
|
||||
pub use generate_docs::{print_generate_docs, print_generate_docs_json, run_generate_docs};
|
||||
pub use ingest::{
|
||||
|
||||
@@ -558,7 +558,7 @@ pub fn print_related_human(response: &RelatedResponse) {
|
||||
}
|
||||
|
||||
pub fn print_related_json(response: &RelatedResponse, elapsed_ms: u64) {
|
||||
let meta = RobotMeta { elapsed_ms };
|
||||
let meta = RobotMeta::new(elapsed_ms);
|
||||
let output = serde_json::json!({
|
||||
"ok": true,
|
||||
"data": response,
|
||||
|
||||
@@ -557,7 +557,7 @@ impl From<&MrNoteDetail> for MrNoteDetailJson {
|
||||
|
||||
pub fn print_show_issue_json(issue: &IssueDetail, elapsed_ms: u64) {
|
||||
let json_result = IssueDetailJson::from(issue);
|
||||
let meta = RobotMeta { elapsed_ms };
|
||||
let meta = RobotMeta::new(elapsed_ms);
|
||||
let output = serde_json::json!({
|
||||
"ok": true,
|
||||
"data": json_result,
|
||||
@@ -571,7 +571,7 @@ pub fn print_show_issue_json(issue: &IssueDetail, elapsed_ms: u64) {
|
||||
|
||||
pub fn print_show_mr_json(mr: &MrDetail, elapsed_ms: u64) {
|
||||
let json_result = MrDetailJson::from(mr);
|
||||
let meta = RobotMeta { elapsed_ms };
|
||||
let meta = RobotMeta::new(elapsed_ms);
|
||||
let output = serde_json::json!({
|
||||
"ok": true,
|
||||
"data": json_result,
|
||||
|
||||
@@ -583,7 +583,7 @@ pub fn print_stats_json(result: &StatsResult, elapsed_ms: u64) {
|
||||
}),
|
||||
}),
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
match serde_json::to_string(&output) {
|
||||
Ok(json) => println!("{json}"),
|
||||
|
||||
@@ -313,7 +313,7 @@ pub fn print_sync_status_json(result: &SyncStatusResult, elapsed_ms: u64) {
|
||||
system_notes: result.summary.system_note_count,
|
||||
},
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
|
||||
match serde_json::to_string(&output) {
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::cli::render::{Icons, Theme};
|
||||
use crate::core::error::{LoreError, Result};
|
||||
use crate::core::trace::{TraceChain, TraceResult};
|
||||
|
||||
/// Parse a path with optional `:line` suffix.
|
||||
@@ -152,7 +153,11 @@ fn truncate_body(body: &str, max: usize) -> String {
|
||||
format!("{}...", &body[..boundary])
|
||||
}
|
||||
|
||||
pub fn print_trace_json(result: &TraceResult, elapsed_ms: u64, line_requested: Option<u32>) {
|
||||
pub fn print_trace_json(
|
||||
result: &TraceResult,
|
||||
elapsed_ms: u64,
|
||||
line_requested: Option<u32>,
|
||||
) -> Result<()> {
|
||||
// Truncate discussion bodies for token efficiency in robot mode
|
||||
let chains: Vec<serde_json::Value> = result
|
||||
.trace_chains
|
||||
@@ -205,7 +210,12 @@ pub fn print_trace_json(result: &TraceResult, elapsed_ms: u64, line_requested: O
|
||||
}
|
||||
});
|
||||
|
||||
println!("{}", serde_json::to_string(&output).unwrap_or_default());
|
||||
println!(
|
||||
"{}",
|
||||
serde_json::to_string(&output)
|
||||
.map_err(|e| LoreError::Other(format!("JSON serialization failed: {e}")))?
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
@@ -376,7 +376,7 @@ pub fn print_who_json(run: &WhoRun, args: &WhoArgs, elapsed_ms: u64) {
|
||||
resolved_input,
|
||||
result: data,
|
||||
},
|
||||
meta: RobotMeta { elapsed_ms },
|
||||
meta: RobotMeta::new(elapsed_ms),
|
||||
};
|
||||
|
||||
let mut value = serde_json::to_value(&output).unwrap_or_else(|e| {
|
||||
|
||||
@@ -277,6 +277,44 @@ pub enum Commands {
|
||||
/// Trace why code was introduced: file -> MR -> issue -> discussion
|
||||
Trace(TraceArgs),
|
||||
|
||||
/// Auto-generate a structured narrative of an issue or MR
|
||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||
lore explain issues 42 # Narrative for issue #42
|
||||
lore explain mrs 99 -p group/repo # Narrative for MR !99 in specific project
|
||||
lore -J explain issues 42 # JSON output for automation
|
||||
lore explain issues 42 --sections key_decisions,open_threads # Specific sections only
|
||||
lore explain issues 42 --since 30d # Narrative scoped to last 30 days
|
||||
lore explain issues 42 --no-timeline # Skip timeline (faster)")]
|
||||
Explain {
|
||||
/// Entity type: "issues" or "mrs" (singular forms also accepted)
|
||||
#[arg(value_parser = ["issues", "mrs", "issue", "mr"])]
|
||||
entity_type: String,
|
||||
|
||||
/// Entity IID
|
||||
iid: i64,
|
||||
|
||||
/// Scope to project (fuzzy match)
|
||||
#[arg(short, long)]
|
||||
project: Option<String>,
|
||||
|
||||
/// Select specific sections (comma-separated)
|
||||
/// Valid: entity, description, key_decisions, activity, open_threads, related, timeline
|
||||
#[arg(long, value_delimiter = ',', help_heading = "Output")]
|
||||
sections: Option<Vec<String>>,
|
||||
|
||||
/// Skip timeline excerpt (faster execution)
|
||||
#[arg(long, help_heading = "Output")]
|
||||
no_timeline: bool,
|
||||
|
||||
/// Maximum key decisions to include
|
||||
#[arg(long, default_value = "10", help_heading = "Output")]
|
||||
max_decisions: usize,
|
||||
|
||||
/// Time scope for events/notes (e.g. 7d, 2w, 1m, or YYYY-MM-DD)
|
||||
#[arg(long, help_heading = "Filters")]
|
||||
since: Option<String>,
|
||||
},
|
||||
|
||||
/// Detect discussion divergence from original intent
|
||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||
lore drift issues 42 # Check drift on issue #42
|
||||
@@ -381,17 +419,6 @@ pub enum Commands {
|
||||
source_branch: Option<String>,
|
||||
},
|
||||
|
||||
#[command(hide = true)]
|
||||
Show {
|
||||
#[arg(value_parser = ["issue", "mr"])]
|
||||
entity: String,
|
||||
|
||||
iid: i64,
|
||||
|
||||
#[arg(long)]
|
||||
project: Option<String>,
|
||||
},
|
||||
|
||||
#[command(hide = true, name = "auth-test")]
|
||||
AuthTest,
|
||||
|
||||
|
||||
@@ -3,6 +3,26 @@ use serde::Serialize;
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct RobotMeta {
|
||||
pub elapsed_ms: u64,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub gitlab_base_url: Option<String>,
|
||||
}
|
||||
|
||||
impl RobotMeta {
|
||||
/// Standard meta with timing only.
|
||||
pub fn new(elapsed_ms: u64) -> Self {
|
||||
Self {
|
||||
elapsed_ms,
|
||||
gitlab_base_url: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Meta with GitLab base URL for URL construction by consumers.
|
||||
pub fn with_base_url(elapsed_ms: u64, base_url: &str) -> Self {
|
||||
Self {
|
||||
elapsed_ms,
|
||||
gitlab_base_url: Some(base_url.trim_end_matches('/').to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Filter JSON object fields in-place for `--fields` support.
|
||||
@@ -133,4 +153,27 @@ mod tests {
|
||||
let expanded = expand_fields_preset(&fields, "notes");
|
||||
assert_eq!(expanded, ["id", "body"]);
|
||||
}
|
||||
|
||||
#[test]
fn meta_new_omits_base_url() {
    // `new` must serialize timing only; the base URL field is skipped when None.
    let value = serde_json::to_value(&RobotMeta::new(42)).unwrap();
    assert_eq!(value["elapsed_ms"], 42);
    assert!(value.get("gitlab_base_url").is_none());
}
|
||||
|
||||
#[test]
fn meta_with_base_url_includes_it() {
    // A clean base URL passes through unchanged alongside the timing.
    let value =
        serde_json::to_value(&RobotMeta::with_base_url(99, "https://gitlab.example.com")).unwrap();
    assert_eq!(value["elapsed_ms"], 99);
    assert_eq!(value["gitlab_base_url"], "https://gitlab.example.com");
}
|
||||
|
||||
#[test]
fn meta_with_base_url_strips_trailing_slash() {
    // Trailing '/' must be normalized away before serialization.
    let value =
        serde_json::to_value(&RobotMeta::with_base_url(0, "https://gitlab.example.com/")).unwrap();
    assert_eq!(value["gitlab_base_url"], "https://gitlab.example.com");
}
|
||||
}
|
||||
|
||||
@@ -28,8 +28,11 @@ pub enum ErrorCode {
|
||||
OllamaUnavailable,
|
||||
OllamaModelNotFound,
|
||||
EmbeddingFailed,
|
||||
EmbeddingsNotBuilt,
|
||||
NotFound,
|
||||
Ambiguous,
|
||||
HealthCheckFailed,
|
||||
UsageError,
|
||||
SurgicalPreflightFailed,
|
||||
}
|
||||
|
||||
@@ -52,8 +55,11 @@ impl std::fmt::Display for ErrorCode {
|
||||
Self::OllamaUnavailable => "OLLAMA_UNAVAILABLE",
|
||||
Self::OllamaModelNotFound => "OLLAMA_MODEL_NOT_FOUND",
|
||||
Self::EmbeddingFailed => "EMBEDDING_FAILED",
|
||||
Self::EmbeddingsNotBuilt => "EMBEDDINGS_NOT_BUILT",
|
||||
Self::NotFound => "NOT_FOUND",
|
||||
Self::Ambiguous => "AMBIGUOUS",
|
||||
Self::HealthCheckFailed => "HEALTH_CHECK_FAILED",
|
||||
Self::UsageError => "USAGE_ERROR",
|
||||
Self::SurgicalPreflightFailed => "SURGICAL_PREFLIGHT_FAILED",
|
||||
};
|
||||
write!(f, "{code}")
|
||||
@@ -79,8 +85,11 @@ impl ErrorCode {
|
||||
Self::OllamaUnavailable => 14,
|
||||
Self::OllamaModelNotFound => 15,
|
||||
Self::EmbeddingFailed => 16,
|
||||
Self::EmbeddingsNotBuilt => 21,
|
||||
Self::NotFound => 17,
|
||||
Self::Ambiguous => 18,
|
||||
Self::HealthCheckFailed => 19,
|
||||
Self::UsageError => 2,
|
||||
// Shares exit code 6 with GitLabNotFound — same semantic category (resource not found).
|
||||
// Robot consumers distinguish via ErrorCode string, not exit code.
|
||||
Self::SurgicalPreflightFailed => 6,
|
||||
@@ -201,7 +210,7 @@ impl LoreError {
|
||||
Self::OllamaUnavailable { .. } => ErrorCode::OllamaUnavailable,
|
||||
Self::OllamaModelNotFound { .. } => ErrorCode::OllamaModelNotFound,
|
||||
Self::EmbeddingFailed { .. } => ErrorCode::EmbeddingFailed,
|
||||
Self::EmbeddingsNotBuilt => ErrorCode::EmbeddingFailed,
|
||||
Self::EmbeddingsNotBuilt => ErrorCode::EmbeddingsNotBuilt,
|
||||
Self::SurgicalPreflightFailed { .. } => ErrorCode::SurgicalPreflightFailed,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
/// Number of chunk slots reserved per document in the packed rowid space.
pub const CHUNK_ROWID_MULTIPLIER: i64 = 1000;

/// Pack a `(document_id, chunk_index)` pair into one rowid:
/// `document_id * CHUNK_ROWID_MULTIPLIER + chunk_index`.
///
/// # Panics
/// Panics when `chunk_index` is outside `[0, CHUNK_ROWID_MULTIPLIER)` or
/// when the packed value would overflow `i64`.
pub fn encode_rowid(document_id: i64, chunk_index: i64) -> i64 {
    assert!(
        (0..CHUNK_ROWID_MULTIPLIER).contains(&chunk_index),
        "chunk_index {chunk_index} out of range [0, {CHUNK_ROWID_MULTIPLIER})"
    );
    let packed = document_id
        .checked_mul(CHUNK_ROWID_MULTIPLIER)
        .and_then(|base| base.checked_add(chunk_index));
    match packed {
        Some(rowid) => rowid,
        None => {
            panic!("encode_rowid overflow: document_id={document_id}, chunk_index={chunk_index}")
        }
    }
}

/// Unpack a rowid produced by [`encode_rowid`] back into
/// `(document_id, chunk_index)`.
///
/// # Panics
/// Panics when `rowid` is negative.
pub fn decode_rowid(rowid: i64) -> (i64, i64) {
    assert!(
        rowid >= 0,
        "decode_rowid called with negative rowid: {rowid}"
    );
    (rowid / CHUNK_ROWID_MULTIPLIER, rowid % CHUNK_ROWID_MULTIPLIER)
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_encode_single_chunk() {
        assert_eq!(encode_rowid(1, 0), 1000);
    }

    #[test]
    fn test_encode_multi_chunk() {
        assert_eq!(encode_rowid(1, 5), 1005);
    }

    #[test]
    fn test_encode_specific_values() {
        assert_eq!(encode_rowid(42, 0), 42000);
        assert_eq!(encode_rowid(42, 5), 42005);
    }

    #[test]
    fn test_decode_zero_chunk() {
        assert_eq!(decode_rowid(42000), (42, 0));
    }

    #[test]
    fn test_decode_roundtrip() {
        // Exercise a grid of ids and indices; encode then decode must be identity.
        let doc_ids = [0_i64, 1, 42, 100, 999, 10000];
        let chunk_indices = [0_i64, 1, 5, 99, 999];
        for &doc_id in &doc_ids {
            for &chunk_idx in &chunk_indices {
                let decoded = decode_rowid(encode_rowid(doc_id, chunk_idx));
                assert_eq!(
                    decoded,
                    (doc_id, chunk_idx),
                    "Roundtrip failed for doc_id={doc_id}, chunk_idx={chunk_idx}"
                );
            }
        }
    }

    #[test]
    fn test_multiplier_value() {
        assert_eq!(CHUNK_ROWID_MULTIPLIER, 1000);
    }
}
|
||||
@@ -1,107 +0,0 @@
|
||||
/// Upper bound, in bytes, on the text of a single chunk produced by
/// `split_into_chunks`.
pub const CHUNK_MAX_BYTES: usize = 1_500;

/// Expected embedding vector length.
/// NOTE(review): presumably the embedding model's output dimension —
/// confirm against the model configuration.
pub const EXPECTED_DIMS: usize = 768;

/// Overlap carried between consecutive chunks.
/// NOTE(review): despite the `_CHARS` suffix, `split_into_chunks`
/// subtracts this from byte offsets.
pub const CHUNK_OVERLAP_CHARS: usize = 200;
|
||||
|
||||
/// Split `content` into `(chunk_index, text)` pieces of at most
/// `CHUNK_MAX_BYTES` bytes each, preferring to cut at a paragraph break,
/// then a sentence break, then a word break. Consecutive chunks overlap
/// by up to `CHUNK_OVERLAP_CHARS` bytes so context spanning a cut is
/// present in both neighbors.
///
/// Returns an empty vec for empty input and a single `(0, content)` chunk
/// when the whole content already fits.
pub fn split_into_chunks(content: &str) -> Vec<(usize, String)> {
    if content.is_empty() {
        return Vec::new();
    }

    // Fast path: no splitting needed.
    if content.len() <= CHUNK_MAX_BYTES {
        return vec![(0, content.to_string())];
    }

    let mut chunks: Vec<(usize, String)> = Vec::new();
    let mut start = 0;
    let mut chunk_index = 0;

    while start < content.len() {
        let remaining = &content[start..];
        // The tail fits entirely: emit it as the final chunk.
        if remaining.len() <= CHUNK_MAX_BYTES {
            chunks.push((chunk_index, remaining.to_string()));
            break;
        }

        // Clamp the window end to a UTF-8 char boundary so slicing is safe.
        let end = floor_char_boundary(content, start + CHUNK_MAX_BYTES);
        let window = &content[start..end];

        // Best available break point, from most to least preferred;
        // fall back to a hard cut at the window end.
        let split_at = find_paragraph_break(window)
            .or_else(|| find_sentence_break(window))
            .or_else(|| find_word_break(window))
            .unwrap_or(window.len());

        let chunk_text = &content[start..start + split_at];
        chunks.push((chunk_index, chunk_text.to_string()));

        // Advance by the chunk size minus the overlap, but always by at
        // least one byte so the loop cannot stall.
        // NOTE(review): the overlap is subtracted from a byte offset
        // despite the constant's `_CHARS` suffix — confirm intent.
        let advance = if split_at > CHUNK_OVERLAP_CHARS {
            split_at - CHUNK_OVERLAP_CHARS
        } else {
            split_at
        }
        .max(1);
        let old_start = start;
        start += advance;
        // Ensure start lands on a char boundary after overlap subtraction
        start = floor_char_boundary(content, start);
        // Guarantee forward progress: multi-byte chars can cause
        // floor_char_boundary to round back to old_start
        if start <= old_start {
            start = old_start
                + content[old_start..]
                    .chars()
                    .next()
                    .map_or(1, |c| c.len_utf8());
        }
        chunk_index += 1;
    }

    chunks
}
|
||||
|
||||
fn find_paragraph_break(window: &str) -> Option<usize> {
|
||||
let search_start = floor_char_boundary(window, window.len() * 2 / 3);
|
||||
window[search_start..]
|
||||
.rfind("\n\n")
|
||||
.map(|pos| search_start + pos + 2)
|
||||
.or_else(|| window[..search_start].rfind("\n\n").map(|pos| pos + 2))
|
||||
}
|
||||
|
||||
fn find_sentence_break(window: &str) -> Option<usize> {
|
||||
let search_start = floor_char_boundary(window, window.len() / 2);
|
||||
for pat in &[". ", "? ", "! "] {
|
||||
if let Some(pos) = window[search_start..].rfind(pat) {
|
||||
return Some(search_start + pos + pat.len());
|
||||
}
|
||||
}
|
||||
for pat in &[". ", "? ", "! "] {
|
||||
if let Some(pos) = window[..search_start].rfind(pat) {
|
||||
return Some(pos + pat.len());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn find_word_break(window: &str) -> Option<usize> {
|
||||
let search_start = floor_char_boundary(window, window.len() / 2);
|
||||
window[search_start..]
|
||||
.rfind(' ')
|
||||
.map(|pos| search_start + pos + 1)
|
||||
.or_else(|| window[..search_start].rfind(' ').map(|pos| pos + 1))
|
||||
}
|
||||
|
||||
/// Largest index `<= idx` that falls on a UTF-8 char boundary of `s`,
/// clamped to `s.len()`.
fn floor_char_boundary(s: &str, idx: usize) -> usize {
    if idx >= s.len() {
        return s.len();
    }
    // Index 0 is always a char boundary, so this search cannot fail.
    (0..=idx)
        .rev()
        .find(|&i| s.is_char_boundary(i))
        .expect("index 0 is always a char boundary")
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[path = "chunking_tests.rs"]
|
||||
mod tests;
|
||||
@@ -53,14 +53,8 @@ pub struct NormalizedNote {
|
||||
pub position_head_sha: Option<String>,
|
||||
}
|
||||
|
||||
fn parse_timestamp(ts: &str) -> i64 {
|
||||
match iso_to_ms(ts) {
|
||||
Some(ms) => ms,
|
||||
None => {
|
||||
warn!(timestamp = ts, "Invalid timestamp, defaulting to epoch 0");
|
||||
0
|
||||
}
|
||||
}
|
||||
fn parse_timestamp(ts: &str) -> Result<i64, String> {
|
||||
iso_to_ms_strict(ts)
|
||||
}
|
||||
|
||||
pub fn transform_discussion(
|
||||
@@ -133,7 +127,15 @@ pub fn transform_notes(
|
||||
.notes
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, note)| transform_single_note(note, local_project_id, idx as i32, now))
|
||||
.filter_map(|(idx, note)| {
|
||||
match transform_single_note(note, local_project_id, idx as i32, now) {
|
||||
Ok(n) => Some(n),
|
||||
Err(e) => {
|
||||
warn!(note_id = note.id, error = %e, "Skipping note with invalid timestamp");
|
||||
None
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
@@ -142,7 +144,10 @@ fn transform_single_note(
|
||||
local_project_id: i64,
|
||||
position: i32,
|
||||
now: i64,
|
||||
) -> NormalizedNote {
|
||||
) -> Result<NormalizedNote, String> {
|
||||
let created_at = parse_timestamp(¬e.created_at)?;
|
||||
let updated_at = parse_timestamp(¬e.updated_at)?;
|
||||
|
||||
let (
|
||||
position_old_path,
|
||||
position_new_path,
|
||||
@@ -156,7 +161,7 @@ fn transform_single_note(
|
||||
position_head_sha,
|
||||
) = extract_position_fields(¬e.position);
|
||||
|
||||
NormalizedNote {
|
||||
Ok(NormalizedNote {
|
||||
gitlab_id: note.id,
|
||||
project_id: local_project_id,
|
||||
note_type: note.note_type.clone(),
|
||||
@@ -164,8 +169,8 @@ fn transform_single_note(
|
||||
author_id: Some(note.author.id),
|
||||
author_username: note.author.username.clone(),
|
||||
body: note.body.clone(),
|
||||
created_at: parse_timestamp(¬e.created_at),
|
||||
updated_at: parse_timestamp(¬e.updated_at),
|
||||
created_at,
|
||||
updated_at,
|
||||
last_seen_at: now,
|
||||
position,
|
||||
resolvable: note.resolvable,
|
||||
@@ -182,7 +187,7 @@ fn transform_single_note(
|
||||
position_base_sha,
|
||||
position_start_sha,
|
||||
position_head_sha,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
|
||||
@@ -40,7 +40,11 @@ fn setup() -> Connection {
|
||||
}
|
||||
|
||||
fn get_discussion_id(conn: &Connection) -> i64 {
|
||||
conn.query_row("SELECT id FROM discussions LIMIT 1", [], |row| row.get(0))
|
||||
conn.query_row(
|
||||
"SELECT id FROM discussions ORDER BY id LIMIT 1",
|
||||
[],
|
||||
|row| row.get(0),
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
|
||||
@@ -786,7 +786,11 @@ mod tests {
|
||||
}
|
||||
|
||||
fn get_mr_discussion_id(conn: &Connection) -> i64 {
|
||||
conn.query_row("SELECT id FROM discussions LIMIT 1", [], |row| row.get(0))
|
||||
conn.query_row(
|
||||
"SELECT id FROM discussions ORDER BY id LIMIT 1",
|
||||
[],
|
||||
|row| row.get(0),
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
|
||||
@@ -242,14 +242,16 @@ mod tests {
|
||||
.unwrap();
|
||||
|
||||
let project_id: i64 = conn
|
||||
.query_row("SELECT id FROM projects LIMIT 1", [], |row| row.get(0))
|
||||
.query_row("SELECT id FROM projects ORDER BY id LIMIT 1", [], |row| {
|
||||
row.get(0)
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
enqueue_job(&conn, project_id, "issue", 42, 100, "resource_events", None).unwrap();
|
||||
|
||||
let job_id: i64 = conn
|
||||
.query_row(
|
||||
"SELECT id FROM pending_dependent_fetches LIMIT 1",
|
||||
"SELECT id FROM pending_dependent_fetches ORDER BY id LIMIT 1",
|
||||
[],
|
||||
|row| row.get(0),
|
||||
)
|
||||
@@ -301,7 +303,9 @@ mod tests {
|
||||
let (conn, _job_id) = setup_db_with_job();
|
||||
|
||||
let project_id: i64 = conn
|
||||
.query_row("SELECT id FROM projects LIMIT 1", [], |row| row.get(0))
|
||||
.query_row("SELECT id FROM projects ORDER BY id LIMIT 1", [], |row| {
|
||||
row.get(0)
|
||||
})
|
||||
.unwrap();
|
||||
let jobs = claim_jobs(&conn, "resource_events", project_id, 10).unwrap();
|
||||
assert_eq!(jobs.len(), 1);
|
||||
|
||||
81
src/main.rs
81
src/main.rs
@@ -13,23 +13,24 @@ use lore::cli::autocorrect::{self, CorrectionResult};
|
||||
use lore::cli::commands::{
|
||||
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
||||
NoteListFilters, RefreshOptions, RefreshResult, SearchCliFilters, SyncOptions, TimelineParams,
|
||||
delete_orphan_projects, open_issue_in_browser, open_mr_in_browser, parse_trace_path,
|
||||
print_count, print_count_json, print_cron_install, print_cron_install_json, print_cron_status,
|
||||
print_cron_status_json, print_cron_uninstall, print_cron_uninstall_json, print_doctor_results,
|
||||
print_drift_human, print_drift_json, print_dry_run_preview, print_dry_run_preview_json,
|
||||
print_embed, print_embed_json, print_event_count, print_event_count_json, print_file_history,
|
||||
print_file_history_json, print_generate_docs, print_generate_docs_json, print_ingest_summary,
|
||||
print_ingest_summary_json, print_list_issues, print_list_issues_json, print_list_mrs,
|
||||
print_list_mrs_json, print_list_notes, print_list_notes_json, print_related_human,
|
||||
print_related_json, print_search_results, print_search_results_json, print_show_issue,
|
||||
print_show_issue_json, print_show_mr, print_show_mr_json, print_stats, print_stats_json,
|
||||
print_sync, print_sync_json, print_sync_status, print_sync_status_json, print_timeline,
|
||||
print_timeline_json_with_meta, print_trace, print_trace_json, print_who_human, print_who_json,
|
||||
query_notes, run_auth_test, run_count, run_count_events, run_cron_install, run_cron_status,
|
||||
run_cron_uninstall, run_doctor, run_drift, run_embed, run_file_history, run_generate_docs,
|
||||
run_ingest, run_ingest_dry_run, run_init, run_init_refresh, run_list_issues, run_list_mrs,
|
||||
run_me, run_related, run_search, run_show_issue, run_show_mr, run_stats, run_sync,
|
||||
run_sync_status, run_timeline, run_token_set, run_token_show, run_who,
|
||||
delete_orphan_projects, handle_explain, open_issue_in_browser, open_mr_in_browser,
|
||||
parse_trace_path, print_count, print_count_json, print_cron_install, print_cron_install_json,
|
||||
print_cron_status, print_cron_status_json, print_cron_uninstall, print_cron_uninstall_json,
|
||||
print_doctor_results, print_drift_human, print_drift_json, print_dry_run_preview,
|
||||
print_dry_run_preview_json, print_embed, print_embed_json, print_event_count,
|
||||
print_event_count_json, print_file_history, print_file_history_json, print_generate_docs,
|
||||
print_generate_docs_json, print_ingest_summary, print_ingest_summary_json, print_list_issues,
|
||||
print_list_issues_json, print_list_mrs, print_list_mrs_json, print_list_notes,
|
||||
print_list_notes_json, print_related_human, print_related_json, print_search_results,
|
||||
print_search_results_json, print_show_issue, print_show_issue_json, print_show_mr,
|
||||
print_show_mr_json, print_stats, print_stats_json, print_sync, print_sync_json,
|
||||
print_sync_status, print_sync_status_json, print_timeline, print_timeline_json_with_meta,
|
||||
print_trace, print_trace_json, print_who_human, print_who_json, query_notes, run_auth_test,
|
||||
run_count, run_count_events, run_cron_install, run_cron_status, run_cron_uninstall, run_doctor,
|
||||
run_drift, run_embed, run_file_history, run_generate_docs, run_ingest, run_ingest_dry_run,
|
||||
run_init, run_init_refresh, run_list_issues, run_list_mrs, run_me, run_related, run_search,
|
||||
run_show_issue, run_show_mr, run_stats, run_sync, run_sync_status, run_timeline, run_token_set,
|
||||
run_token_show, run_who,
|
||||
};
|
||||
use lore::cli::render::{ColorMode, GlyphMode, Icons, LoreRenderer, Theme};
|
||||
use lore::cli::robot::{RobotMeta, strip_schemas};
|
||||
@@ -222,6 +223,25 @@ fn main() {
|
||||
Some(Commands::Trace(args)) => handle_trace(cli.config.as_deref(), args, robot_mode),
|
||||
Some(Commands::Cron(args)) => handle_cron(cli.config.as_deref(), args, robot_mode),
|
||||
Some(Commands::Token(args)) => handle_token(cli.config.as_deref(), args, robot_mode).await,
|
||||
Some(Commands::Explain {
|
||||
entity_type,
|
||||
iid,
|
||||
project,
|
||||
sections,
|
||||
no_timeline,
|
||||
max_decisions,
|
||||
since,
|
||||
}) => handle_explain(
|
||||
cli.config.as_deref(),
|
||||
&entity_type,
|
||||
iid,
|
||||
project.as_deref(),
|
||||
sections,
|
||||
no_timeline,
|
||||
max_decisions,
|
||||
since.as_deref(),
|
||||
robot_mode,
|
||||
),
|
||||
Some(Commands::Drift {
|
||||
entity_type,
|
||||
iid,
|
||||
@@ -365,33 +385,6 @@ fn main() {
|
||||
)
|
||||
.await
|
||||
}
|
||||
Some(Commands::Show {
|
||||
entity,
|
||||
iid,
|
||||
project,
|
||||
}) => {
|
||||
if robot_mode {
|
||||
eprintln!(
|
||||
r#"{{"warning":{{"type":"DEPRECATED","message":"'lore show' is deprecated, use 'lore {entity}s {iid}'","successor":"{entity}s"}}}}"#
|
||||
);
|
||||
} else {
|
||||
eprintln!(
|
||||
"{}",
|
||||
Theme::warning().render(&format!(
|
||||
"warning: 'lore show' is deprecated, use 'lore {}s {}'",
|
||||
entity, iid
|
||||
))
|
||||
);
|
||||
}
|
||||
handle_show_compat(
|
||||
cli.config.as_deref(),
|
||||
&entity,
|
||||
iid,
|
||||
project.as_deref(),
|
||||
robot_mode,
|
||||
)
|
||||
.await
|
||||
}
|
||||
Some(Commands::AuthTest) => {
|
||||
if robot_mode {
|
||||
eprintln!(
|
||||
|
||||
@@ -119,15 +119,12 @@ pub fn search_fts(
|
||||
}
|
||||
|
||||
pub fn generate_fallback_snippet(content_text: &str, max_chars: usize) -> String {
|
||||
if content_text.chars().count() <= max_chars {
|
||||
return content_text.to_string();
|
||||
}
|
||||
|
||||
let byte_end = content_text
|
||||
.char_indices()
|
||||
.nth(max_chars)
|
||||
.map(|(i, _)| i)
|
||||
.unwrap_or(content_text.len());
|
||||
// Use char_indices to find the boundary at max_chars in a single pass,
|
||||
// short-circuiting early for large strings instead of counting all chars.
|
||||
let byte_end = match content_text.char_indices().nth(max_chars) {
|
||||
Some((i, _)) => i,
|
||||
None => return content_text.to_string(), // content fits within max_chars
|
||||
};
|
||||
let truncated = &content_text[..byte_end];
|
||||
|
||||
if let Some(last_space) = truncated.rfind(' ') {
|
||||
|
||||
@@ -411,7 +411,9 @@ fn round_robin_select_by_discussion(
|
||||
let mut made_progress = false;
|
||||
|
||||
for (disc_idx, &discussion_id) in discussion_order.iter().enumerate() {
|
||||
let notes = by_discussion.get(&discussion_id).unwrap();
|
||||
let notes = by_discussion
|
||||
.get(&discussion_id)
|
||||
.expect("key present: inserted into by_discussion via discussion_order");
|
||||
let note_idx = indices[disc_idx];
|
||||
|
||||
if note_idx < notes.len() {
|
||||
|
||||
Reference in New Issue
Block a user