Files
gitlore/src/gitlab/graphql.rs
teernisse 64e73b1cab fix(graphql): handle past HTTP dates in retry-after header gracefully
Extract parse_retry_after_value(header, now) as a pure function to enable
deterministic testing of Retry-After header parsing. The previous
implementation used let-chains with SystemTime::now() inline, which made
it untestable and would panic on negative durations when the server
clock was behind or the header contained a date in the past.

Changes:
- Extract parse_retry_after_value() taking an explicit `now` parameter
- Handle past HTTP dates by returning 1 second instead of panicking on
  negative Duration (date.duration_since(now) returns Err for past dates)
- Trim whitespace from header values before parsing
- Add test for past HTTP date returning 1 second minimum
- Add test for delta-seconds with surrounding whitespace

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-14 11:25:19 -05:00

376 lines
11 KiB
Rust

use reqwest::Client;
use serde::Deserialize;
use serde_json::Value;
use std::time::{Duration, SystemTime};
use tracing::warn;
use crate::core::error::LoreError;
/// Minimal GraphQL client for a single GitLab instance.
///
/// Holds the shared HTTP client plus the normalized base URL and the
/// token attached as a Bearer credential to every request.
pub struct GraphqlClient {
    // HTTP client configured in `new` (30 s timeout, with fallback).
    http: Client,
    // Instance root URL, stored without a trailing slash.
    base_url: String,
    // Access token sent as `Authorization: Bearer <token>`.
    token: String,
}
/// Result of a GraphQL query whose response carried a usable `data` object.
#[derive(Debug)]
pub struct GraphqlQueryResult {
    // The (non-null) `data` value from the response body.
    pub data: Value,
    // True when the response also listed entries in its `errors` array
    // (a "partial success" in GraphQL terms).
    pub had_partial_errors: bool,
    // Message of the first reported error, when any were present.
    pub first_partial_error: Option<String>,
}
impl GraphqlClient {
pub fn new(base_url: &str, token: &str) -> Self {
let http = Client::builder()
.timeout(Duration::from_secs(30))
.build()
.unwrap_or_else(|_| Client::new());
Self {
http,
base_url: base_url.trim_end_matches('/').to_string(),
token: token.to_string(),
}
}
pub async fn query(
&self,
query: &str,
variables: Value,
) -> crate::core::error::Result<GraphqlQueryResult> {
let url = format!("{}/api/graphql", self.base_url);
let body = serde_json::json!({
"query": query,
"variables": variables,
});
let response = self
.http
.post(&url)
.header("Authorization", format!("Bearer {}", self.token))
.header("Content-Type", "application/json")
.json(&body)
.send()
.await
.map_err(|e| LoreError::GitLabNetworkError {
base_url: self.base_url.clone(),
source: Some(e),
})?;
let status = response.status();
match status.as_u16() {
401 | 403 => return Err(LoreError::GitLabAuthFailed),
404 => {
return Err(LoreError::GitLabNotFound {
resource: "GraphQL endpoint".into(),
});
}
429 => {
let retry_after = parse_retry_after(&response);
return Err(LoreError::GitLabRateLimited { retry_after });
}
s if s >= 400 => {
return Err(LoreError::Other(format!("GraphQL HTTP {status}")));
}
_ => {}
}
let json: Value = response
.json()
.await
.map_err(|e| LoreError::Other(format!("Failed to parse GraphQL response: {e}")))?;
let errors = json.get("errors").and_then(|e| e.as_array());
let data = json.get("data");
let first_error_msg = errors
.and_then(|arr| arr.first())
.and_then(|e| e.get("message"))
.and_then(|m| m.as_str())
.map(String::from);
let has_errors = errors.is_some_and(|arr| !arr.is_empty());
let data_present = data.is_some_and(|d| !d.is_null());
match (has_errors, data_present) {
(true, false) => Err(LoreError::Other(format!(
"GraphQL error: {}",
first_error_msg.unwrap_or_else(|| "unknown error".into())
))),
(true, true) => Ok(GraphqlQueryResult {
data: data.unwrap().clone(),
had_partial_errors: true,
first_partial_error: first_error_msg,
}),
(false, true) => Ok(GraphqlQueryResult {
data: data.unwrap().clone(),
had_partial_errors: false,
first_partial_error: None,
}),
(false, false) => Err(LoreError::Other(
"GraphQL response missing 'data' field".into(),
)),
}
}
}
/// Extracts the wait time (seconds) from a 429 response's `Retry-After`
/// header, defaulting to 60 when the header is absent or not valid ASCII.
fn parse_retry_after(response: &reqwest::Response) -> u64 {
    response
        .headers()
        .get("retry-after")
        .and_then(|v| v.to_str().ok())
        .map_or(60, |raw| parse_retry_after_value(raw, SystemTime::now()))
}
/// Parses a `Retry-After` value — either delta-seconds or an HTTP-date
/// (RFC 9110) — into a wait in whole seconds, clamped to at least 1.
///
/// `now` is an explicit parameter so the HTTP-date branch can be tested
/// deterministically. A date at or before `now` (past date, or a server
/// clock behind ours) yields 1 second; unparseable input yields 60.
fn parse_retry_after_value(header: &str, now: SystemTime) -> u64 {
    let trimmed = header.trim();
    // Delta-seconds form, e.g. "120".
    if let Ok(secs) = trimmed.parse::<u64>() {
        return secs.max(1);
    }
    // HTTP-date form, e.g. "Fri, 31 Dec 1999 23:59:59 GMT".
    match httpdate::parse_http_date(trimmed) {
        // `duration_since` errs when the date is not after `now`.
        Ok(date) => date.duration_since(now).map_or(1, |d| d.as_secs().max(1)),
        Err(_) => 60,
    }
}
// ═══════════════════════════════════════════════════════════════════════
// Status Fetcher — adaptive-paging GraphQL work-item status retrieval
// ═══════════════════════════════════════════════════════════════════════

/// Paged query for issue-type work items and their status widget.
/// `__typename` is requested on each widget so deserialization can pick
/// the `WorkItemWidgetStatus` entry out of the heterogeneous array.
const ISSUE_STATUS_QUERY: &str = r#"
query($projectPath: ID!, $after: String, $first: Int!) {
project(fullPath: $projectPath) {
workItems(types: [ISSUE], first: $first, after: $after) {
nodes {
iid
widgets {
... on WorkItemWidgetStatus {
__typename
status {
name
category
color
iconName
}
}
__typename
}
}
pageInfo {
endCursor
hasNextPage
}
}
}
}
"#;

/// Descending page sizes tried in order; the fetcher steps down this list
/// when the server reports a query complexity or timeout error.
const PAGE_SIZES: &[u32] = &[100, 50, 25, 10];
/// Top level of the `ISSUE_STATUS_QUERY` response (under `data`).
#[derive(Deserialize)]
struct WorkItemsResponse {
    // `null` when the project is missing or not visible to the token.
    project: Option<ProjectNode>,
}

#[derive(Deserialize)]
struct ProjectNode {
    #[serde(rename = "workItems")]
    work_items: Option<WorkItemConnection>,
}

/// One page of work items plus its pagination cursor info.
#[derive(Deserialize)]
struct WorkItemConnection {
    nodes: Vec<WorkItemNode>,
    #[serde(rename = "pageInfo")]
    page_info: PageInfo,
}

#[derive(Deserialize)]
struct WorkItemNode {
    // GraphQL delivers IIDs as strings; the caller parses them to i64.
    iid: String,
    // Raw widget objects; filtered by `__typename` before a second
    // deserialization into `StatusWidget`.
    widgets: Vec<serde_json::Value>,
}

#[derive(Deserialize)]
struct PageInfo {
    #[serde(rename = "endCursor")]
    end_cursor: Option<String>,
    #[serde(rename = "hasNextPage")]
    has_next_page: bool,
}

/// Shape of a `WorkItemWidgetStatus` widget; only the status payload is kept.
#[derive(Deserialize)]
struct StatusWidget {
    status: Option<crate::gitlab::types::WorkItemStatus>,
}
/// Why status enrichment is unavailable for this project.
#[derive(Debug, Clone)]
pub enum UnsupportedReason {
    // The GraphQL endpoint answered 404.
    GraphqlEndpointMissing,
    // The token was rejected (401/403) for GraphQL access.
    AuthForbidden,
}

/// Aggregate outcome of a status-fetch run; may be partial.
#[derive(Debug)]
pub struct FetchStatusResult {
    // iid -> status, for work items that carried a status widget.
    pub statuses: std::collections::HashMap<i64, crate::gitlab::types::WorkItemStatus>,
    // Every iid seen, with or without a status — lets callers distinguish
    // "has no status" from "was never fetched".
    pub all_fetched_iids: std::collections::HashSet<i64>,
    // Set when the run bailed out early for a known structural reason.
    pub unsupported_reason: Option<UnsupportedReason>,
    // Number of pages whose responses reported partial GraphQL errors.
    pub partial_error_count: usize,
    // First partial-error message encountered across all pages, if any.
    pub first_partial_error: Option<String>,
}
/// Returns true when `msg` looks like a GraphQL query-complexity or
/// timeout error (case-insensitive substring check), which signals that
/// retrying with a smaller page size may succeed.
fn is_complexity_or_timeout_error(msg: &str) -> bool {
    let normalized = msg.to_ascii_lowercase();
    ["complexity", "timeout"]
        .iter()
        .any(|needle| normalized.contains(needle))
}
/// Fetches issue statuses for `project_path` without progress reporting.
///
/// Convenience wrapper over `fetch_issue_statuses_with_progress` with no
/// per-page callback.
pub async fn fetch_issue_statuses(
    client: &GraphqlClient,
    project_path: &str,
) -> crate::core::error::Result<FetchStatusResult> {
    fetch_issue_statuses_with_progress(client, project_path, None).await
}
/// Fetches the status of every issue work item in `project_path`.
///
/// Pages through `ISSUE_STATUS_QUERY`, stepping down through `PAGE_SIZES`
/// when the server reports a complexity/timeout error and resetting to the
/// largest size after each successful page. `on_page`, when provided, is
/// invoked after each page with the cumulative count of work items seen.
///
/// A missing GraphQL endpoint (404) or rejected token (401/403) is NOT an
/// error: the function returns `Ok` with `unsupported_reason` set and
/// whatever was gathered so far, so callers can degrade gracefully.
/// All other errors propagate.
pub async fn fetch_issue_statuses_with_progress(
    client: &GraphqlClient,
    project_path: &str,
    on_page: Option<&dyn Fn(usize)>,
) -> crate::core::error::Result<FetchStatusResult> {
    let mut statuses = std::collections::HashMap::new();
    let mut all_fetched_iids = std::collections::HashSet::new();
    let mut partial_error_count: usize = 0;
    let mut first_partial_error: Option<String> = None;
    let mut cursor: Option<String> = None;
    // Index into PAGE_SIZES: bumped on complexity/timeout, reset on success.
    let mut page_size_idx: usize = 0;
    loop {
        let page_size = PAGE_SIZES[page_size_idx];
        let variables = serde_json::json!({
            "projectPath": project_path,
            "after": cursor,
            "first": page_size,
        });
        let query_result = match client.query(ISSUE_STATUS_QUERY, variables).await {
            Ok(r) => r,
            // No usable GraphQL endpoint — return partial results, not Err.
            Err(LoreError::GitLabNotFound { .. }) => {
                warn!(
                    "GraphQL endpoint not found for {project_path} — status enrichment unavailable"
                );
                return Ok(FetchStatusResult {
                    statuses,
                    all_fetched_iids,
                    unsupported_reason: Some(UnsupportedReason::GraphqlEndpointMissing),
                    partial_error_count,
                    first_partial_error,
                });
            }
            // Token rejected for GraphQL — same graceful degradation.
            Err(LoreError::GitLabAuthFailed) => {
                warn!("GraphQL auth forbidden for {project_path} — status enrichment unavailable");
                return Ok(FetchStatusResult {
                    statuses,
                    all_fetched_iids,
                    unsupported_reason: Some(UnsupportedReason::AuthForbidden),
                    partial_error_count,
                    first_partial_error,
                });
            }
            // Query too expensive: retry the SAME cursor with a smaller
            // page; only give up when the smallest size also fails.
            Err(LoreError::Other(msg)) if is_complexity_or_timeout_error(&msg) => {
                if page_size_idx + 1 < PAGE_SIZES.len() {
                    page_size_idx += 1;
                    warn!(
                        "GraphQL complexity/timeout error, reducing page size to {}",
                        PAGE_SIZES[page_size_idx]
                    );
                    continue;
                }
                return Err(LoreError::Other(msg));
            }
            Err(e) => return Err(e),
        };
        // Track partial errors, keeping only the first message.
        if query_result.had_partial_errors {
            partial_error_count += 1;
            if first_partial_error.is_none() {
                first_partial_error.clone_from(&query_result.first_partial_error);
            }
        }
        // Reset page size after success
        page_size_idx = 0;
        // Parse response
        let response: WorkItemsResponse = serde_json::from_value(query_result.data)
            .map_err(|e| LoreError::Other(format!("Failed to parse GraphQL response: {e}")))?;
        // A null project or workItems connection ends pagination quietly.
        let connection = match response.project.and_then(|p| p.work_items) {
            Some(c) => c,
            None => break,
        };
        for node in &connection.nodes {
            let iid = match node.iid.parse::<i64>() {
                Ok(id) => id,
                Err(_) => {
                    warn!("Skipping non-numeric work item IID: {}", node.iid);
                    continue;
                }
            };
            all_fetched_iids.insert(iid);
            // Pick the status widget out of the heterogeneous widget list.
            for widget in &node.widgets {
                if widget.get("__typename").and_then(|t| t.as_str()) == Some("WorkItemWidgetStatus")
                    && let Ok(sw) = serde_json::from_value::<StatusWidget>(widget.clone())
                    && let Some(status) = sw.status
                {
                    statuses.insert(iid, status);
                }
            }
        }
        // Report cumulative progress after each page.
        if let Some(cb) = &on_page {
            cb(all_fetched_iids.len());
        }
        // Pagination
        if !connection.page_info.has_next_page {
            break;
        }
        // Guard against a server returning the same (or no) cursor while
        // claiming hasNextPage — would loop forever otherwise.
        let new_cursor = connection.page_info.end_cursor;
        if new_cursor.is_none() || new_cursor == cursor {
            warn!(
                "Pagination cursor stall detected for {project_path}, aborting with partial results"
            );
            break;
        }
        cursor = new_cursor;
    }
    Ok(FetchStatusResult {
        statuses,
        all_fetched_iids,
        unsupported_reason: None,
        partial_error_count,
        first_partial_error,
    })
}
// Unit tests live in a sibling file to keep this module focused.
#[cfg(test)]
#[path = "graphql_tests.rs"]
mod tests;