- Add HTTP adapter layer (src/http.rs) wrapping asupersync h1 client - Migrate gitlab client, graphql, and ollama to HTTP adapter - Swap entrypoint from #[tokio::main] to RuntimeBuilder::new().block_on() - Rewrite signal handler for asupersync (RuntimeHandle::spawn + ctrl_c()) - Migrate rate limiter sleeps to asupersync::time::sleep(wall_now(), d) - Add asupersync-native HTTP integration tests - Convert timeline_seed_tests to RuntimeBuilder pattern Phases 1-3 of asupersync migration (atomic: code won't compile without all pieces).
357 lines
11 KiB
Rust
use serde::Deserialize;
|
|
use serde_json::Value;
|
|
use std::time::{Duration, SystemTime};
|
|
use tracing::warn;
|
|
|
|
use crate::core::error::LoreError;
|
|
use crate::http::Client;
|
|
|
|
/// Minimal GitLab GraphQL client built on the project's HTTP adapter.
pub struct GraphqlClient {
    // Underlying HTTP client; constructed with a 30-second timeout in `new`.
    http: Client,
    // Instance base URL with any trailing '/' stripped (see `new`).
    base_url: String,
    // Access token, sent as `Authorization: Bearer <token>` on each query.
    token: String,
}
|
|
|
|
/// Result of a GraphQL query whose response contained usable `data`.
#[derive(Debug)]
pub struct GraphqlQueryResult {
    // The response's `data` field (non-null when this struct is produced).
    pub data: Value,
    // True when the response also carried a non-empty `errors` array
    // alongside `data` (GraphQL "partial success").
    pub had_partial_errors: bool,
    // `message` of the first entry in `errors`, when partial errors occurred.
    pub first_partial_error: Option<String>,
}
|
|
|
|
impl GraphqlClient {
|
|
pub fn new(base_url: &str, token: &str) -> Self {
|
|
Self {
|
|
http: Client::with_timeout(Duration::from_secs(30)),
|
|
base_url: base_url.trim_end_matches('/').to_string(),
|
|
token: token.to_string(),
|
|
}
|
|
}
|
|
|
|
pub async fn query(
|
|
&self,
|
|
query: &str,
|
|
variables: Value,
|
|
) -> crate::core::error::Result<GraphqlQueryResult> {
|
|
let url = format!("{}/api/graphql", self.base_url);
|
|
|
|
let body = serde_json::json!({
|
|
"query": query,
|
|
"variables": variables,
|
|
});
|
|
|
|
let bearer = format!("Bearer {}", self.token);
|
|
let response = self
|
|
.http
|
|
.post_json(&url, &[("Authorization", bearer.as_str())], &body)
|
|
.await?;
|
|
|
|
match response.status {
|
|
401 | 403 => return Err(LoreError::GitLabAuthFailed),
|
|
404 => {
|
|
return Err(LoreError::GitLabNotFound {
|
|
resource: "GraphQL endpoint".into(),
|
|
});
|
|
}
|
|
429 => {
|
|
let retry_after = parse_retry_after(&response);
|
|
return Err(LoreError::GitLabRateLimited { retry_after });
|
|
}
|
|
s if s >= 400 => {
|
|
return Err(LoreError::Other(format!("GraphQL HTTP {s}")));
|
|
}
|
|
_ => {}
|
|
}
|
|
|
|
let json: Value = response
|
|
.json()
|
|
.map_err(|e| LoreError::Other(format!("Failed to parse GraphQL response: {e}")))?;
|
|
|
|
let errors = json.get("errors").and_then(|e| e.as_array());
|
|
let data = json.get("data");
|
|
|
|
let first_error_msg = errors
|
|
.and_then(|arr| arr.first())
|
|
.and_then(|e| e.get("message"))
|
|
.and_then(|m| m.as_str())
|
|
.map(String::from);
|
|
|
|
let has_errors = errors.is_some_and(|arr| !arr.is_empty());
|
|
let data_present = data.is_some_and(|d| !d.is_null());
|
|
|
|
match (has_errors, data_present) {
|
|
(true, false) => Err(LoreError::Other(format!(
|
|
"GraphQL error: {}",
|
|
first_error_msg.unwrap_or_else(|| "unknown error".into())
|
|
))),
|
|
(true, true) => Ok(GraphqlQueryResult {
|
|
data: data.unwrap().clone(),
|
|
had_partial_errors: true,
|
|
first_partial_error: first_error_msg,
|
|
}),
|
|
(false, true) => Ok(GraphqlQueryResult {
|
|
data: data.unwrap().clone(),
|
|
had_partial_errors: false,
|
|
first_partial_error: None,
|
|
}),
|
|
(false, false) => Err(LoreError::Other(
|
|
"GraphQL response missing 'data' field".into(),
|
|
)),
|
|
}
|
|
}
|
|
}
|
|
|
|
fn parse_retry_after(response: &crate::http::Response) -> u64 {
|
|
let header = match response.header("retry-after") {
|
|
Some(s) => s,
|
|
None => return 60,
|
|
};
|
|
|
|
parse_retry_after_value(header, SystemTime::now())
|
|
}
|
|
|
|
fn parse_retry_after_value(header: &str, now: SystemTime) -> u64 {
|
|
let header = header.trim();
|
|
|
|
if let Ok(secs) = header.parse::<u64>() {
|
|
return secs.max(1);
|
|
}
|
|
|
|
if let Ok(date) = httpdate::parse_http_date(header) {
|
|
return match date.duration_since(now) {
|
|
Ok(delta) => delta.as_secs().max(1),
|
|
Err(_) => 1,
|
|
};
|
|
}
|
|
|
|
60
|
|
}
|
|
|
|
// ═══════════════════════════════════════════════════════════════════════
|
|
// Status Fetcher — adaptive-paging GraphQL work-item status retrieval
|
|
// ═══════════════════════════════════════════════════════════════════════
|
|
|
|
const ISSUE_STATUS_QUERY: &str = r#"
|
|
query($projectPath: ID!, $after: String, $first: Int!) {
|
|
project(fullPath: $projectPath) {
|
|
workItems(types: [ISSUE], first: $first, after: $after) {
|
|
nodes {
|
|
iid
|
|
widgets {
|
|
... on WorkItemWidgetStatus {
|
|
__typename
|
|
status {
|
|
name
|
|
category
|
|
color
|
|
iconName
|
|
}
|
|
}
|
|
__typename
|
|
}
|
|
}
|
|
pageInfo {
|
|
endCursor
|
|
hasNextPage
|
|
}
|
|
}
|
|
}
|
|
}
|
|
"#;
|
|
|
|
/// Page sizes tried largest-first; the fetch loop steps down this ladder when
/// the server rejects a page for query complexity or timeout.
const PAGE_SIZES: &[u32] = &[100, 50, 25, 10];
|
|
|
|
/// Top level of the `data` payload for [`ISSUE_STATUS_QUERY`].
#[derive(Deserialize)]
struct WorkItemsResponse {
    // `None` when the project is absent from the response.
    project: Option<ProjectNode>,
}
|
|
|
|
/// `project` object inside the work-items response.
#[derive(Deserialize)]
struct ProjectNode {
    // `None` when the response omits the work-items connection.
    #[serde(rename = "workItems")]
    work_items: Option<WorkItemConnection>,
}
|
|
|
|
/// One page of the `workItems` connection: its nodes plus cursor info.
#[derive(Deserialize)]
struct WorkItemConnection {
    nodes: Vec<WorkItemNode>,
    #[serde(rename = "pageInfo")]
    page_info: PageInfo,
}
|
|
|
|
/// A single work item: its IID (a numeric string in this API) and its
/// widgets, kept as raw JSON so unknown widget types deserialize safely.
#[derive(Deserialize)]
struct WorkItemNode {
    iid: String,
    widgets: Vec<serde_json::Value>,
}
|
|
|
|
/// Relay-style pagination info for the work-items connection.
#[derive(Deserialize)]
struct PageInfo {
    // Cursor to pass as `after` for the next page; may be absent.
    #[serde(rename = "endCursor")]
    end_cursor: Option<String>,
    #[serde(rename = "hasNextPage")]
    has_next_page: bool,
}
|
|
|
|
/// The `WorkItemWidgetStatus` widget; only its optional `status` is used.
#[derive(Deserialize)]
struct StatusWidget {
    status: Option<crate::gitlab::types::WorkItemStatus>,
}
|
|
|
|
/// Why work-item status enrichment is unavailable for a project.
#[derive(Debug, Clone)]
pub enum UnsupportedReason {
    /// The GraphQL endpoint returned 404 (mapped from `GitLabNotFound`).
    GraphqlEndpointMissing,
    /// Authentication was rejected (mapped from `GitLabAuthFailed`).
    AuthForbidden,
}
|
|
|
|
/// Aggregate outcome of a (possibly partial) status fetch.
#[derive(Debug)]
pub struct FetchStatusResult {
    // IID → status for every work item that carried a status widget.
    pub statuses: std::collections::HashMap<i64, crate::gitlab::types::WorkItemStatus>,
    // Every numeric IID seen, with or without a status widget.
    pub all_fetched_iids: std::collections::HashSet<i64>,
    // Set when the fetch stopped because the feature is unavailable.
    pub unsupported_reason: Option<UnsupportedReason>,
    // Number of pages whose responses contained GraphQL partial errors.
    pub partial_error_count: usize,
    // First partial-error message encountered, for diagnostics.
    pub first_partial_error: Option<String>,
}
|
|
|
|
/// Heuristic check for GraphQL failures caused by query-complexity limits or
/// server-side timeouts (case-insensitive substring match on the message).
fn is_complexity_or_timeout_error(msg: &str) -> bool {
    const MARKERS: [&str; 2] = ["complexity", "timeout"];
    let normalized = msg.to_ascii_lowercase();
    MARKERS.iter().any(|marker| normalized.contains(marker))
}
|
|
|
|
/// Fetches work-item statuses for all issues in `project_path`.
///
/// Convenience wrapper around [`fetch_issue_statuses_with_progress`] with no
/// per-page progress callback.
pub async fn fetch_issue_statuses(
    client: &GraphqlClient,
    project_path: &str,
) -> crate::core::error::Result<FetchStatusResult> {
    fetch_issue_statuses_with_progress(client, project_path, None).await
}
|
|
|
|
/// Pages through all issue work items of `project_path`, collecting status
/// widgets, with adaptive page sizing.
///
/// Paging starts at the largest entry of [`PAGE_SIZES`] and steps down on
/// complexity/timeout errors, resetting to the largest after each successful
/// page. `on_page`, when given, is called after each page with the running
/// count of fetched IIDs.
///
/// # Errors
/// Endpoint-missing (404) and auth failures are NOT errors: they return
/// `Ok` with `unsupported_reason` set and whatever was gathered so far.
/// A complexity/timeout error at the smallest page size, any other query
/// error, and a malformed response all propagate as `Err`.
pub async fn fetch_issue_statuses_with_progress(
    client: &GraphqlClient,
    project_path: &str,
    on_page: Option<&dyn Fn(usize)>,
) -> crate::core::error::Result<FetchStatusResult> {
    let mut statuses = std::collections::HashMap::new();
    let mut all_fetched_iids = std::collections::HashSet::new();
    let mut partial_error_count: usize = 0;
    let mut first_partial_error: Option<String> = None;
    // `None` means "first page"; also what `after` serializes to as JSON null.
    let mut cursor: Option<String> = None;
    // Index into PAGE_SIZES; bumped on complexity/timeout, reset on success.
    let mut page_size_idx: usize = 0;

    loop {
        let page_size = PAGE_SIZES[page_size_idx];
        let variables = serde_json::json!({
            "projectPath": project_path,
            "after": cursor,
            "first": page_size,
        });

        let query_result = match client.query(ISSUE_STATUS_QUERY, variables).await {
            Ok(r) => r,
            // Instance has no GraphQL endpoint: degrade gracefully with
            // whatever was collected before this page.
            Err(LoreError::GitLabNotFound { .. }) => {
                warn!(
                    "GraphQL endpoint not found for {project_path} — status enrichment unavailable"
                );
                return Ok(FetchStatusResult {
                    statuses,
                    all_fetched_iids,
                    unsupported_reason: Some(UnsupportedReason::GraphqlEndpointMissing),
                    partial_error_count,
                    first_partial_error,
                });
            }
            // Token rejected: likewise a soft failure, not an Err.
            Err(LoreError::GitLabAuthFailed) => {
                warn!("GraphQL auth forbidden for {project_path} — status enrichment unavailable");
                return Ok(FetchStatusResult {
                    statuses,
                    all_fetched_iids,
                    unsupported_reason: Some(UnsupportedReason::AuthForbidden),
                    partial_error_count,
                    first_partial_error,
                });
            }
            // Complexity/timeout: retry the SAME cursor with a smaller page,
            // until the ladder is exhausted.
            Err(LoreError::Other(msg)) if is_complexity_or_timeout_error(&msg) => {
                if page_size_idx + 1 < PAGE_SIZES.len() {
                    page_size_idx += 1;
                    warn!(
                        "GraphQL complexity/timeout error, reducing page size to {}",
                        PAGE_SIZES[page_size_idx]
                    );
                    continue;
                }
                return Err(LoreError::Other(msg));
            }
            Err(e) => return Err(e),
        };

        // Record partial errors; only the first message is kept.
        if query_result.had_partial_errors {
            partial_error_count += 1;
            if first_partial_error.is_none() {
                first_partial_error.clone_from(&query_result.first_partial_error);
            }
        }

        // A page succeeded — go back to the largest page size for the next one.
        page_size_idx = 0;

        // Deserialize the page into the typed DTOs.
        let response: WorkItemsResponse = serde_json::from_value(query_result.data)
            .map_err(|e| LoreError::Other(format!("Failed to parse GraphQL response: {e}")))?;

        // Missing project or connection ends pagination with partial results.
        let connection = match response.project.and_then(|p| p.work_items) {
            Some(c) => c,
            None => break,
        };

        for node in &connection.nodes {
            // IIDs arrive as strings; non-numeric ones are skipped, not fatal.
            let iid = match node.iid.parse::<i64>() {
                Ok(id) => id,
                Err(_) => {
                    warn!("Skipping non-numeric work item IID: {}", node.iid);
                    continue;
                }
            };
            all_fetched_iids.insert(iid);

            // Pick out the status widget by its __typename discriminator.
            for widget in &node.widgets {
                if widget.get("__typename").and_then(|t| t.as_str()) == Some("WorkItemWidgetStatus")
                    && let Ok(sw) = serde_json::from_value::<StatusWidget>(widget.clone())
                    && let Some(status) = sw.status
                {
                    statuses.insert(iid, status);
                }
            }
        }

        // Progress callback gets the cumulative IID count, not the page size.
        if let Some(cb) = &on_page {
            cb(all_fetched_iids.len());
        }

        if !connection.page_info.has_next_page {
            break;
        }

        // Guard against a server that reports hasNextPage but repeats (or
        // drops) the cursor, which would otherwise loop forever.
        let new_cursor = connection.page_info.end_cursor;
        if new_cursor.is_none() || new_cursor == cursor {
            warn!(
                "Pagination cursor stall detected for {project_path}, aborting with partial results"
            );
            break;
        }
        cursor = new_cursor;
    }

    Ok(FetchStatusResult {
        statuses,
        all_fetched_iids,
        unsupported_reason: None,
        partial_error_count,
        first_partial_error,
    })
}
|
|
|
|
// Unit tests live in a sibling file, wired in via the #[path] attribute.
#[cfg(test)]
#[path = "graphql_tests.rs"]
mod tests;
|