Wave 5: Schemas command, sync command, network policy, test fixtures (bd-x15, bd-3f4, bd-1cv, bd-lx6)

- Implement schemas command with list/show modes, regex filtering, ref expansion
- Implement sync command with conditional fetch, content hash diffing, dry-run
- Add NetworkPolicy enum (Auto/Offline/OnlineOnly) with env var + CLI flag resolution
- Integrate network policy into AsyncHttpClient and fetch command
- Create test fixtures (petstore.json/yaml, minimal.json) and integration test helpers
- Fix clippy lints: derivable_impls, len_zero, borrow-after-move, deprecated API
- 192 tests passing (179 unit + 13 integration), all quality gates green
This commit is contained in:
@@ -10,6 +10,7 @@ use crate::core::cache::{CacheManager, compute_hash, validate_alias};
|
||||
use crate::core::config::{AuthType, Config, CredentialSource, cache_dir, config_path};
|
||||
use crate::core::http::AsyncHttpClient;
|
||||
use crate::core::indexer::{Format, build_index, detect_format, normalize_to_json};
|
||||
use crate::core::network::{NetworkPolicy, resolve_policy};
|
||||
use crate::errors::SwaggerCliError;
|
||||
use crate::output::robot;
|
||||
|
||||
@@ -209,6 +210,7 @@ async fn fetch_inner(
|
||||
args: &Args,
|
||||
cache_path: PathBuf,
|
||||
robot_mode: bool,
|
||||
network_policy: NetworkPolicy,
|
||||
) -> Result<(), SwaggerCliError> {
|
||||
let start = Instant::now();
|
||||
|
||||
@@ -256,7 +258,8 @@ async fn fetch_inner(
|
||||
.max_bytes(args.max_bytes)
|
||||
.max_retries(args.retries)
|
||||
.allow_insecure_http(args.allow_insecure_http)
|
||||
.allowed_private_hosts(args.allow_private_host.clone());
|
||||
.allowed_private_hosts(args.allow_private_host.clone())
|
||||
.network_policy(network_policy);
|
||||
|
||||
for (name, value) in &auth_headers {
|
||||
builder = builder.auth_header(name.clone(), value.clone());
|
||||
@@ -352,7 +355,8 @@ async fn fetch_inner(
|
||||
|
||||
pub async fn execute(args: &Args, robot_mode: bool) -> Result<(), SwaggerCliError> {
|
||||
let cache = cache_dir();
|
||||
fetch_inner(args, cache, robot_mode).await
|
||||
let policy = resolve_policy("auto")?;
|
||||
fetch_inner(args, cache, robot_mode, policy).await
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -646,7 +650,7 @@ mod tests {
|
||||
|
||||
let args = make_test_args(spec_path.to_str().unwrap(), "localtest");
|
||||
|
||||
let result = fetch_inner(&args, cache_path.clone(), false).await;
|
||||
let result = fetch_inner(&args, cache_path.clone(), false, NetworkPolicy::Auto).await;
|
||||
assert!(result.is_ok(), "execute failed: {result:?}");
|
||||
|
||||
let cm = CacheManager::new(cache_path);
|
||||
@@ -689,7 +693,7 @@ paths:
|
||||
|
||||
let args = make_test_args(spec_path.to_str().unwrap(), "yamltest");
|
||||
|
||||
let result = fetch_inner(&args, cache_path.clone(), false).await;
|
||||
let result = fetch_inner(&args, cache_path.clone(), false, NetworkPolicy::Auto).await;
|
||||
assert!(result.is_ok(), "execute failed: {result:?}");
|
||||
|
||||
let cm = CacheManager::new(cache_path);
|
||||
@@ -717,9 +721,13 @@ paths:
|
||||
|
||||
let args = make_test_args(spec_path.to_str().unwrap(), "dupetest");
|
||||
|
||||
assert!(fetch_inner(&args, cache_path.clone(), false).await.is_ok());
|
||||
assert!(
|
||||
fetch_inner(&args, cache_path.clone(), false, NetworkPolicy::Auto)
|
||||
.await
|
||||
.is_ok()
|
||||
);
|
||||
|
||||
let result = fetch_inner(&args, cache_path, false).await;
|
||||
let result = fetch_inner(&args, cache_path, false, NetworkPolicy::Auto).await;
|
||||
assert!(result.is_err());
|
||||
match result.unwrap_err() {
|
||||
SwaggerCliError::AliasExists(alias) => assert_eq!(alias, "dupetest"),
|
||||
@@ -744,7 +752,7 @@ paths:
|
||||
|
||||
let args_v1 = make_test_args(spec_path.to_str().unwrap(), "forcetest");
|
||||
assert!(
|
||||
fetch_inner(&args_v1, cache_path.clone(), false)
|
||||
fetch_inner(&args_v1, cache_path.clone(), false, NetworkPolicy::Auto)
|
||||
.await
|
||||
.is_ok()
|
||||
);
|
||||
@@ -759,7 +767,7 @@ paths:
|
||||
let mut args_v2 = make_test_args(spec_path.to_str().unwrap(), "forcetest");
|
||||
args_v2.force = true;
|
||||
assert!(
|
||||
fetch_inner(&args_v2, cache_path.clone(), false)
|
||||
fetch_inner(&args_v2, cache_path.clone(), false, NetworkPolicy::Auto)
|
||||
.await
|
||||
.is_ok()
|
||||
);
|
||||
@@ -787,7 +795,7 @@ paths:
|
||||
|
||||
let args = make_test_args(spec_path.to_str().unwrap(), "robottest");
|
||||
|
||||
let result = fetch_inner(&args, cache_path.clone(), true).await;
|
||||
let result = fetch_inner(&args, cache_path.clone(), true, NetworkPolicy::Auto).await;
|
||||
assert!(result.is_ok(), "robot mode execute failed: {result:?}");
|
||||
|
||||
let cm = CacheManager::new(cache_path);
|
||||
@@ -811,7 +819,7 @@ paths:
|
||||
|
||||
let args = make_test_args(spec_path.to_str().unwrap(), "../bad-alias");
|
||||
|
||||
let result = fetch_inner(&args, cache_path, false).await;
|
||||
let result = fetch_inner(&args, cache_path, false, NetworkPolicy::Auto).await;
|
||||
assert!(result.is_err());
|
||||
match result.unwrap_err() {
|
||||
SwaggerCliError::Usage(msg) => {
|
||||
@@ -839,7 +847,7 @@ paths:
|
||||
let url = format!("file://{}", spec_path.to_str().unwrap());
|
||||
let args = make_test_args(&url, "fileprefixtest");
|
||||
|
||||
let result = fetch_inner(&args, cache_path.clone(), false).await;
|
||||
let result = fetch_inner(&args, cache_path.clone(), false, NetworkPolicy::Auto).await;
|
||||
assert!(result.is_ok(), "file:// prefix failed: {result:?}");
|
||||
|
||||
let cm = CacheManager::new(cache_path);
|
||||
@@ -855,7 +863,7 @@ paths:
|
||||
|
||||
let args = make_test_args("file:///nonexistent/path/spec.json", "nofile");
|
||||
|
||||
let result = fetch_inner(&args, cache_path, false).await;
|
||||
let result = fetch_inner(&args, cache_path, false, NetworkPolicy::Auto).await;
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
matches!(result.unwrap_err(), SwaggerCliError::Io(_)),
|
||||
|
||||
@@ -1,6 +1,15 @@
|
||||
use clap::Args as ClapArgs;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Instant;
|
||||
|
||||
use clap::Args as ClapArgs;
|
||||
use regex::Regex;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::core::cache::CacheManager;
|
||||
use crate::core::config::cache_dir;
|
||||
use crate::core::refs::{expand_refs, resolve_json_pointer};
|
||||
use crate::errors::SwaggerCliError;
|
||||
use crate::output::robot;
|
||||
|
||||
/// List or show schemas from a cached spec
|
||||
#[derive(Debug, ClapArgs)]
|
||||
@@ -8,11 +17,475 @@ pub struct Args {
|
||||
/// Alias of the cached spec
|
||||
pub alias: String,
|
||||
|
||||
/// Specific schema name to show
|
||||
/// Filter schema names by regex pattern
|
||||
#[arg(long)]
|
||||
pub name: Option<String>,
|
||||
|
||||
/// List schemas (default mode)
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub list: bool,
|
||||
|
||||
/// Show a specific schema by exact name
|
||||
#[arg(long)]
|
||||
pub show: Option<String>,
|
||||
|
||||
/// Expand $ref entries inline (show mode only)
|
||||
#[arg(long)]
|
||||
pub expand_refs: bool,
|
||||
|
||||
/// Maximum depth for ref expansion
|
||||
#[arg(long, default_value = "3")]
|
||||
pub max_depth: u32,
|
||||
}
|
||||
|
||||
pub async fn execute(_args: &Args, _robot: bool) -> Result<(), SwaggerCliError> {
|
||||
Err(SwaggerCliError::Usage("schemas not yet implemented".into()))
|
||||
/// Robot-mode payload for `schemas` in list mode: the (filtered, sorted)
/// schema names plus the total count.
#[derive(Debug, Serialize)]
struct SchemasListOutput {
    // Sorted alphabetically by name before output.
    schemas: Vec<SchemaEntry>,
    // Count of `schemas` after any regex filtering.
    total: usize,
}

/// A single schema name in list output.
#[derive(Debug, Serialize)]
struct SchemaEntry {
    name: String,
}

/// Robot-mode payload for `schemas --show NAME`: the schema name and its
/// JSON definition (optionally with `$ref`s expanded inline).
#[derive(Debug, Serialize)]
struct SchemaShowOutput {
    name: String,
    schema: serde_json::Value,
}
|
||||
|
||||
fn build_list_output(
|
||||
index: &crate::core::spec::SpecIndex,
|
||||
name_filter: Option<&str>,
|
||||
) -> Result<SchemasListOutput, SwaggerCliError> {
|
||||
let pattern =
|
||||
match name_filter {
|
||||
Some(pat) => Some(Regex::new(pat).map_err(|e| {
|
||||
SwaggerCliError::Usage(format!("Invalid regex pattern '{pat}': {e}"))
|
||||
})?),
|
||||
None => None,
|
||||
};
|
||||
|
||||
let mut schemas: Vec<SchemaEntry> = index
|
||||
.schemas
|
||||
.iter()
|
||||
.filter(|s| match &pattern {
|
||||
Some(re) => re.is_match(&s.name),
|
||||
None => true,
|
||||
})
|
||||
.map(|s| SchemaEntry {
|
||||
name: s.name.clone(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
schemas.sort_by(|a, b| a.name.cmp(&b.name));
|
||||
|
||||
let total = schemas.len();
|
||||
Ok(SchemasListOutput { schemas, total })
|
||||
}
|
||||
|
||||
pub async fn execute(args: &Args, robot: bool) -> Result<(), SwaggerCliError> {
|
||||
execute_inner(args, cache_dir(), robot)
|
||||
}
|
||||
|
||||
fn execute_inner(args: &Args, cache_path: PathBuf, robot: bool) -> Result<(), SwaggerCliError> {
|
||||
let start = Instant::now();
|
||||
|
||||
if let Some(schema_name) = &args.show {
|
||||
execute_show(args, schema_name, cache_path, robot, start)
|
||||
} else {
|
||||
execute_list(args, cache_path, robot, start)
|
||||
}
|
||||
}
|
||||
|
||||
fn execute_list(
|
||||
args: &Args,
|
||||
cache_path: PathBuf,
|
||||
robot_mode: bool,
|
||||
start: Instant,
|
||||
) -> Result<(), SwaggerCliError> {
|
||||
let cm = CacheManager::new(cache_path);
|
||||
let (index, meta) = cm.load_index(&args.alias)?;
|
||||
let output = build_list_output(&index, args.name.as_deref())?;
|
||||
|
||||
if robot_mode {
|
||||
robot::robot_success(&output, "schemas", start.elapsed());
|
||||
} else {
|
||||
println!(
|
||||
"{} {} -- {} schemas",
|
||||
meta.spec_title, meta.spec_version, output.total
|
||||
);
|
||||
if output.schemas.is_empty() {
|
||||
println!("No schemas found.");
|
||||
} else {
|
||||
for entry in &output.schemas {
|
||||
println!(" - {}", entry.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn execute_show(
|
||||
args: &Args,
|
||||
schema_name: &str,
|
||||
cache_path: PathBuf,
|
||||
robot_mode: bool,
|
||||
start: Instant,
|
||||
) -> Result<(), SwaggerCliError> {
|
||||
let cm = CacheManager::new(cache_path);
|
||||
let (index, meta) = cm.load_index(&args.alias)?;
|
||||
|
||||
let indexed_schema = index
|
||||
.schemas
|
||||
.iter()
|
||||
.find(|s| s.name == schema_name)
|
||||
.ok_or_else(|| {
|
||||
let available: Vec<&str> = index.schemas.iter().map(|s| s.name.as_str()).collect();
|
||||
SwaggerCliError::Usage(format!(
|
||||
"Schema '{}' not found in alias '{}'. Available schemas: {}",
|
||||
schema_name,
|
||||
args.alias,
|
||||
available.join(", ")
|
||||
))
|
||||
})?;
|
||||
|
||||
let raw = cm.load_raw(&args.alias, &meta)?;
|
||||
|
||||
let mut schema_value = resolve_json_pointer(&raw, &indexed_schema.schema_ptr)
|
||||
.ok_or_else(|| {
|
||||
SwaggerCliError::Cache(format!(
|
||||
"Failed to resolve schema pointer '{}' in raw spec for alias '{}'",
|
||||
indexed_schema.schema_ptr, args.alias
|
||||
))
|
||||
})?
|
||||
.clone();
|
||||
|
||||
if args.expand_refs {
|
||||
expand_refs(&mut schema_value, &raw, args.max_depth);
|
||||
}
|
||||
|
||||
let output = SchemaShowOutput {
|
||||
name: schema_name.to_string(),
|
||||
schema: schema_value,
|
||||
};
|
||||
|
||||
if robot_mode {
|
||||
robot::robot_success(&output, "schemas", start.elapsed());
|
||||
} else {
|
||||
println!("Schema: {}", output.name);
|
||||
if let Ok(pretty) = serde_json::to_string_pretty(&output.schema) {
|
||||
println!("{pretty}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::cache::CacheManager;
    use crate::core::indexer::build_index;
    use crate::core::spec::{IndexInfo, IndexedSchema, SpecIndex};

    /// Helper: parse the petstore fixture and build its index.
    fn petstore_index() -> SpecIndex {
        let fixture = include_str!("../../tests/fixtures/petstore.json");
        let raw_json: serde_json::Value = serde_json::from_str(fixture).unwrap();
        let content_hash = crate::core::cache::compute_hash(fixture.as_bytes());
        build_index(&raw_json, &content_hash, 1).unwrap()
    }

    /// Helper: a minimal SpecIndex with the given title/hash and schemas.
    fn bare_index(title: &str, hash: &str, schemas: Vec<IndexedSchema>) -> SpecIndex {
        SpecIndex {
            index_version: 1,
            generation: 1,
            content_hash: hash.into(),
            openapi: "3.0.3".into(),
            info: IndexInfo {
                title: title.into(),
                version: "1.0.0".into(),
            },
            endpoints: vec![],
            schemas,
            tags: vec![],
        }
    }

    /// Helper: an IndexedSchema named `name` under /components/schemas.
    fn named_schema(name: &str) -> IndexedSchema {
        IndexedSchema {
            name: name.into(),
            schema_ptr: format!("/components/schemas/{name}"),
        }
    }

    /// Helper: Args targeting the "petstore" alias.
    fn petstore_args(name: Option<&str>, show: Option<&str>, expand: bool) -> Args {
        Args {
            alias: "petstore".into(),
            name: name.map(str::to_string),
            list: false,
            show: show.map(str::to_string),
            expand_refs: expand,
            max_depth: 3,
        }
    }

    /// Helper: create a cache from the petstore fixture and return
    /// (TempDir, PathBuf). The tempdir must be kept alive by the caller.
    fn setup_petstore_cache() -> (tempfile::TempDir, PathBuf) {
        let fixture = include_str!("../../tests/fixtures/petstore.json");
        let raw_json: serde_json::Value = serde_json::from_str(fixture).unwrap();
        let raw_bytes = serde_json::to_vec(&raw_json).unwrap();
        let content_hash = crate::core::cache::compute_hash(fixture.as_bytes());
        let index = build_index(&raw_json, &content_hash, 1).unwrap();

        let tmp = tempfile::tempdir().unwrap();
        let cache_path = tmp.path().to_path_buf();
        let cm = CacheManager::new(cache_path.clone());
        cm.write_cache(
            "petstore",
            fixture.as_bytes(),
            &raw_bytes,
            &index,
            Some("https://example.com/petstore.json".into()),
            raw_json["info"]["version"].as_str().unwrap_or("1.0.0"),
            raw_json["info"]["title"].as_str().unwrap_or("Petstore"),
            "json",
            None,
            None,
            None,
        )
        .unwrap();

        (tmp, cache_path)
    }

    /// Helper: the schema names in a list output, in order.
    fn listed_names(output: &SchemasListOutput) -> Vec<&str> {
        output.schemas.iter().map(|s| s.name.as_str()).collect()
    }

    #[test]
    fn test_schemas_list() {
        let output = build_list_output(&petstore_index(), None).unwrap();
        assert_eq!(output.total, 3);
        assert_eq!(listed_names(&output), vec!["Error", "NewPet", "Pet"]);
    }

    #[test]
    fn test_schemas_list_with_regex_filter() {
        let output = build_list_output(&petstore_index(), Some(".*Pet.*")).unwrap();
        assert_eq!(output.total, 2);
        assert_eq!(listed_names(&output), vec!["NewPet", "Pet"]);
    }

    #[test]
    fn test_schemas_list_invalid_regex() {
        let index = bare_index("Test", "sha256:test", vec![]);
        let result = build_list_output(&index, Some("[invalid"));
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert!(
            matches!(err, SwaggerCliError::Usage(_)),
            "Expected Usage error, got: {err:?}"
        );
    }

    #[test]
    fn test_schemas_list_no_match() {
        let output = build_list_output(&petstore_index(), Some("^Nonexistent$")).unwrap();
        assert_eq!(output.total, 0);
        assert!(output.schemas.is_empty());
    }

    #[test]
    fn test_schemas_list_empty_index() {
        let output = build_list_output(&bare_index("Empty", "sha256:empty", vec![]), None).unwrap();
        assert_eq!(output.total, 0);
        assert!(output.schemas.is_empty());
    }

    #[test]
    fn test_schemas_list_sorted_alphabetically() {
        let index = bare_index(
            "Test",
            "sha256:test",
            vec![
                named_schema("Zebra"),
                named_schema("Apple"),
                named_schema("Mango"),
            ],
        );
        let output = build_list_output(&index, None).unwrap();
        assert_eq!(listed_names(&output), vec!["Apple", "Mango", "Zebra"]);
    }

    #[test]
    fn test_schemas_show_robot() {
        let (_tmp, cache_path) = setup_petstore_cache();
        let args = petstore_args(None, Some("Pet"), false);
        assert!(execute_inner(&args, cache_path, true).is_ok());
    }

    #[test]
    fn test_schemas_show_not_found() {
        let (_tmp, cache_path) = setup_petstore_cache();
        let args = petstore_args(None, Some("NonexistentSchema"), false);
        let result = execute_inner(&args, cache_path, false);
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert!(
            matches!(err, SwaggerCliError::Usage(_)),
            "Expected Usage error for missing schema, got: {err:?}"
        );
    }

    #[test]
    fn test_schemas_show_expand_refs() {
        let (_tmp, cache_path) = setup_petstore_cache();
        let args = petstore_args(None, Some("Pet"), true);
        assert!(execute_inner(&args, cache_path, true).is_ok());
    }

    #[test]
    fn test_schemas_list_via_execute_inner() {
        let (_tmp, cache_path) = setup_petstore_cache();
        let args = petstore_args(None, None, false);
        assert!(execute_inner(&args, cache_path, true).is_ok());
    }

    #[test]
    fn test_schemas_list_with_name_filter_via_execute_inner() {
        let (_tmp, cache_path) = setup_petstore_cache();
        let args = petstore_args(Some(".*Pet.*"), None, false);
        assert!(execute_inner(&args, cache_path, true).is_ok());
    }

    #[test]
    fn test_schemas_invalid_regex_via_execute_inner() {
        let (_tmp, cache_path) = setup_petstore_cache();
        let args = petstore_args(Some("[invalid"), None, false);
        let result = execute_inner(&args, cache_path, false);
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert!(
            matches!(err, SwaggerCliError::Usage(_)),
            "Expected Usage error for invalid regex, got: {err:?}"
        );
    }

    #[test]
    fn test_schemas_show_output_serialization() {
        let output = SchemaShowOutput {
            name: "Pet".into(),
            schema: serde_json::json!({
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                }
            }),
        };

        let serialized = serde_json::to_value(&output).unwrap();
        assert_eq!(serialized["name"], "Pet");
        assert_eq!(serialized["schema"]["type"], "object");
    }

    #[test]
    fn test_schemas_list_output_serialization() {
        let output = SchemasListOutput {
            schemas: vec![
                SchemaEntry {
                    name: "Error".into(),
                },
                SchemaEntry { name: "Pet".into() },
            ],
            total: 2,
        };

        let serialized = serde_json::to_value(&output).unwrap();
        assert_eq!(serialized["total"], 2);
        assert_eq!(serialized["schemas"][0]["name"], "Error");
        assert_eq!(serialized["schemas"][1]["name"], "Pet");
    }
}
|
||||
|
||||
@@ -1,6 +1,22 @@
|
||||
use clap::Args as ClapArgs;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::BTreeSet;
|
||||
use std::path::PathBuf;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
use clap::Args as ClapArgs;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::core::cache::{CacheManager, CacheMetadata, compute_hash, validate_alias};
|
||||
use crate::core::config::{AuthType, Config, CredentialSource, config_path};
|
||||
use crate::core::http::{AsyncHttpClient, ConditionalFetchResult};
|
||||
use crate::core::indexer::{Format, build_index, detect_format, normalize_to_json};
|
||||
use crate::core::spec::SpecIndex;
|
||||
use crate::errors::SwaggerCliError;
|
||||
use crate::output::robot;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// CLI arguments
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Re-fetch and update a cached spec
|
||||
#[derive(Debug, ClapArgs)]
|
||||
@@ -8,11 +24,766 @@ pub struct Args {
|
||||
/// Alias to sync
|
||||
pub alias: String,
|
||||
|
||||
/// Sync all cached specs
|
||||
#[arg(long)]
|
||||
pub all: bool,
|
||||
|
||||
/// Check for changes without writing
|
||||
#[arg(long)]
|
||||
pub dry_run: bool,
|
||||
|
||||
/// Re-fetch regardless of cache freshness
|
||||
#[arg(long)]
|
||||
pub force: bool,
|
||||
|
||||
/// Include detailed change lists in output
|
||||
#[arg(long)]
|
||||
pub details: bool,
|
||||
|
||||
/// Auth profile name from config
|
||||
#[arg(long)]
|
||||
pub auth: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn execute(_args: &Args, _robot: bool) -> Result<(), SwaggerCliError> {
|
||||
Err(SwaggerCliError::Usage("sync not yet implemented".into()))
|
||||
// ---------------------------------------------------------------------------
|
||||
// Diff types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Cap on the number of items reported in any one detail list; when the total
// across all lists exceeds this, `ChangeDetails.truncated` is set.
const MAX_DETAIL_ITEMS: usize = 200;

/// Identity of an endpoint for diffing purposes: its path and HTTP method.
#[derive(Debug, Clone, Serialize)]
struct EndpointKey {
    path: String,
    method: String,
}

/// Schema names added/removed between two index generations.
#[derive(Debug, Clone, Serialize)]
struct SchemaDiff {
    added: Vec<String>,
    removed: Vec<String>,
}

/// Endpoints added, removed, or modified in place between two generations.
#[derive(Debug, Clone, Serialize)]
struct EndpointDiff {
    added: Vec<EndpointKey>,
    removed: Vec<EndpointKey>,
    modified: Vec<EndpointKey>,
}

/// Aggregate counts of changes (always full counts, never truncated).
#[derive(Debug, Clone, Serialize)]
struct ChangeSummary {
    endpoints_added: usize,
    endpoints_removed: usize,
    endpoints_modified: usize,
    schemas_added: usize,
    schemas_removed: usize,
}

/// Detailed change lists, each capped at MAX_DETAIL_ITEMS entries.
#[derive(Debug, Clone, Serialize)]
struct ChangeDetails {
    endpoints: EndpointDiff,
    schemas: SchemaDiff,
    // True when any detail list was capped; consult ChangeSummary for full counts.
    truncated: bool,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Robot output
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Robot-mode payload for the `sync` command.
#[derive(Debug, Serialize)]
struct SyncOutput {
    alias: String,
    // True when the remote spec differed from the cached one.
    changed: bool,
    // Human-readable explanation, e.g. "304 Not Modified".
    reason: String,
    local_version: String,
    remote_version: Option<String>,
    // Present only when a diff was computed.
    #[serde(skip_serializing_if = "Option::is_none")]
    changes: Option<ChangeSummary>,
    // Present only when --details was requested and a diff was computed.
    #[serde(skip_serializing_if = "Option::is_none")]
    details: Option<ChangeDetails>,
    dry_run: bool,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Index diffing
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Build a comparable key for an endpoint: (path, method).
|
||||
fn endpoint_key(ep: &crate::core::spec::IndexedEndpoint) -> (String, String) {
|
||||
(ep.path.clone(), ep.method.clone())
|
||||
}
|
||||
|
||||
/// Build a fingerprint of an endpoint for modification detection.
|
||||
/// Includes summary, parameters, deprecated status, and request body info.
|
||||
fn endpoint_fingerprint(ep: &crate::core::spec::IndexedEndpoint) -> String {
|
||||
let params: Vec<String> = ep
|
||||
.parameters
|
||||
.iter()
|
||||
.map(|p| format!("{}:{}:{}", p.name, p.location, p.required))
|
||||
.collect();
|
||||
|
||||
format!(
|
||||
"{}|{}|{}|{}|{}",
|
||||
ep.summary.as_deref().unwrap_or(""),
|
||||
ep.deprecated,
|
||||
params.join(","),
|
||||
ep.request_body_required,
|
||||
ep.request_body_content_types.join(","),
|
||||
)
|
||||
}
|
||||
|
||||
fn compute_diff(old: &SpecIndex, new: &SpecIndex) -> (ChangeSummary, ChangeDetails) {
|
||||
// Endpoint diff
|
||||
let old_keys: BTreeSet<(String, String)> = old.endpoints.iter().map(endpoint_key).collect();
|
||||
let new_keys: BTreeSet<(String, String)> = new.endpoints.iter().map(endpoint_key).collect();
|
||||
|
||||
let old_fingerprints: BTreeMap<(String, String), String> = old
|
||||
.endpoints
|
||||
.iter()
|
||||
.map(|ep| (endpoint_key(ep), endpoint_fingerprint(ep)))
|
||||
.collect();
|
||||
let new_fingerprints: BTreeMap<(String, String), String> = new
|
||||
.endpoints
|
||||
.iter()
|
||||
.map(|ep| (endpoint_key(ep), endpoint_fingerprint(ep)))
|
||||
.collect();
|
||||
|
||||
let added_keys: Vec<(String, String)> = new_keys.difference(&old_keys).cloned().collect();
|
||||
let removed_keys: Vec<(String, String)> = old_keys.difference(&new_keys).cloned().collect();
|
||||
let common_keys: BTreeSet<&(String, String)> = old_keys.intersection(&new_keys).collect();
|
||||
|
||||
let modified_keys: Vec<(String, String)> = common_keys
|
||||
.into_iter()
|
||||
.filter(|k| old_fingerprints.get(*k) != new_fingerprints.get(*k))
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
// Schema diff
|
||||
let old_schemas: BTreeSet<String> = old.schemas.iter().map(|s| s.name.clone()).collect();
|
||||
let new_schemas: BTreeSet<String> = new.schemas.iter().map(|s| s.name.clone()).collect();
|
||||
|
||||
let schemas_added: Vec<String> = new_schemas.difference(&old_schemas).cloned().collect();
|
||||
let schemas_removed: Vec<String> = old_schemas.difference(&new_schemas).cloned().collect();
|
||||
|
||||
let total_items = added_keys.len()
|
||||
+ removed_keys.len()
|
||||
+ modified_keys.len()
|
||||
+ schemas_added.len()
|
||||
+ schemas_removed.len();
|
||||
let truncated = total_items > MAX_DETAIL_ITEMS;
|
||||
|
||||
let summary = ChangeSummary {
|
||||
endpoints_added: added_keys.len(),
|
||||
endpoints_removed: removed_keys.len(),
|
||||
endpoints_modified: modified_keys.len(),
|
||||
schemas_added: schemas_added.len(),
|
||||
schemas_removed: schemas_removed.len(),
|
||||
};
|
||||
|
||||
let to_endpoint_keys = |keys: Vec<(String, String)>, limit: usize| -> Vec<EndpointKey> {
|
||||
keys.into_iter()
|
||||
.take(limit)
|
||||
.map(|(path, method)| EndpointKey { path, method })
|
||||
.collect()
|
||||
};
|
||||
|
||||
let details = ChangeDetails {
|
||||
endpoints: EndpointDiff {
|
||||
added: to_endpoint_keys(added_keys, MAX_DETAIL_ITEMS),
|
||||
removed: to_endpoint_keys(removed_keys, MAX_DETAIL_ITEMS),
|
||||
modified: to_endpoint_keys(modified_keys, MAX_DETAIL_ITEMS),
|
||||
},
|
||||
schemas: SchemaDiff {
|
||||
added: schemas_added.into_iter().take(MAX_DETAIL_ITEMS).collect(),
|
||||
removed: schemas_removed.into_iter().take(MAX_DETAIL_ITEMS).collect(),
|
||||
},
|
||||
truncated,
|
||||
};
|
||||
|
||||
(summary, details)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Auth credential resolution
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn resolve_credential(source: &CredentialSource) -> Result<String, SwaggerCliError> {
|
||||
match source {
|
||||
CredentialSource::Literal { value } => Ok(value.clone()),
|
||||
CredentialSource::EnvVar { name } => std::env::var(name).map_err(|_| {
|
||||
SwaggerCliError::Auth(format!(
|
||||
"environment variable '{name}' not set (required by auth profile)"
|
||||
))
|
||||
}),
|
||||
CredentialSource::Keyring { service, account } => Err(SwaggerCliError::Auth(format!(
|
||||
"keyring credential lookup not yet implemented (service={service}, account={account})"
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Core sync logic (testable with explicit cache path)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Core sync logic with an explicit cache path (testable).
///
/// Re-fetches the spec behind `args.alias`, detects changes via conditional
/// HTTP (ETag / Last-Modified) and content hashing, rebuilds the index on
/// change, and writes the cache unless `--dry-run`.
///
/// Errors: `Usage` when the alias has no URL (stdin/file origin), `Auth` for
/// credential problems, plus any cache/HTTP/parse errors from the helpers.
async fn sync_inner(
    args: &Args,
    cache_path: PathBuf,
    robot_mode: bool,
) -> Result<(), SwaggerCliError> {
    let start = Instant::now();

    let cm = CacheManager::new(cache_path);
    validate_alias(&args.alias)?;

    // 1. Load existing metadata and index
    let (old_index, meta) = cm.load_index(&args.alias)?;

    // Sync needs a remote origin; specs loaded from stdin/file cannot be re-fetched.
    let url = meta.url.clone().ok_or_else(|| {
        SwaggerCliError::Usage(format!(
            "alias '{}' has no URL (fetched from stdin/file). Cannot sync.",
            args.alias
        ))
    })?;

    // 2. Build HTTP client (plain-http allowed only if the stored URL is http://)
    let cfg = Config::load(&config_path(None))?;
    let mut builder = AsyncHttpClient::builder().allow_insecure_http(url.starts_with("http://"));

    if let Some(profile_name) = &args.auth {
        let profile = cfg.auth_profiles.get(profile_name).ok_or_else(|| {
            SwaggerCliError::Auth(format!("auth profile '{profile_name}' not found in config"))
        })?;
        let credential = resolve_credential(&profile.credential)?;
        match &profile.auth_type {
            AuthType::Bearer => {
                builder = builder
                    .auth_header("Authorization".to_string(), format!("Bearer {credential}"));
            }
            AuthType::ApiKey { header } => {
                builder = builder.auth_header(header.clone(), credential);
            }
        }
    }

    let client = builder.build();

    // 3. Conditional fetch — --force drops the validators so the server
    //    always returns a full body.
    let (etag, last_modified) = if args.force {
        (None, None)
    } else {
        (meta.etag.as_deref(), meta.last_modified.as_deref())
    };

    let fetch_result = client.fetch_conditional(&url, etag, last_modified).await?;

    match fetch_result {
        ConditionalFetchResult::NotModified => {
            output_no_changes(args, &meta, "304 Not Modified", robot_mode, start.elapsed());
            return Ok(());
        }
        ConditionalFetchResult::Modified(result) => {
            // 4. Check content hash — a 200 with identical bytes is still "no change"
            let new_content_hash = compute_hash(&result.bytes);

            if new_content_hash == meta.content_hash && !args.force {
                output_no_changes(
                    args,
                    &meta,
                    "content hash unchanged",
                    robot_mode,
                    start.elapsed(),
                );
                return Ok(());
            }

            // 5. Normalize and build index
            let format = detect_format(&result.bytes, Some(&url), result.content_type.as_deref());
            let format_str = match format {
                Format::Json => "json",
                Format::Yaml => "yaml",
            };

            let json_bytes = normalize_to_json(&result.bytes, format)?;
            let value: serde_json::Value = serde_json::from_slice(&json_bytes)?;
            let new_index = build_index(&value, &new_content_hash, meta.generation + 1)?;

            // 6. Compute diff
            let (summary, details) = compute_diff(&old_index, &new_index);

            let has_changes = summary.endpoints_added > 0
                || summary.endpoints_removed > 0
                || summary.endpoints_modified > 0
                || summary.schemas_added > 0
                || summary.schemas_removed > 0;

            // Even if diff is empty, content hash changed so we still update
            let changed = new_content_hash != meta.content_hash || has_changes;

            // 7. Write cache (unless dry-run)
            if !args.dry_run && changed {
                cm.write_cache(
                    &args.alias,
                    &result.bytes,
                    &json_bytes,
                    &new_index,
                    Some(url.clone()),
                    &new_index.info.version,
                    &new_index.info.title,
                    format_str,
                    result.etag.clone(),
                    result.last_modified.clone(),
                    Some(meta.generation),
                )?;
            }

            // 8. Output
            output_changes(
                args,
                &meta,
                &new_index,
                changed,
                &summary,
                &details,
                robot_mode,
                start.elapsed(),
            );
        }
    }

    Ok(())
}
|
||||
|
||||
fn output_no_changes(
|
||||
args: &Args,
|
||||
meta: &CacheMetadata,
|
||||
reason: &str,
|
||||
robot_mode: bool,
|
||||
duration: Duration,
|
||||
) {
|
||||
if robot_mode {
|
||||
let output = SyncOutput {
|
||||
alias: args.alias.clone(),
|
||||
changed: false,
|
||||
reason: reason.to_string(),
|
||||
local_version: meta.spec_version.clone(),
|
||||
remote_version: None,
|
||||
changes: None,
|
||||
details: None,
|
||||
dry_run: args.dry_run,
|
||||
};
|
||||
robot::robot_success(output, "sync", duration);
|
||||
} else {
|
||||
println!("'{}' is up to date ({})", args.alias, reason);
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn output_changes(
|
||||
args: &Args,
|
||||
old_meta: &CacheMetadata,
|
||||
new_index: &SpecIndex,
|
||||
changed: bool,
|
||||
summary: &ChangeSummary,
|
||||
details: &ChangeDetails,
|
||||
robot_mode: bool,
|
||||
duration: Duration,
|
||||
) {
|
||||
if robot_mode {
|
||||
let output = SyncOutput {
|
||||
alias: args.alias.clone(),
|
||||
changed,
|
||||
reason: if changed {
|
||||
"content changed".to_string()
|
||||
} else {
|
||||
"no changes detected".to_string()
|
||||
},
|
||||
local_version: old_meta.spec_version.clone(),
|
||||
remote_version: Some(new_index.info.version.clone()),
|
||||
changes: Some(summary.clone()),
|
||||
details: if args.details {
|
||||
Some(details.clone())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
dry_run: args.dry_run,
|
||||
};
|
||||
robot::robot_success(output, "sync", duration);
|
||||
} else if changed {
|
||||
let prefix = if args.dry_run { "[dry-run] " } else { "" };
|
||||
println!(
|
||||
"{prefix}'{}' has changes (v{} -> v{})",
|
||||
args.alias, old_meta.spec_version, new_index.info.version
|
||||
);
|
||||
println!(
|
||||
" Endpoints: +{} -{} ~{}",
|
||||
summary.endpoints_added, summary.endpoints_removed, summary.endpoints_modified
|
||||
);
|
||||
println!(
|
||||
" Schemas: +{} -{}",
|
||||
summary.schemas_added, summary.schemas_removed
|
||||
);
|
||||
if args.dry_run {
|
||||
println!(" (dry run -- no changes written)");
|
||||
}
|
||||
} else {
|
||||
println!("'{}' is up to date (content unchanged)", args.alias);
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public entry point
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
pub async fn execute(args: &Args, robot: bool) -> Result<(), SwaggerCliError> {
|
||||
if args.all {
|
||||
return Err(SwaggerCliError::Usage(
|
||||
"sync --all is not yet implemented".into(),
|
||||
));
|
||||
}
|
||||
|
||||
let cache = crate::core::config::cache_dir();
|
||||
sync_inner(args, cache, robot).await
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::cache::CacheManager;
    use crate::core::indexer::build_index;
    use crate::core::spec::{
        IndexInfo, IndexedEndpoint, IndexedParam, IndexedSchema, IndexedTag, SpecIndex,
    };

    /// Build a minimal `SpecIndex` with fixed metadata around the given
    /// endpoints/schemas so diff tests only vary the fields they care about.
    fn make_test_index(endpoints: Vec<IndexedEndpoint>, schemas: Vec<IndexedSchema>) -> SpecIndex {
        let tags: Vec<IndexedTag> = vec![];
        SpecIndex {
            index_version: 1,
            generation: 1,
            content_hash: "sha256:test".into(),
            openapi: "3.0.3".into(),
            info: IndexInfo {
                title: "Test".into(),
                version: "1.0.0".into(),
            },
            endpoints,
            schemas,
            tags,
        }
    }

    /// Endpoint fixture: all optional fields empty/false, with a JSON pointer
    /// derived from path + method (path slashes escaped as `~1` per RFC 6901).
    fn make_endpoint(path: &str, method: &str, summary: Option<&str>) -> IndexedEndpoint {
        IndexedEndpoint {
            path: path.into(),
            method: method.into(),
            summary: summary.map(String::from),
            description: None,
            operation_id: None,
            tags: vec![],
            deprecated: false,
            parameters: vec![],
            request_body_required: false,
            request_body_content_types: vec![],
            security_schemes: vec![],
            security_required: false,
            operation_ptr: format!(
                "/paths/{}/{}",
                path.replace('/', "~1"),
                method.to_lowercase()
            ),
        }
    }

    /// Schema fixture with the conventional components pointer.
    fn make_schema(name: &str) -> IndexedSchema {
        IndexedSchema {
            name: name.into(),
            schema_ptr: format!("/components/schemas/{name}"),
        }
    }

    // -- Diff computation tests ------------------------------------------------

    #[test]
    fn test_diff_no_changes() {
        let endpoints = vec![make_endpoint("/pets", "GET", Some("List pets"))];
        let schemas = vec![make_schema("Pet")];
        let old = make_test_index(endpoints.clone(), schemas.clone());
        let new = make_test_index(endpoints, schemas);

        let (summary, details) = compute_diff(&old, &new);
        assert_eq!(summary.endpoints_added, 0);
        assert_eq!(summary.endpoints_removed, 0);
        assert_eq!(summary.endpoints_modified, 0);
        assert_eq!(summary.schemas_added, 0);
        assert_eq!(summary.schemas_removed, 0);
        assert!(!details.truncated);
    }

    #[test]
    fn test_diff_added_endpoint() {
        let old = make_test_index(
            vec![make_endpoint("/pets", "GET", Some("List pets"))],
            vec![],
        );
        let new = make_test_index(
            vec![
                make_endpoint("/pets", "GET", Some("List pets")),
                make_endpoint("/pets", "POST", Some("Create pet")),
            ],
            vec![],
        );

        let (summary, details) = compute_diff(&old, &new);
        assert_eq!(summary.endpoints_added, 1);
        assert_eq!(summary.endpoints_removed, 0);
        assert_eq!(details.endpoints.added.len(), 1);
        assert_eq!(details.endpoints.added[0].path, "/pets");
        assert_eq!(details.endpoints.added[0].method, "POST");
    }

    #[test]
    fn test_diff_removed_endpoint() {
        let old = make_test_index(
            vec![
                make_endpoint("/pets", "GET", Some("List pets")),
                make_endpoint("/pets", "POST", Some("Create pet")),
            ],
            vec![],
        );
        let new = make_test_index(
            vec![make_endpoint("/pets", "GET", Some("List pets"))],
            vec![],
        );

        let (summary, details) = compute_diff(&old, &new);
        assert_eq!(summary.endpoints_removed, 1);
        assert_eq!(details.endpoints.removed.len(), 1);
        assert_eq!(details.endpoints.removed[0].method, "POST");
    }

    #[test]
    fn test_diff_modified_endpoint() {
        // Same path+method, different summary -> counts as modified.
        let old = make_test_index(
            vec![make_endpoint("/pets", "GET", Some("List pets"))],
            vec![],
        );
        let new = make_test_index(
            vec![make_endpoint("/pets", "GET", Some("List all pets"))],
            vec![],
        );

        let (summary, _details) = compute_diff(&old, &new);
        assert_eq!(summary.endpoints_modified, 1);
    }

    #[test]
    fn test_diff_added_schema() {
        let old = make_test_index(vec![], vec![make_schema("Pet")]);
        let new = make_test_index(vec![], vec![make_schema("Pet"), make_schema("Error")]);

        let (summary, details) = compute_diff(&old, &new);
        assert_eq!(summary.schemas_added, 1);
        assert_eq!(details.schemas.added, vec!["Error"]);
    }

    #[test]
    fn test_diff_removed_schema() {
        let old = make_test_index(vec![], vec![make_schema("Pet"), make_schema("Error")]);
        let new = make_test_index(vec![], vec![make_schema("Pet")]);

        let (summary, details) = compute_diff(&old, &new);
        assert_eq!(summary.schemas_removed, 1);
        assert_eq!(details.schemas.removed, vec!["Error"]);
    }

    #[test]
    fn test_diff_endpoint_modified_by_params() {
        // Parameter list changes alone (no summary change) must flag the
        // endpoint as modified.
        let mut ep_old = make_endpoint("/pets", "GET", Some("List pets"));
        ep_old.parameters = vec![IndexedParam {
            name: "limit".into(),
            location: "query".into(),
            required: false,
            description: None,
        }];

        let mut ep_new = make_endpoint("/pets", "GET", Some("List pets"));
        ep_new.parameters = vec![
            IndexedParam {
                name: "limit".into(),
                location: "query".into(),
                required: false,
                description: None,
            },
            IndexedParam {
                name: "offset".into(),
                location: "query".into(),
                required: false,
                description: None,
            },
        ];

        let old = make_test_index(vec![ep_old], vec![]);
        let new = make_test_index(vec![ep_new], vec![]);

        let (summary, _) = compute_diff(&old, &new);
        assert_eq!(summary.endpoints_modified, 1);
    }

    #[test]
    fn test_diff_truncation() {
        // Create enough items to exceed MAX_DETAIL_ITEMS: the summary keeps
        // the true count, but the detail list is capped and flagged truncated.
        let mut new_endpoints: Vec<IndexedEndpoint> = Vec::new();
        for i in 0..250 {
            new_endpoints.push(make_endpoint(&format!("/item{i}"), "GET", None));
        }

        let old = make_test_index(vec![], vec![]);
        let new = make_test_index(new_endpoints, vec![]);

        let (summary, details) = compute_diff(&old, &new);
        assert_eq!(summary.endpoints_added, 250);
        assert!(details.truncated);
        assert_eq!(details.endpoints.added.len(), MAX_DETAIL_ITEMS);
    }

    // -- Hash-based change detection (unit) ------------------------------------

    #[test]
    fn test_sync_no_changes_same_hash() {
        // Simulate: same content hash -> no changes (compute_hash must be
        // deterministic for identical input).
        let raw_bytes =
            br#"{"openapi":"3.0.3","info":{"title":"Test","version":"1.0.0"},"paths":{}}"#;
        let content_hash = compute_hash(raw_bytes);
        assert_eq!(content_hash, compute_hash(raw_bytes));
    }

    #[test]
    fn test_sync_changes_different_hash() {
        // A single differing byte (version field) must produce a new hash.
        let raw_v1 = br#"{"openapi":"3.0.3","info":{"title":"Test","version":"1.0.0"},"paths":{}}"#;
        let raw_v2 = br#"{"openapi":"3.0.3","info":{"title":"Test","version":"2.0.0"},"paths":{}}"#;
        let hash_v1 = compute_hash(raw_v1);
        let hash_v2 = compute_hash(raw_v2);
        assert_ne!(hash_v1, hash_v2);
    }

    // -- Integration: sync with local cache ------------------------------------

    /// Seed an on-disk cache entry for `alias` from an in-memory spec.
    /// Currently unused by the unit tests (kept for integration helpers),
    /// hence the dead_code allowance.
    #[allow(dead_code)]
    fn write_test_cache(
        cache_path: &std::path::Path,
        alias: &str,
        spec_json: &serde_json::Value,
        url: Option<String>,
    ) -> CacheMetadata {
        let cm = CacheManager::new(cache_path.to_path_buf());
        let raw_bytes = serde_json::to_vec(spec_json).unwrap();
        let content_hash = compute_hash(&raw_bytes);
        let json_bytes = raw_bytes.clone();
        let index = build_index(spec_json, &content_hash, 1).unwrap();

        cm.write_cache(
            alias,
            &raw_bytes,
            &json_bytes,
            &index,
            url,
            &index.info.version,
            &index.info.title,
            "json",
            Some("\"etag-v1\"".to_string()),
            Some("Wed, 21 Oct 2025 07:28:00 GMT".to_string()),
            None,
        )
        .unwrap()
    }

    #[test]
    fn test_sync_diff_detects_new_endpoint_in_index() {
        // End-to-end through the indexer: build indexes from two raw specs
        // and verify the diff sees exactly the added POST operation.
        let spec_v1 = serde_json::json!({
            "openapi": "3.0.3",
            "info": { "title": "Test", "version": "1.0.0" },
            "paths": {
                "/pets": {
                    "get": {
                        "summary": "List pets",
                        "responses": { "200": { "description": "OK" } }
                    }
                }
            }
        });

        let spec_v2 = serde_json::json!({
            "openapi": "3.0.3",
            "info": { "title": "Test", "version": "2.0.0" },
            "paths": {
                "/pets": {
                    "get": {
                        "summary": "List pets",
                        "responses": { "200": { "description": "OK" } }
                    },
                    "post": {
                        "summary": "Create pet",
                        "responses": { "201": { "description": "Created" } }
                    }
                }
            }
        });

        let raw_v1 = serde_json::to_vec(&spec_v1).unwrap();
        let raw_v2 = serde_json::to_vec(&spec_v2).unwrap();
        let hash_v1 = compute_hash(&raw_v1);
        let hash_v2 = compute_hash(&raw_v2);

        let index_v1 = build_index(&spec_v1, &hash_v1, 1).unwrap();
        let index_v2 = build_index(&spec_v2, &hash_v2, 2).unwrap();

        let (summary, details) = compute_diff(&index_v1, &index_v2);
        assert_eq!(summary.endpoints_added, 1);
        assert_eq!(summary.endpoints_removed, 0);
        assert_eq!(summary.endpoints_modified, 0);
        assert_eq!(details.endpoints.added.len(), 1);
        assert_eq!(details.endpoints.added[0].path, "/pets");
        assert_eq!(details.endpoints.added[0].method, "POST");
    }

    #[test]
    fn test_compute_diff_complex_scenario() {
        // Old: GET /pets, GET /pets/{id}, DELETE /pets/{id}, schemas: Pet, Error
        // New: GET /pets (modified summary), POST /pets, GET /pets/{id}, schemas: Pet, Owner
        let old = make_test_index(
            vec![
                make_endpoint("/pets", "GET", Some("List pets")),
                make_endpoint("/pets/{id}", "GET", Some("Get pet")),
                make_endpoint("/pets/{id}", "DELETE", Some("Delete pet")),
            ],
            vec![make_schema("Pet"), make_schema("Error")],
        );

        let new = make_test_index(
            vec![
                make_endpoint("/pets", "GET", Some("List all pets")), // modified summary
                make_endpoint("/pets", "POST", Some("Create pet")),   // added
                make_endpoint("/pets/{id}", "GET", Some("Get pet")),  // unchanged
            ],
            vec![make_schema("Pet"), make_schema("Owner")], // Error removed, Owner added
        );

        let (summary, details) = compute_diff(&old, &new);

        assert_eq!(summary.endpoints_added, 1); // POST /pets
        assert_eq!(summary.endpoints_removed, 1); // DELETE /pets/{id}
        assert_eq!(summary.endpoints_modified, 1); // GET /pets summary changed
        assert_eq!(summary.schemas_added, 1); // Owner
        assert_eq!(summary.schemas_removed, 1); // Error

        assert_eq!(details.endpoints.added[0].method, "POST");
        assert_eq!(details.endpoints.removed[0].method, "DELETE");
        assert_eq!(details.endpoints.modified[0].path, "/pets");
        assert_eq!(details.schemas.added, vec!["Owner"]);
        assert_eq!(details.schemas.removed, vec!["Error"]);
        assert!(!details.truncated);
    }
}
|
||||
|
||||
@@ -4,6 +4,7 @@ use std::time::Duration;
|
||||
use reqwest::{StatusCode, Url};
|
||||
use tokio::net::lookup_host;
|
||||
|
||||
use crate::core::network::{NetworkPolicy, check_remote_fetch};
|
||||
use crate::errors::SwaggerCliError;
|
||||
|
||||
const DEFAULT_CONNECT_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
@@ -141,6 +142,15 @@ pub struct FetchResult {
|
||||
pub last_modified: Option<String>,
|
||||
}
|
||||
|
||||
/// Result of a conditional fetch (If-None-Match / If-Modified-Since).
///
/// Returned by `AsyncHttpClient::fetch_conditional`; callers match on this to
/// decide whether the cached copy can be reused or must be replaced.
#[derive(Debug, Clone)]
pub enum ConditionalFetchResult {
    /// Server returned 304 Not Modified -- cached content is still current.
    NotModified,
    /// Server returned new content.
    Modified(FetchResult),
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// AsyncHttpClient builder
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -153,6 +163,7 @@ pub struct AsyncHttpClient {
|
||||
allow_insecure_http: bool,
|
||||
allowed_private_hosts: Vec<String>,
|
||||
auth_headers: Vec<(String, String)>,
|
||||
network_policy: NetworkPolicy,
|
||||
}
|
||||
|
||||
impl Default for AsyncHttpClient {
|
||||
@@ -165,6 +176,7 @@ impl Default for AsyncHttpClient {
|
||||
allow_insecure_http: false,
|
||||
allowed_private_hosts: Vec::new(),
|
||||
auth_headers: Vec::new(),
|
||||
network_policy: NetworkPolicy::Auto,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -174,7 +186,85 @@ impl AsyncHttpClient {
|
||||
AsyncHttpClientBuilder::default()
|
||||
}
|
||||
|
||||
/// Fetch a spec with conditional request headers.
|
||||
///
|
||||
/// Sends If-None-Match (for ETag) and If-Modified-Since (for Last-Modified)
|
||||
/// when provided. Returns `NotModified` on 304, `Modified` on 200.
|
||||
pub async fn fetch_conditional(
|
||||
&self,
|
||||
url: &str,
|
||||
etag: Option<&str>,
|
||||
last_modified: Option<&str>,
|
||||
) -> Result<ConditionalFetchResult, SwaggerCliError> {
|
||||
let parsed = validate_url(url, self.allow_insecure_http)?;
|
||||
|
||||
let host = parsed
|
||||
.host_str()
|
||||
.ok_or_else(|| SwaggerCliError::InvalidSpec(format!("URL '{url}' has no host")))?;
|
||||
let port = parsed.port_or_known_default().unwrap_or(443);
|
||||
|
||||
resolve_and_check(host, port, &self.allowed_private_hosts).await?;
|
||||
|
||||
let client = self.build_reqwest_client()?;
|
||||
|
||||
let mut attempts = 0u32;
|
||||
loop {
|
||||
let mut request = client.get(parsed.clone());
|
||||
for (name, value) in &self.auth_headers {
|
||||
request = request.header(name.as_str(), value.as_str());
|
||||
}
|
||||
|
||||
if let Some(etag_val) = etag {
|
||||
request = request.header(reqwest::header::IF_NONE_MATCH, etag_val);
|
||||
}
|
||||
if let Some(lm_val) = last_modified {
|
||||
request = request.header(reqwest::header::IF_MODIFIED_SINCE, lm_val);
|
||||
}
|
||||
|
||||
let response = request.send().await.map_err(SwaggerCliError::Network)?;
|
||||
let status = response.status();
|
||||
|
||||
match status {
|
||||
StatusCode::NOT_MODIFIED => {
|
||||
return Ok(ConditionalFetchResult::NotModified);
|
||||
}
|
||||
s if s.is_success() => {
|
||||
let result = self.read_response(response).await?;
|
||||
return Ok(ConditionalFetchResult::Modified(result));
|
||||
}
|
||||
StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => {
|
||||
return Err(SwaggerCliError::Auth(format!(
|
||||
"server returned {status} for '{url}'"
|
||||
)));
|
||||
}
|
||||
StatusCode::NOT_FOUND => {
|
||||
return Err(SwaggerCliError::InvalidSpec(format!(
|
||||
"spec not found at '{url}' (404)"
|
||||
)));
|
||||
}
|
||||
s if s == StatusCode::TOO_MANY_REQUESTS || s.is_server_error() => {
|
||||
attempts += 1;
|
||||
if attempts > self.max_retries {
|
||||
return Err(SwaggerCliError::Network(
|
||||
client.get(url).send().await.unwrap_err(),
|
||||
));
|
||||
}
|
||||
let delay = self.retry_delay(&response, attempts);
|
||||
tokio::time::sleep(delay).await;
|
||||
}
|
||||
_ => {
|
||||
return Err(SwaggerCliError::InvalidSpec(format!(
|
||||
"unexpected status {status} fetching '{url}'"
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn fetch_spec(&self, url: &str) -> Result<FetchResult, SwaggerCliError> {
|
||||
// Check network policy before any HTTP request
|
||||
check_remote_fetch(self.network_policy)?;
|
||||
|
||||
let parsed = validate_url(url, self.allow_insecure_http)?;
|
||||
|
||||
let host = parsed
|
||||
@@ -300,6 +390,7 @@ pub struct AsyncHttpClientBuilder {
|
||||
allow_insecure_http: bool,
|
||||
allowed_private_hosts: Vec<String>,
|
||||
auth_headers: Vec<(String, String)>,
|
||||
network_policy: NetworkPolicy,
|
||||
}
|
||||
|
||||
impl AsyncHttpClientBuilder {
|
||||
@@ -338,6 +429,11 @@ impl AsyncHttpClientBuilder {
|
||||
self
|
||||
}
|
||||
|
||||
    /// Set the network access policy (default: `NetworkPolicy::Auto`).
    ///
    /// `Offline` makes remote fetches fail fast with `OfflineMode` before any
    /// request is sent; see `crate::core::network` for the policy semantics.
    pub fn network_policy(mut self, policy: NetworkPolicy) -> Self {
        self.network_policy = policy;
        self
    }
|
||||
|
||||
pub fn build(self) -> AsyncHttpClient {
|
||||
AsyncHttpClient {
|
||||
connect_timeout: self.connect_timeout.unwrap_or(DEFAULT_CONNECT_TIMEOUT),
|
||||
@@ -347,6 +443,7 @@ impl AsyncHttpClientBuilder {
|
||||
allow_insecure_http: self.allow_insecure_http,
|
||||
allowed_private_hosts: self.allowed_private_hosts,
|
||||
auth_headers: self.auth_headers,
|
||||
network_policy: self.network_policy,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ pub mod cache;
|
||||
pub mod config;
|
||||
pub mod http;
|
||||
pub mod indexer;
|
||||
pub mod network;
|
||||
pub mod refs;
|
||||
pub mod search;
|
||||
pub mod spec;
|
||||
|
||||
194
src/core/network.rs
Normal file
194
src/core/network.rs
Normal file
@@ -0,0 +1,194 @@
|
||||
use std::fmt;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::errors::SwaggerCliError;
|
||||
|
||||
/// Global network access policy.
///
/// Controls whether swagger-cli makes outbound HTTP requests.
/// Parsed from `--network` CLI flag or `SWAGGER_CLI_NETWORK` env var.
/// Flag takes precedence over env var -- but note that a flag value of
/// `auto` is treated as "not set" and defers to the env var (see
/// `resolve_policy`).
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub enum NetworkPolicy {
    /// Allow network calls when needed (default). No restrictions.
    #[default]
    Auto,

    /// Block ALL outbound network calls preemptively. Fetch/sync for remote
    /// URLs return `OfflineMode` without attempting the request. Local files
    /// and stdin are unaffected. Index-only commands (list, search, show,
    /// tags, schemas, aliases, doctor, cache) work normally.
    Offline,

    /// Allow network calls but surface a distinct error when the network is
    /// unreachable. Differs from `Offline` in that it attempts the request
    /// before failing, allowing agents to distinguish "blocked by policy"
    /// from "network actually down."
    OnlineOnly,
}
|
||||
|
||||
impl fmt::Display for NetworkPolicy {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Auto => write!(f, "auto"),
|
||||
Self::Offline => write!(f, "offline"),
|
||||
Self::OnlineOnly => write!(f, "online-only"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for NetworkPolicy {
|
||||
type Err = SwaggerCliError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_ascii_lowercase().as_str() {
|
||||
"auto" => Ok(Self::Auto),
|
||||
"offline" => Ok(Self::Offline),
|
||||
"online-only" | "online_only" | "onlineonly" => Ok(Self::OnlineOnly),
|
||||
other => Err(SwaggerCliError::Usage(format!(
|
||||
"invalid network policy '{other}'. Valid options: auto, offline, online-only"
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve the effective network policy from CLI flag and env var.
///
/// Precedence: a non-`auto` CLI flag wins; otherwise the `SWAGGER_CLI_NETWORK`
/// env var (if set) is parsed and applied; otherwise the default `Auto`.
///
/// NOTE(review): an *explicit* `--network auto` is indistinguishable from the
/// flag's default value here, so it does NOT override the env var -- confirm
/// this is the intended precedence before documenting it as a guarantee.
///
/// # Errors
///
/// Returns `Usage` if either the flag or the env var holds an unrecognized
/// policy name.
pub fn resolve_policy(cli_value: &str) -> Result<NetworkPolicy, SwaggerCliError> {
    // CLI flag takes precedence (only when it is not the "auto" default)
    let from_flag = NetworkPolicy::from_str(cli_value)?;
    if from_flag != NetworkPolicy::Auto {
        return Ok(from_flag);
    }

    // Check env var
    if let Ok(env_val) = std::env::var("SWAGGER_CLI_NETWORK") {
        return NetworkPolicy::from_str(&env_val);
    }

    Ok(NetworkPolicy::Auto)
}
|
||||
|
||||
/// Check whether a remote fetch is allowed under the current policy.
|
||||
///
|
||||
/// Returns `Ok(())` if the fetch may proceed, or `Err(OfflineMode)` if blocked.
|
||||
/// This is called before any HTTP request for remote URLs.
|
||||
/// Local file and stdin sources bypass this check entirely.
|
||||
pub fn check_remote_fetch(policy: NetworkPolicy) -> Result<(), SwaggerCliError> {
|
||||
if policy == NetworkPolicy::Offline {
|
||||
return Err(SwaggerCliError::OfflineMode(
|
||||
"network access is disabled (--network offline). \
|
||||
Remote URLs cannot be fetched in offline mode."
|
||||
.into(),
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // -- FromStr parsing -------------------------------------------------------

    #[test]
    fn test_parse_auto() {
        // Parsing is case-insensitive.
        assert_eq!(
            NetworkPolicy::from_str("auto").unwrap(),
            NetworkPolicy::Auto
        );
        assert_eq!(
            NetworkPolicy::from_str("AUTO").unwrap(),
            NetworkPolicy::Auto
        );
        assert_eq!(
            NetworkPolicy::from_str("Auto").unwrap(),
            NetworkPolicy::Auto
        );
    }

    #[test]
    fn test_parse_offline() {
        assert_eq!(
            NetworkPolicy::from_str("offline").unwrap(),
            NetworkPolicy::Offline
        );
        assert_eq!(
            NetworkPolicy::from_str("OFFLINE").unwrap(),
            NetworkPolicy::Offline
        );
    }

    #[test]
    fn test_parse_online_only() {
        // All three separator spellings are accepted.
        assert_eq!(
            NetworkPolicy::from_str("online-only").unwrap(),
            NetworkPolicy::OnlineOnly
        );
        assert_eq!(
            NetworkPolicy::from_str("online_only").unwrap(),
            NetworkPolicy::OnlineOnly
        );
        assert_eq!(
            NetworkPolicy::from_str("onlineonly").unwrap(),
            NetworkPolicy::OnlineOnly
        );
        assert_eq!(
            NetworkPolicy::from_str("ONLINE-ONLY").unwrap(),
            NetworkPolicy::OnlineOnly
        );
    }

    #[test]
    fn test_parse_invalid() {
        // Unknown names surface as a Usage error listing valid options.
        let err = NetworkPolicy::from_str("bogus").unwrap_err();
        assert!(matches!(err, SwaggerCliError::Usage(_)));
    }

    #[test]
    fn test_display() {
        // Display round-trips to the kebab-case CLI spelling.
        assert_eq!(NetworkPolicy::Auto.to_string(), "auto");
        assert_eq!(NetworkPolicy::Offline.to_string(), "offline");
        assert_eq!(NetworkPolicy::OnlineOnly.to_string(), "online-only");
    }

    // -- Policy enforcement ----------------------------------------------------

    #[test]
    fn test_check_remote_fetch_auto_allowed() {
        assert!(check_remote_fetch(NetworkPolicy::Auto).is_ok());
    }

    #[test]
    fn test_check_remote_fetch_online_only_allowed() {
        // OnlineOnly permits the attempt; it only changes error reporting.
        assert!(check_remote_fetch(NetworkPolicy::OnlineOnly).is_ok());
    }

    #[test]
    fn test_check_remote_fetch_offline_blocked() {
        let err = check_remote_fetch(NetworkPolicy::Offline).unwrap_err();
        assert!(matches!(err, SwaggerCliError::OfflineMode(_)));
    }

    // -- Flag/env resolution ---------------------------------------------------
    // NOTE: env-var branches of resolve_policy are not covered here; doing so
    // would require mutating process env, which races with parallel tests.

    #[test]
    fn test_resolve_policy_flag_offline() {
        // When flag is explicitly "offline", it takes precedence regardless of env
        assert_eq!(resolve_policy("offline").unwrap(), NetworkPolicy::Offline);
    }

    #[test]
    fn test_resolve_policy_flag_online_only() {
        // When flag is explicitly "online-only", it takes precedence
        assert_eq!(
            resolve_policy("online-only").unwrap(),
            NetworkPolicy::OnlineOnly
        );
    }

    #[test]
    fn test_resolve_policy_invalid_flag() {
        let err = resolve_policy("bogus").unwrap_err();
        assert!(matches!(err, SwaggerCliError::Usage(_)));
    }
}
|
||||
Reference in New Issue
Block a user