Wave 4: Full CLI command implementations - fetch, list, show, search, tags, aliases, doctor, cache lifecycle (bd-16o, bd-3km, bd-1dj, bd-acf, bd-3bl, bd-30a, bd-2s6, bd-1d4)
This commit is contained in:
@@ -1,6 +1,18 @@
|
||||
use clap::Args as ClapArgs;
|
||||
use std::time::Instant;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use clap::Args as ClapArgs;
|
||||
use serde::Serialize;
|
||||
use tabled::Tabled;
|
||||
|
||||
use crate::core::cache::{CacheManager, CacheMetadata, validate_alias};
|
||||
use crate::core::config::{Config, cache_dir, config_path};
|
||||
use crate::errors::SwaggerCliError;
|
||||
use crate::output::{robot, table};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// CLI args
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Manage spec aliases
|
||||
#[derive(Debug, ClapArgs)]
|
||||
@@ -9,15 +21,589 @@ pub struct Args {
|
||||
#[arg(long)]
|
||||
pub list: bool,
|
||||
|
||||
/// Remove an alias
|
||||
/// Show full details for an alias
|
||||
#[arg(long)]
|
||||
pub remove: Option<String>,
|
||||
pub show: Option<String>,
|
||||
|
||||
/// Rename an alias (old=new)
|
||||
/// Rename an alias (old new)
|
||||
#[arg(long, num_args = 2, value_names = ["OLD", "NEW"])]
|
||||
pub rename: Option<Vec<String>>,
|
||||
|
||||
/// Delete an alias
|
||||
#[arg(long)]
|
||||
pub rename: Option<String>,
|
||||
pub delete: Option<String>,
|
||||
|
||||
/// Set the default alias
|
||||
#[arg(long)]
|
||||
pub set_default: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn execute(_args: &Args, _robot: bool) -> Result<(), SwaggerCliError> {
|
||||
Err(SwaggerCliError::Usage("aliases not yet implemented".into()))
|
||||
// ---------------------------------------------------------------------------
|
||||
// Robot-mode output structs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct AliasListEntry {
|
||||
name: String,
|
||||
source_url: Option<String>,
|
||||
version: String,
|
||||
is_default: bool,
|
||||
cached_at: DateTime<Utc>,
|
||||
endpoints: usize,
|
||||
schemas: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct AliasListOutput {
|
||||
aliases: Vec<AliasListEntry>,
|
||||
default_alias: Option<String>,
|
||||
count: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct AliasShowOutput {
|
||||
name: String,
|
||||
source_url: Option<String>,
|
||||
title: String,
|
||||
version: String,
|
||||
is_default: bool,
|
||||
cached_at: DateTime<Utc>,
|
||||
last_accessed: DateTime<Utc>,
|
||||
endpoints: usize,
|
||||
schemas: usize,
|
||||
source_format: String,
|
||||
content_hash: String,
|
||||
raw_size_bytes: u64,
|
||||
generation: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct AliasRenameOutput {
|
||||
old_name: String,
|
||||
new_name: String,
|
||||
updated_default: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct AliasDeleteOutput {
|
||||
name: String,
|
||||
cleared_default: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct AliasSetDefaultOutput {
|
||||
name: String,
|
||||
previous_default: Option<String>,
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Human-mode table row
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[derive(Tabled)]
|
||||
struct AliasRow {
|
||||
#[tabled(rename = "Alias")]
|
||||
name: String,
|
||||
#[tabled(rename = "Version")]
|
||||
version: String,
|
||||
#[tabled(rename = "Endpoints")]
|
||||
endpoints: usize,
|
||||
#[tabled(rename = "Schemas")]
|
||||
schemas: usize,
|
||||
#[tabled(rename = "Source")]
|
||||
source: String,
|
||||
#[tabled(rename = "Default")]
|
||||
default_marker: String,
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Execute
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
pub async fn execute(args: &Args, robot: bool) -> Result<(), SwaggerCliError> {
|
||||
let start = Instant::now();
|
||||
|
||||
if let Some(ref alias) = args.show {
|
||||
return cmd_show(alias, robot, start);
|
||||
}
|
||||
if let Some(ref names) = args.rename {
|
||||
return cmd_rename(names, robot, start);
|
||||
}
|
||||
if let Some(ref alias) = args.delete {
|
||||
return cmd_delete(alias, robot, start);
|
||||
}
|
||||
if let Some(ref alias) = args.set_default {
|
||||
return cmd_set_default(alias, robot, start);
|
||||
}
|
||||
|
||||
// Default: list
|
||||
cmd_list(robot, start)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// List
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn cmd_list(robot: bool, start: Instant) -> Result<(), SwaggerCliError> {
|
||||
let cm = CacheManager::new(cache_dir());
|
||||
let cfg = Config::load(&config_path(None))?;
|
||||
let default_alias = cfg.default_alias.clone();
|
||||
let metas = cm.list_aliases()?;
|
||||
|
||||
if robot {
|
||||
let entries: Vec<AliasListEntry> = metas
|
||||
.iter()
|
||||
.map(|m| meta_to_list_entry(m, &default_alias))
|
||||
.collect();
|
||||
let count = entries.len();
|
||||
let output = AliasListOutput {
|
||||
aliases: entries,
|
||||
default_alias,
|
||||
count,
|
||||
};
|
||||
robot::robot_success(output, "aliases", start.elapsed());
|
||||
} else {
|
||||
print_human_list(&metas, &default_alias);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn meta_to_list_entry(m: &CacheMetadata, default_alias: &Option<String>) -> AliasListEntry {
|
||||
AliasListEntry {
|
||||
name: m.alias.clone(),
|
||||
source_url: m.url.clone(),
|
||||
version: m.spec_version.clone(),
|
||||
is_default: default_alias.as_deref() == Some(m.alias.as_str()),
|
||||
cached_at: m.fetched_at,
|
||||
endpoints: m.endpoint_count,
|
||||
schemas: m.schema_count,
|
||||
}
|
||||
}
|
||||
|
||||
fn print_human_list(metas: &[CacheMetadata], default_alias: &Option<String>) {
|
||||
if metas.is_empty() {
|
||||
println!("No cached aliases. Use 'swagger-cli fetch <url>' to cache a spec.");
|
||||
return;
|
||||
}
|
||||
|
||||
let rows: Vec<AliasRow> = metas
|
||||
.iter()
|
||||
.map(|m| {
|
||||
let is_default = default_alias.as_deref() == Some(m.alias.as_str());
|
||||
AliasRow {
|
||||
name: m.alias.clone(),
|
||||
version: m.spec_version.clone(),
|
||||
endpoints: m.endpoint_count,
|
||||
schemas: m.schema_count,
|
||||
source: m.url.as_deref().unwrap_or("-").to_string(),
|
||||
default_marker: if is_default {
|
||||
"*".to_string()
|
||||
} else {
|
||||
String::new()
|
||||
},
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
println!("{}", table::render_table(&rows));
|
||||
if let Some(d) = default_alias {
|
||||
println!("\n default: {d}");
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Show
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn cmd_show(alias: &str, robot: bool, start: Instant) -> Result<(), SwaggerCliError> {
|
||||
let cm = CacheManager::new(cache_dir());
|
||||
let cfg = Config::load(&config_path(None))?;
|
||||
let default_alias = cfg.default_alias.clone();
|
||||
|
||||
let (_index, meta) = cm.load_index(alias)?;
|
||||
|
||||
if robot {
|
||||
let output = meta_to_show_output(&meta, &default_alias);
|
||||
robot::robot_success(output, "aliases", start.elapsed());
|
||||
} else {
|
||||
print_human_show(&meta, &default_alias);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn meta_to_show_output(m: &CacheMetadata, default_alias: &Option<String>) -> AliasShowOutput {
|
||||
AliasShowOutput {
|
||||
name: m.alias.clone(),
|
||||
source_url: m.url.clone(),
|
||||
title: m.spec_title.clone(),
|
||||
version: m.spec_version.clone(),
|
||||
is_default: default_alias.as_deref() == Some(m.alias.as_str()),
|
||||
cached_at: m.fetched_at,
|
||||
last_accessed: m.last_accessed,
|
||||
endpoints: m.endpoint_count,
|
||||
schemas: m.schema_count,
|
||||
source_format: m.source_format.clone(),
|
||||
content_hash: m.content_hash.clone(),
|
||||
raw_size_bytes: m.raw_size_bytes,
|
||||
generation: m.generation,
|
||||
}
|
||||
}
|
||||
|
||||
fn print_human_show(m: &CacheMetadata, default_alias: &Option<String>) {
|
||||
let is_default = default_alias.as_deref() == Some(m.alias.as_str());
|
||||
println!("Alias: {}", m.alias);
|
||||
println!("Title: {}", m.spec_title);
|
||||
println!("Version: {}", m.spec_version);
|
||||
println!("Source URL: {}", m.url.as_deref().unwrap_or("-"));
|
||||
println!("Source format: {}", m.source_format);
|
||||
println!("Endpoints: {}", m.endpoint_count);
|
||||
println!("Schemas: {}", m.schema_count);
|
||||
println!("Cached at: {}", m.fetched_at);
|
||||
println!("Last accessed: {}", m.last_accessed);
|
||||
println!("Content hash: {}", m.content_hash);
|
||||
println!("Raw size: {} bytes", m.raw_size_bytes);
|
||||
println!("Generation: {}", m.generation);
|
||||
println!("Default: {}", if is_default { "yes" } else { "no" });
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Rename
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn cmd_rename(names: &[String], robot: bool, start: Instant) -> Result<(), SwaggerCliError> {
|
||||
if names.len() != 2 {
|
||||
return Err(SwaggerCliError::Usage(
|
||||
"--rename requires exactly two values: OLD NEW".into(),
|
||||
));
|
||||
}
|
||||
|
||||
let old = &names[0];
|
||||
let new = &names[1];
|
||||
|
||||
validate_alias(new)?;
|
||||
|
||||
let cm = CacheManager::new(cache_dir());
|
||||
|
||||
if !cm.alias_exists(old) {
|
||||
return Err(SwaggerCliError::AliasNotFound(old.clone()));
|
||||
}
|
||||
if cm.alias_exists(new) {
|
||||
return Err(SwaggerCliError::AliasExists(new.clone()));
|
||||
}
|
||||
|
||||
let old_dir = cm.alias_dir(old);
|
||||
let new_dir = cm.alias_dir(new);
|
||||
|
||||
std::fs::rename(&old_dir, &new_dir).map_err(|e| {
|
||||
SwaggerCliError::Cache(format!(
|
||||
"Failed to rename {} -> {}: {e}",
|
||||
old_dir.display(),
|
||||
new_dir.display()
|
||||
))
|
||||
})?;
|
||||
|
||||
// Update meta.json alias field
|
||||
let meta_path = new_dir.join("meta.json");
|
||||
if let Ok(bytes) = std::fs::read(&meta_path)
|
||||
&& let Ok(mut meta) = serde_json::from_slice::<CacheMetadata>(&bytes)
|
||||
{
|
||||
meta.alias = new.clone();
|
||||
if let Ok(updated_bytes) = serde_json::to_vec_pretty(&meta) {
|
||||
let _ = std::fs::write(&meta_path, updated_bytes);
|
||||
}
|
||||
}
|
||||
|
||||
// Update config if old was the default
|
||||
let cfg_path = config_path(None);
|
||||
let mut cfg = Config::load(&cfg_path)?;
|
||||
let updated_default = cfg.default_alias.as_deref() == Some(old.as_str());
|
||||
if updated_default {
|
||||
cfg.default_alias = Some(new.clone());
|
||||
cfg.save(&cfg_path)?;
|
||||
}
|
||||
|
||||
if robot {
|
||||
let output = AliasRenameOutput {
|
||||
old_name: old.clone(),
|
||||
new_name: new.clone(),
|
||||
updated_default,
|
||||
};
|
||||
robot::robot_success(output, "aliases", start.elapsed());
|
||||
} else {
|
||||
println!("Renamed '{}' -> '{}'", old, new);
|
||||
if updated_default {
|
||||
println!(" (default alias updated)");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Delete
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn cmd_delete(alias: &str, robot: bool, start: Instant) -> Result<(), SwaggerCliError> {
|
||||
let cm = CacheManager::new(cache_dir());
|
||||
cm.delete_alias(alias)?;
|
||||
|
||||
// Clear default if it was this alias
|
||||
let cfg_path = config_path(None);
|
||||
let mut cfg = Config::load(&cfg_path)?;
|
||||
let cleared_default = cfg.default_alias.as_deref() == Some(alias);
|
||||
if cleared_default {
|
||||
cfg.default_alias = None;
|
||||
cfg.save(&cfg_path)?;
|
||||
}
|
||||
|
||||
if robot {
|
||||
let output = AliasDeleteOutput {
|
||||
name: alias.to_string(),
|
||||
cleared_default,
|
||||
};
|
||||
robot::robot_success(output, "aliases", start.elapsed());
|
||||
} else {
|
||||
println!("Deleted alias '{alias}'");
|
||||
if cleared_default {
|
||||
println!(" (cleared default alias)");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Set default
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn cmd_set_default(alias: &str, robot: bool, start: Instant) -> Result<(), SwaggerCliError> {
|
||||
let cm = CacheManager::new(cache_dir());
|
||||
|
||||
if !cm.alias_exists(alias) {
|
||||
return Err(SwaggerCliError::AliasNotFound(alias.to_string()));
|
||||
}
|
||||
|
||||
let cfg_path = config_path(None);
|
||||
let mut cfg = Config::load(&cfg_path)?;
|
||||
let previous_default = cfg.default_alias.clone();
|
||||
cfg.default_alias = Some(alias.to_string());
|
||||
cfg.save(&cfg_path)?;
|
||||
|
||||
if robot {
|
||||
let output = AliasSetDefaultOutput {
|
||||
name: alias.to_string(),
|
||||
previous_default,
|
||||
};
|
||||
robot::robot_success(output, "aliases", start.elapsed());
|
||||
} else {
|
||||
println!("Default alias set to '{alias}'");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::core::cache::CacheManager;
|
||||
use crate::core::spec::{IndexInfo, SpecIndex};
|
||||
|
||||
fn make_test_index() -> SpecIndex {
|
||||
SpecIndex {
|
||||
index_version: 1,
|
||||
generation: 1,
|
||||
content_hash: "sha256:test".into(),
|
||||
openapi: "3.0.3".into(),
|
||||
info: IndexInfo {
|
||||
title: "Test".into(),
|
||||
version: "1.0.0".into(),
|
||||
},
|
||||
endpoints: vec![],
|
||||
schemas: vec![],
|
||||
tags: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
fn write_test_alias(cm: &CacheManager, alias: &str) -> CacheMetadata {
|
||||
let index = make_test_index();
|
||||
cm.write_cache(
|
||||
alias,
|
||||
b"openapi: 3.0.3",
|
||||
b"{\"openapi\":\"3.0.3\"}",
|
||||
&index,
|
||||
Some("https://example.com/api.json".into()),
|
||||
"1.0.0",
|
||||
"Test API",
|
||||
"yaml",
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_list_aliases_output_format() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let cm = CacheManager::new(tmp.path().to_path_buf());
|
||||
|
||||
write_test_alias(&cm, "petstore");
|
||||
write_test_alias(&cm, "users-api");
|
||||
|
||||
let metas = cm.list_aliases().unwrap();
|
||||
let default_alias = Some("petstore".to_string());
|
||||
|
||||
let entries: Vec<AliasListEntry> = metas
|
||||
.iter()
|
||||
.map(|m| meta_to_list_entry(m, &default_alias))
|
||||
.collect();
|
||||
|
||||
assert_eq!(entries.len(), 2);
|
||||
|
||||
let pet = entries.iter().find(|e| e.name == "petstore").unwrap();
|
||||
assert!(pet.is_default);
|
||||
assert_eq!(pet.version, "1.0.0");
|
||||
assert_eq!(pet.source_url, Some("https://example.com/api.json".into()));
|
||||
|
||||
let users = entries.iter().find(|e| e.name == "users-api").unwrap();
|
||||
assert!(!users.is_default);
|
||||
|
||||
// Verify JSON round-trip
|
||||
let output = AliasListOutput {
|
||||
count: entries.len(),
|
||||
aliases: entries,
|
||||
default_alias,
|
||||
};
|
||||
let json = serde_json::to_string(&output).unwrap();
|
||||
let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(parsed["count"], 2);
|
||||
assert!(parsed["aliases"].is_array());
|
||||
assert_eq!(parsed["default_alias"], "petstore");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rename_validation() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let cm = CacheManager::new(tmp.path().to_path_buf());
|
||||
|
||||
write_test_alias(&cm, "original");
|
||||
|
||||
// Renaming to an invalid name should fail at validation
|
||||
let result = validate_alias("../evil");
|
||||
assert!(result.is_err());
|
||||
|
||||
// Renaming to an existing alias should fail
|
||||
write_test_alias(&cm, "taken");
|
||||
assert!(cm.alias_exists("taken"));
|
||||
|
||||
// Renaming a non-existent alias should fail
|
||||
assert!(!cm.alias_exists("ghost"));
|
||||
|
||||
// A valid rename should succeed
|
||||
let old_dir = cm.alias_dir("original");
|
||||
let new_dir = cm.alias_dir("renamed");
|
||||
assert!(old_dir.exists());
|
||||
assert!(!new_dir.exists());
|
||||
std::fs::rename(&old_dir, &new_dir).unwrap();
|
||||
assert!(!old_dir.exists());
|
||||
assert!(new_dir.exists());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_show_output_fields() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let cm = CacheManager::new(tmp.path().to_path_buf());
|
||||
|
||||
write_test_alias(&cm, "showtest");
|
||||
|
||||
let (_index, meta) = cm.load_index("showtest").unwrap();
|
||||
let default_alias = Some("showtest".to_string());
|
||||
let output = meta_to_show_output(&meta, &default_alias);
|
||||
|
||||
assert_eq!(output.name, "showtest");
|
||||
assert_eq!(output.title, "Test API");
|
||||
assert_eq!(output.version, "1.0.0");
|
||||
assert!(output.is_default);
|
||||
assert_eq!(output.source_format, "yaml");
|
||||
assert_eq!(output.generation, 1);
|
||||
assert!(output.content_hash.starts_with("sha256:"));
|
||||
|
||||
// JSON round-trip
|
||||
let json = serde_json::to_string(&output).unwrap();
|
||||
let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(parsed["name"], "showtest");
|
||||
assert_eq!(parsed["is_default"], true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_delete_clears_default() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let cm = CacheManager::new(tmp.path().to_path_buf());
|
||||
let cfg_path = tmp.path().join("config.toml");
|
||||
|
||||
write_test_alias(&cm, "doomed");
|
||||
|
||||
let cfg = Config {
|
||||
default_alias: Some("doomed".to_string()),
|
||||
..Config::default()
|
||||
};
|
||||
cfg.save(&cfg_path).unwrap();
|
||||
|
||||
// Delete the alias
|
||||
cm.delete_alias("doomed").unwrap();
|
||||
assert!(!cm.alias_exists("doomed"));
|
||||
|
||||
// Simulate clearing default
|
||||
let mut cfg = Config::load(&cfg_path).unwrap();
|
||||
let cleared = cfg.default_alias.as_deref() == Some("doomed");
|
||||
assert!(cleared);
|
||||
cfg.default_alias = None;
|
||||
cfg.save(&cfg_path).unwrap();
|
||||
|
||||
let cfg = Config::load(&cfg_path).unwrap();
|
||||
assert!(cfg.default_alias.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_default_requires_existing_alias() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let cm = CacheManager::new(tmp.path().to_path_buf());
|
||||
|
||||
assert!(!cm.alias_exists("nonexistent"));
|
||||
|
||||
write_test_alias(&cm, "real");
|
||||
assert!(cm.alias_exists("real"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rename_updates_meta_alias_field() {
|
||||
let tmp = tempfile::tempdir().unwrap();
|
||||
let cm = CacheManager::new(tmp.path().to_path_buf());
|
||||
|
||||
write_test_alias(&cm, "old-name");
|
||||
|
||||
let old_dir = cm.alias_dir("old-name");
|
||||
let new_dir = cm.alias_dir("new-name");
|
||||
std::fs::rename(&old_dir, &new_dir).unwrap();
|
||||
|
||||
// Simulate the meta.json update that cmd_rename does
|
||||
let meta_path = new_dir.join("meta.json");
|
||||
let bytes = std::fs::read(&meta_path).unwrap();
|
||||
let mut meta: CacheMetadata = serde_json::from_slice(&bytes).unwrap();
|
||||
assert_eq!(meta.alias, "old-name");
|
||||
|
||||
meta.alias = "new-name".to_string();
|
||||
let updated = serde_json::to_vec_pretty(&meta).unwrap();
|
||||
std::fs::write(&meta_path, updated).unwrap();
|
||||
|
||||
let bytes = std::fs::read(&meta_path).unwrap();
|
||||
let meta: CacheMetadata = serde_json::from_slice(&bytes).unwrap();
|
||||
assert_eq!(meta.alias, "new-name");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,23 +1,598 @@
|
||||
use clap::Args as ClapArgs;
|
||||
use std::cmp::Ordering;
|
||||
use std::time::Instant;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use clap::Args as ClapArgs;
|
||||
use serde::Serialize;
|
||||
use tabled::Tabled;
|
||||
|
||||
use crate::core::cache::CacheManager;
|
||||
use crate::core::config::cache_dir;
|
||||
use crate::errors::SwaggerCliError;
|
||||
use crate::output::robot::robot_success;
|
||||
use crate::output::table::render_table_or_empty;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// CLI args
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Manage the spec cache
|
||||
#[derive(Debug, ClapArgs)]
|
||||
pub struct Args {
|
||||
/// Show cache location
|
||||
/// Show cache statistics (default when no other flag given)
|
||||
#[arg(long)]
|
||||
pub stats: bool,
|
||||
|
||||
/// Print the cache directory path and exit
|
||||
#[arg(long)]
|
||||
pub path: bool,
|
||||
|
||||
/// Clear the entire cache
|
||||
/// Remove aliases whose fetched_at exceeds the stale threshold
|
||||
#[arg(long)]
|
||||
pub clear: bool,
|
||||
pub prune_stale: bool,
|
||||
|
||||
/// Show cache size
|
||||
/// Days before an alias is considered stale (default: 90)
|
||||
#[arg(long, default_value_t = 90)]
|
||||
pub prune_threshold: u32,
|
||||
|
||||
/// Evict least-recently-used aliases until total size is under this limit (MB)
|
||||
#[arg(long)]
|
||||
pub size: bool,
|
||||
pub max_total_mb: Option<u64>,
|
||||
|
||||
/// Report what would happen without deleting anything
|
||||
#[arg(long)]
|
||||
pub dry_run: bool,
|
||||
}
|
||||
|
||||
pub async fn execute(_args: &Args, _robot: bool) -> Result<(), SwaggerCliError> {
|
||||
Err(SwaggerCliError::Usage("cache not yet implemented".into()))
|
||||
// ---------------------------------------------------------------------------
|
||||
// Robot output structs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(tag = "kind")]
|
||||
enum CacheOutput {
|
||||
#[serde(rename = "stats")]
|
||||
Stats(StatsOutput),
|
||||
#[serde(rename = "path")]
|
||||
Path(PathOutput),
|
||||
#[serde(rename = "prune")]
|
||||
Prune(PruneOutput),
|
||||
#[serde(rename = "evict")]
|
||||
Evict(EvictOutput),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct StatsOutput {
|
||||
aliases: Vec<AliasStats>,
|
||||
total_bytes: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct AliasStats {
|
||||
name: String,
|
||||
size_bytes: u64,
|
||||
endpoint_count: usize,
|
||||
fetched_at: DateTime<Utc>,
|
||||
last_accessed: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct PathOutput {
|
||||
path: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct PruneOutput {
|
||||
pruned: Vec<String>,
|
||||
dry_run: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct EvictOutput {
|
||||
evicted: Vec<String>,
|
||||
target_bytes: u64,
|
||||
actual_bytes: u64,
|
||||
dry_run: bool,
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Human-readable table row
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[derive(Tabled)]
|
||||
struct StatsRow {
|
||||
#[tabled(rename = "Alias")]
|
||||
name: String,
|
||||
#[tabled(rename = "Size")]
|
||||
size: String,
|
||||
#[tabled(rename = "Endpoints")]
|
||||
endpoints: usize,
|
||||
#[tabled(rename = "Fetched")]
|
||||
fetched: String,
|
||||
#[tabled(rename = "Last Accessed")]
|
||||
accessed: String,
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Walk every file in `dir` (non-recursive) and sum metadata().len().
|
||||
fn dir_size(dir: &std::path::Path) -> u64 {
|
||||
let Ok(entries) = std::fs::read_dir(dir) else {
|
||||
return 0;
|
||||
};
|
||||
entries
|
||||
.filter_map(Result::ok)
|
||||
.filter_map(|e| e.metadata().ok())
|
||||
.filter(|m| m.is_file())
|
||||
.map(|m| m.len())
|
||||
.sum()
|
||||
}
|
||||
|
||||
fn human_bytes(bytes: u64) -> String {
|
||||
const KB: u64 = 1024;
|
||||
const MB: u64 = KB * 1024;
|
||||
if bytes >= MB {
|
||||
format!("{:.1} MB", bytes as f64 / MB as f64)
|
||||
} else if bytes >= KB {
|
||||
format!("{:.1} KB", bytes as f64 / KB as f64)
|
||||
} else {
|
||||
format!("{bytes} B")
|
||||
}
|
||||
}
|
||||
|
||||
fn short_datetime(dt: &DateTime<Utc>) -> String {
|
||||
dt.format("%Y-%m-%d %H:%M").to_string()
|
||||
}
|
||||
|
||||
/// Compare by last_accessed ASC, then fetched_at ASC as tie-breaker.
|
||||
fn lru_order(
|
||||
a_last: &DateTime<Utc>,
|
||||
a_fetched: &DateTime<Utc>,
|
||||
b_last: &DateTime<Utc>,
|
||||
b_fetched: &DateTime<Utc>,
|
||||
) -> Ordering {
|
||||
a_last.cmp(b_last).then_with(|| a_fetched.cmp(b_fetched))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Execute
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
pub async fn execute(args: &Args, robot: bool) -> Result<(), SwaggerCliError> {
|
||||
let start = Instant::now();
|
||||
|
||||
if args.path {
|
||||
return execute_path(robot, start);
|
||||
}
|
||||
if args.prune_stale {
|
||||
return execute_prune(args, robot, start);
|
||||
}
|
||||
if args.max_total_mb.is_some() {
|
||||
return execute_evict(args, robot, start);
|
||||
}
|
||||
|
||||
// Default: stats
|
||||
execute_stats(robot, start)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Path
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn execute_path(robot: bool, start: Instant) -> Result<(), SwaggerCliError> {
|
||||
let cd = cache_dir();
|
||||
if robot {
|
||||
robot_success(
|
||||
CacheOutput::Path(PathOutput {
|
||||
path: cd.display().to_string(),
|
||||
}),
|
||||
"cache",
|
||||
start.elapsed(),
|
||||
);
|
||||
} else {
|
||||
println!("{}", cd.display());
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Stats
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn execute_stats(robot: bool, start: Instant) -> Result<(), SwaggerCliError> {
|
||||
let cm = CacheManager::new(cache_dir());
|
||||
let metas = cm.list_aliases()?;
|
||||
|
||||
let mut alias_stats: Vec<AliasStats> = Vec::with_capacity(metas.len());
|
||||
for meta in &metas {
|
||||
let size = dir_size(&cm.alias_dir(&meta.alias));
|
||||
alias_stats.push(AliasStats {
|
||||
name: meta.alias.clone(),
|
||||
size_bytes: size,
|
||||
endpoint_count: meta.endpoint_count,
|
||||
fetched_at: meta.fetched_at,
|
||||
last_accessed: meta.last_accessed,
|
||||
});
|
||||
}
|
||||
|
||||
let total_bytes: u64 = alias_stats.iter().map(|a| a.size_bytes).sum();
|
||||
|
||||
if robot {
|
||||
robot_success(
|
||||
CacheOutput::Stats(StatsOutput {
|
||||
aliases: alias_stats,
|
||||
total_bytes,
|
||||
}),
|
||||
"cache",
|
||||
start.elapsed(),
|
||||
);
|
||||
} else {
|
||||
let rows: Vec<StatsRow> = alias_stats
|
||||
.iter()
|
||||
.map(|a| StatsRow {
|
||||
name: a.name.clone(),
|
||||
size: human_bytes(a.size_bytes),
|
||||
endpoints: a.endpoint_count,
|
||||
fetched: short_datetime(&a.fetched_at),
|
||||
accessed: short_datetime(&a.last_accessed),
|
||||
})
|
||||
.collect();
|
||||
println!("{}", render_table_or_empty(&rows, "Cache is empty."));
|
||||
println!("Total: {}", human_bytes(total_bytes));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Prune
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn execute_prune(args: &Args, robot: bool, start: Instant) -> Result<(), SwaggerCliError> {
|
||||
let cm = CacheManager::new(cache_dir());
|
||||
let metas = cm.list_aliases()?;
|
||||
|
||||
let stale: Vec<&str> = metas
|
||||
.iter()
|
||||
.filter(|m| m.is_stale(args.prune_threshold))
|
||||
.map(|m| m.alias.as_str())
|
||||
.collect();
|
||||
|
||||
if args.dry_run {
|
||||
if robot {
|
||||
robot_success(
|
||||
CacheOutput::Prune(PruneOutput {
|
||||
pruned: stale.iter().map(|s| (*s).to_string()).collect(),
|
||||
dry_run: true,
|
||||
}),
|
||||
"cache",
|
||||
start.elapsed(),
|
||||
);
|
||||
} else if stale.is_empty() {
|
||||
println!(
|
||||
"No stale aliases (threshold: {} days).",
|
||||
args.prune_threshold
|
||||
);
|
||||
} else {
|
||||
println!(
|
||||
"Would prune {} stale alias(es) (threshold: {} days):",
|
||||
stale.len(),
|
||||
args.prune_threshold
|
||||
);
|
||||
for name in &stale {
|
||||
println!(" {name}");
|
||||
}
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut pruned: Vec<String> = Vec::new();
|
||||
for name in &stale {
|
||||
cm.delete_alias(name)?;
|
||||
pruned.push((*name).to_string());
|
||||
}
|
||||
|
||||
if robot {
|
||||
robot_success(
|
||||
CacheOutput::Prune(PruneOutput {
|
||||
pruned,
|
||||
dry_run: false,
|
||||
}),
|
||||
"cache",
|
||||
start.elapsed(),
|
||||
);
|
||||
} else if stale.is_empty() {
|
||||
println!(
|
||||
"No stale aliases (threshold: {} days).",
|
||||
args.prune_threshold
|
||||
);
|
||||
} else {
|
||||
println!("Pruned {} stale alias(es).", stale.len());
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// LRU eviction
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn execute_evict(args: &Args, robot: bool, start: Instant) -> Result<(), SwaggerCliError> {
|
||||
let target_bytes = args.max_total_mb.unwrap_or(0) * 1024 * 1024;
|
||||
let cm = CacheManager::new(cache_dir());
|
||||
let metas = cm.list_aliases()?;
|
||||
|
||||
// Build (alias, size, last_accessed, fetched_at)
|
||||
let mut entries: Vec<(String, u64, DateTime<Utc>, DateTime<Utc>)> = metas
|
||||
.iter()
|
||||
.map(|m| {
|
||||
let size = dir_size(&cm.alias_dir(&m.alias));
|
||||
(m.alias.clone(), size, m.last_accessed, m.fetched_at)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut total: u64 = entries.iter().map(|(_, s, _, _)| s).sum();
|
||||
|
||||
// Sort LRU: oldest last_accessed first, then oldest fetched_at
|
||||
entries.sort_by(|a, b| lru_order(&a.2, &a.3, &b.2, &b.3));
|
||||
|
||||
let mut evicted: Vec<String> = Vec::new();
|
||||
for (name, size, _, _) in &entries {
|
||||
if total <= target_bytes {
|
||||
break;
|
||||
}
|
||||
evicted.push(name.clone());
|
||||
total = total.saturating_sub(*size);
|
||||
}
|
||||
|
||||
if args.dry_run {
|
||||
if robot {
|
||||
robot_success(
|
||||
CacheOutput::Evict(EvictOutput {
|
||||
evicted,
|
||||
target_bytes,
|
||||
actual_bytes: total,
|
||||
dry_run: true,
|
||||
}),
|
||||
"cache",
|
||||
start.elapsed(),
|
||||
);
|
||||
} else if evicted.is_empty() {
|
||||
println!(
|
||||
"Cache already under {} MB target.",
|
||||
args.max_total_mb.unwrap_or(0)
|
||||
);
|
||||
} else {
|
||||
println!(
|
||||
"Would evict {} alias(es) to reach {} MB target:",
|
||||
evicted.len(),
|
||||
args.max_total_mb.unwrap_or(0)
|
||||
);
|
||||
for name in &evicted {
|
||||
println!(" {name}");
|
||||
}
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
for name in &evicted {
|
||||
cm.delete_alias(name)?;
|
||||
}
|
||||
|
||||
if robot {
|
||||
robot_success(
|
||||
CacheOutput::Evict(EvictOutput {
|
||||
evicted,
|
||||
target_bytes,
|
||||
actual_bytes: total,
|
||||
dry_run: false,
|
||||
}),
|
||||
"cache",
|
||||
start.elapsed(),
|
||||
);
|
||||
} else if evicted.is_empty() {
|
||||
println!(
|
||||
"Cache already under {} MB target.",
|
||||
args.max_total_mb.unwrap_or(0)
|
||||
);
|
||||
} else {
|
||||
println!(
|
||||
"Evicted {} alias(es). Cache now {}.",
|
||||
evicted.len(),
|
||||
human_bytes(total)
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for the cache lifecycle commands: stats sizing, stale
    //! pruning, LRU ordering, byte formatting, and robot-mode JSON output.

    use super::*;
    use crate::core::cache::CacheMetadata;
    use chrono::Duration;

    /// Build an in-memory `CacheMetadata` with `fetched_at` / `last_accessed`
    /// offset into the past by the given number of days.
    fn make_meta(alias: &str, days_old: i64, last_accessed_days_ago: i64) -> CacheMetadata {
        CacheMetadata {
            alias: alias.to_string(),
            url: None,
            fetched_at: Utc::now() - Duration::days(days_old),
            last_accessed: Utc::now() - Duration::days(last_accessed_days_ago),
            content_hash: String::new(),
            raw_hash: String::new(),
            etag: None,
            last_modified: None,
            spec_version: "1.0.0".to_string(),
            spec_title: format!("{alias} API"),
            endpoint_count: 5,
            schema_count: 3,
            raw_size_bytes: 1000,
            source_format: "json".to_string(),
            index_version: 1,
            generation: 1,
            index_hash: String::new(),
        }
    }

    /// Write a minimal cache entry with known content so we can measure size.
    fn write_test_alias(cm: &CacheManager, alias: &str, content: &[u8]) {
        cm.ensure_dirs(alias).unwrap();
        let dir = cm.alias_dir(alias);
        std::fs::write(dir.join("raw.json"), content).unwrap();
        let meta = CacheMetadata {
            alias: alias.to_string(),
            url: None,
            fetched_at: Utc::now(),
            last_accessed: Utc::now(),
            content_hash: String::new(),
            raw_hash: String::new(),
            etag: None,
            last_modified: None,
            spec_version: "1.0.0".to_string(),
            spec_title: "Test".to_string(),
            endpoint_count: 3,
            schema_count: 1,
            raw_size_bytes: content.len() as u64,
            source_format: "json".to_string(),
            index_version: 1,
            generation: 1,
            index_hash: String::new(),
        };
        let meta_json = serde_json::to_vec_pretty(&meta).unwrap();
        std::fs::write(dir.join("meta.json"), &meta_json).unwrap();
    }

    // dir_size should count raw.json plus meta.json for each alias.
    #[test]
    fn test_stats_computes_sizes() {
        let tmp = tempfile::tempdir().unwrap();
        let cm = CacheManager::new(tmp.path().to_path_buf());

        let content_a = vec![0u8; 512];
        let content_b = vec![0u8; 1024];
        write_test_alias(&cm, "alpha", &content_a);
        write_test_alias(&cm, "bravo", &content_b);

        let metas = cm.list_aliases().unwrap();
        assert_eq!(metas.len(), 2);

        let mut total: u64 = 0;
        for meta in &metas {
            let size = dir_size(&cm.alias_dir(&meta.alias));
            assert!(size > 0, "alias {} should have nonzero size", meta.alias);
            total += size;
        }
        // Total should be at least the raw content sizes (each alias also has meta.json)
        assert!(total >= 512 + 1024, "total {total} should be >= 1536");
    }

    // Staleness is based on fetched_at age vs. the day threshold (exclusive).
    #[test]
    fn test_prune_identifies_stale() {
        let threshold = 90;

        let fresh = make_meta("fresh-api", 10, 1);
        let stale = make_meta("old-api", 100, 50);
        let borderline = make_meta("edge-api", 91, 2);

        assert!(!fresh.is_stale(threshold), "fresh should not be stale");
        assert!(stale.is_stale(threshold), "old should be stale");
        assert!(borderline.is_stale(threshold), "91-day-old should be stale");
    }

    // cache_dir() must always resolve to some displayable path.
    #[test]
    fn test_path_output() {
        let path = cache_dir();
        let display = path.display().to_string();
        assert!(
            !display.is_empty(),
            "cache_dir should produce a non-empty path"
        );
    }

    // Primary LRU key: older last_accessed sorts first.
    #[test]
    fn test_lru_order_sorts_oldest_first() {
        let now = Utc::now();
        let old = now - Duration::days(30);
        let older = now - Duration::days(60);

        // older last_accessed should sort before newer
        assert_eq!(
            lru_order(&older, &now, &old, &now),
            Ordering::Less,
            "older last_accessed should come first"
        );
        assert_eq!(
            lru_order(&old, &now, &older, &now),
            Ordering::Greater,
            "newer last_accessed should come second"
        );
    }

    // Secondary LRU key: with equal last_accessed, older fetched_at wins.
    #[test]
    fn test_lru_tiebreak_uses_fetched_at() {
        let now = Utc::now();
        let same_access = now - Duration::days(10);
        let older_fetch = now - Duration::days(60);
        let newer_fetch = now - Duration::days(30);

        assert_eq!(
            lru_order(&same_access, &older_fetch, &same_access, &newer_fetch),
            Ordering::Less,
            "older fetched_at should break tie"
        );
    }

    // human_bytes uses 1024-based units with one decimal for KB/MB.
    #[test]
    fn test_human_bytes_formatting() {
        assert_eq!(human_bytes(500), "500 B");
        assert_eq!(human_bytes(1024), "1.0 KB");
        assert_eq!(human_bytes(1536), "1.5 KB");
        assert_eq!(human_bytes(1_048_576), "1.0 MB");
        assert_eq!(human_bytes(2_621_440), "2.5 MB");
    }

    // Robot output is tagged with a "kind" discriminator per variant.
    #[test]
    fn test_cache_output_serialization() {
        let output = CacheOutput::Stats(StatsOutput {
            aliases: vec![AliasStats {
                name: "test".to_string(),
                size_bytes: 1024,
                endpoint_count: 5,
                fetched_at: Utc::now(),
                last_accessed: Utc::now(),
            }],
            total_bytes: 1024,
        });
        let json = serde_json::to_string(&output).unwrap();
        assert!(json.contains("\"kind\":\"stats\""));
        assert!(json.contains("\"total_bytes\":1024"));
    }

    #[test]
    fn test_prune_output_serialization() {
        let output = CacheOutput::Prune(PruneOutput {
            pruned: vec!["old-api".to_string()],
            dry_run: true,
        });
        let json = serde_json::to_string(&output).unwrap();
        assert!(json.contains("\"kind\":\"prune\""));
        assert!(json.contains("\"dry_run\":true"));
        assert!(json.contains("old-api"));
    }

    #[test]
    fn test_evict_output_serialization() {
        let output = CacheOutput::Evict(EvictOutput {
            evicted: vec!["stale-api".to_string()],
            target_bytes: 10_485_760,
            actual_bytes: 5_000_000,
            dry_run: false,
        });
        let json = serde_json::to_string(&output).unwrap();
        assert!(json.contains("\"kind\":\"evict\""));
        assert!(json.contains("\"target_bytes\":10485760"));
        assert!(json.contains("stale-api"));
    }
}
|
||||
|
||||
@@ -1,6 +1,21 @@
|
||||
use std::fs;
use std::path::{Path, PathBuf};
use std::time::Instant;

use clap::Args as ClapArgs;
use colored::Colorize;
use serde::Serialize;

use crate::core::cache::{CacheManager, CacheMetadata, compute_hash};
use crate::core::config::{Config, cache_dir, config_path};
use crate::core::indexer::{build_index, resolve_pointer};
use crate::core::spec::SpecIndex;
use crate::errors::SwaggerCliError;
use crate::output::robot;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// CLI arguments
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Check cache health and diagnose issues
|
||||
#[derive(Debug, ClapArgs)]
|
||||
@@ -8,8 +23,747 @@ pub struct Args {
|
||||
/// Attempt to fix issues automatically
|
||||
#[arg(long)]
|
||||
pub fix: bool,
|
||||
|
||||
/// Check a specific alias only
|
||||
#[arg(long)]
|
||||
pub alias: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn execute(_args: &Args, _robot: bool) -> Result<(), SwaggerCliError> {
|
||||
Err(SwaggerCliError::Usage("doctor not yet implemented".into()))
|
||||
// ---------------------------------------------------------------------------
|
||||
// Health status
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
enum HealthStatus {
|
||||
Healthy,
|
||||
Warning,
|
||||
Degraded,
|
||||
Unhealthy,
|
||||
}
|
||||
|
||||
impl HealthStatus {
|
||||
fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::Healthy => "healthy",
|
||||
Self::Warning => "warning",
|
||||
Self::Degraded => "degraded",
|
||||
Self::Unhealthy => "unhealthy",
|
||||
}
|
||||
}
|
||||
|
||||
fn colored_str(self) -> String {
|
||||
match self {
|
||||
Self::Healthy => "healthy".green().to_string(),
|
||||
Self::Warning => "warning".yellow().to_string(),
|
||||
Self::Degraded => "degraded".red().to_string(),
|
||||
Self::Unhealthy => "unhealthy".red().bold().to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Robot output structs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Top-level robot-mode (JSON) payload for the `doctor` command.
#[derive(Debug, Serialize)]
struct DoctorOutput {
    /// Worst `HealthStatus` across all checked aliases, as a lowercase string.
    health: String,
    /// One report per checked alias.
    aliases: Vec<AliasReport>,
    /// File-system level warnings (missing config/cache directories).
    warnings: Vec<String>,
    /// Sum of on-disk bytes across all alias directories.
    total_disk_bytes: u64,
    /// Number of aliases whose issues could be repaired with `--fix`.
    fixable_count: usize,
    /// Number of aliases that require a re-fetch to repair.
    unfixable_count: usize,
}
|
||||
|
||||
/// Per-alias entry in the robot-mode `doctor` payload.
#[derive(Debug, Serialize)]
struct AliasReport {
    /// Alias name (cache directory name).
    name: String,
    /// Health status as a lowercase string.
    status: String,
    /// Human-readable issue descriptions, including FIXED/FIX FAILED notes.
    issues: Vec<String>,
    /// On-disk size of the alias directory in bytes.
    disk_bytes: u64,
    /// Endpoints found in the alias's index (0 if the index failed to load).
    endpoint_count: usize,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Internal check result
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Internal result of `check_alias`, before conversion to `AliasReport`.
struct AliasCheckResult {
    /// Alias name.
    name: String,
    /// Worst health level observed across all checks.
    status: HealthStatus,
    /// Accumulated issue descriptions.
    issues: Vec<String>,
    /// On-disk size of the alias directory in bytes.
    disk_bytes: u64,
    /// Endpoint count from the index, when it loaded.
    endpoint_count: usize,
    /// True when at least one issue can be repaired by rebuilding from raw.json.
    fixable: bool,
    /// True when at least one issue requires a re-fetch.
    unfixable: bool,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Compute the total size in bytes of the regular files directly inside
/// `path`.
///
/// Only top-level files are counted: subdirectories are not recursed into,
/// and entries whose metadata cannot be read are skipped. A missing or
/// unreadable directory yields 0 rather than an error, so callers can use
/// this on possibly-absent alias directories.
fn dir_size(path: &Path) -> u64 {
    let Ok(entries) = fs::read_dir(path) else {
        return 0;
    };
    entries
        .flatten()
        // Skip entries whose metadata is unreadable instead of failing.
        .filter_map(|entry| entry.metadata().ok())
        .filter(|md| md.is_file())
        .map(|md| md.len())
        .sum()
}
|
||||
|
||||
/// Discover all alias directory names in the cache dir, including those
/// without a valid meta.json (which `list_aliases` would skip).
///
/// Hidden directories (names starting with '.') and plain files are ignored;
/// names that are not valid UTF-8 are dropped. Returns a sorted list, or an
/// empty list when the cache root is missing or unreadable.
fn discover_alias_dirs(cache_root: &Path) -> Vec<String> {
    let Ok(entries) = fs::read_dir(cache_root) else {
        return Vec::new();
    };
    let mut names: Vec<String> = entries
        .flatten()
        .filter(|entry| entry.path().is_dir())
        // Drop non-UTF-8 names; they cannot be valid aliases.
        .filter_map(|entry| entry.file_name().into_string().ok())
        // Skip hidden directories (e.g. .DS_Store dirs)
        .filter(|name| !name.starts_with('.'))
        .collect();
    names.sort();
    names
}
|
||||
|
||||
/// Check a single alias for health issues.
///
/// Runs four checks in order: (1) load meta.json + index.json through
/// `load_index`; (2) load and verify raw.json through `load_raw` (falling
/// back to a bare parse when meta is missing); (3) verify every indexed
/// endpoint pointer resolves against the raw document; (4) flag the entry
/// as stale when `fetched_at` exceeds `stale_threshold_days`.
///
/// Issues accumulate in the result; `status` is the worst level seen.
/// `fixable` means a rebuild from raw.json could repair the alias;
/// `unfixable` means a re-fetch is required. Both flags can be set when
/// independent issues differ in repairability.
fn check_alias(cm: &CacheManager, alias: &str, stale_threshold_days: u32) -> AliasCheckResult {
    let mut issues: Vec<String> = Vec::new();
    let mut status = HealthStatus::Healthy;
    let mut endpoint_count: usize = 0;
    let mut fixable = false;
    let mut unfixable = false;

    let disk_bytes = dir_size(&cm.alias_dir(alias));

    // Step 1: Try loading index (meta + index integrity)
    let index_result = cm.load_index(alias);
    let (index, meta): (Option<SpecIndex>, Option<CacheMetadata>) = match index_result {
        Ok((idx, m)) => (Some(idx), Some(m)),
        Err(SwaggerCliError::AliasNotFound(_)) => {
            issues.push("meta.json missing".to_string());
            status = HealthStatus::Degraded;
            // Check if raw.json exists -- if so this might be fixable
            if cm.alias_dir(alias).join("raw.json").exists() {
                fixable = true;
            } else {
                unfixable = true;
            }
            (None, None)
        }
        Err(SwaggerCliError::CacheIntegrity(msg)) => {
            issues.push(format!("index integrity: {msg}"));
            status = HealthStatus::Degraded;
            fixable = true; // Index can potentially be rebuilt from raw
            (None, None)
        }
        Err(e) => {
            // Any other load failure is treated as unrecoverable here.
            issues.push(format!("load error: {e}"));
            status = HealthStatus::Unhealthy;
            unfixable = true;
            (None, None)
        }
    };

    // Step 2: Try loading raw (validates raw_hash)
    let raw_value: Option<serde_json::Value> = if let Some(ref m) = meta {
        match cm.load_raw(alias, m) {
            Ok(v) => Some(v),
            Err(SwaggerCliError::CacheIntegrity(msg)) => {
                // Hash mismatch: the source of truth itself is corrupt,
                // so a rebuild cannot help.
                issues.push(format!("raw integrity: {msg}"));
                status = status.max(HealthStatus::Degraded);
                unfixable = true;
                None
            }
            Err(e) => {
                issues.push(format!("raw load error: {e}"));
                status = status.max(HealthStatus::Unhealthy);
                unfixable = true;
                None
            }
        }
    } else if cm.alias_dir(alias).join("raw.json").exists() {
        // Meta is missing but raw.json exists -- try to parse it
        let raw_path = cm.alias_dir(alias).join("raw.json");
        match fs::read(&raw_path) {
            Ok(bytes) => match serde_json::from_slice::<serde_json::Value>(&bytes) {
                Ok(v) => Some(v),
                Err(e) => {
                    issues.push(format!("raw.json unparseable: {e}"));
                    unfixable = true;
                    None
                }
            },
            Err(e) => {
                issues.push(format!("raw.json unreadable: {e}"));
                unfixable = true;
                None
            }
        }
    } else {
        None
    };

    // Step 3: Validate operation pointers
    if let (Some(idx), Some(raw)) = (&index, &raw_value) {
        endpoint_count = idx.endpoints.len();
        let mut broken_ptrs = 0usize;
        for ep in &idx.endpoints {
            if !resolve_pointer(raw, &ep.operation_ptr) {
                broken_ptrs += 1;
            }
        }
        if broken_ptrs > 0 {
            issues.push(format!(
                "{broken_ptrs} endpoint pointer(s) do not resolve in raw"
            ));
            status = status.max(HealthStatus::Degraded);
            // Pointers live in the index, which can be rebuilt from raw.
            fixable = true;
        }
    } else if let Some(ref idx) = index {
        // No raw document to validate against; still report the count.
        endpoint_count = idx.endpoints.len();
    }

    // Step 4: Stale check
    if let Some(ref m) = meta
        && m.is_stale(stale_threshold_days)
    {
        issues.push(format!(
            "stale: fetched {} (threshold: {stale_threshold_days} days)",
            m.fetched_at.format("%Y-%m-%d")
        ));
        // Stale is only a warning -- the data is intact, just old.
        status = status.max(HealthStatus::Warning);
    }

    AliasCheckResult {
        name: alias.to_string(),
        status,
        issues,
        disk_bytes,
        endpoint_count,
        fixable,
        unfixable,
    }
}
|
||||
|
||||
/// Attempt to fix an alias by rebuilding the index from raw.json.
///
/// Recovers from a missing/corrupt meta.json or index.json: parses raw.json,
/// reuses whatever fields survive in any existing meta.json (generation, URL,
/// source format, ETag, Last-Modified), rebuilds the index, and rewrites the
/// whole entry through `write_cache`.
///
/// Returns `Ok` with descriptions of what was fixed, or `Err` with
/// descriptions of why repair was impossible (raw.json unreadable or
/// unparseable, index rebuild failure, or write failure).
fn try_fix_alias(cm: &CacheManager, alias: &str) -> Result<Vec<String>, Vec<String>> {
    let mut fixed: Vec<String> = Vec::new();
    let mut unfixed: Vec<String> = Vec::new();

    let alias_dir = cm.alias_dir(alias);

    // Read raw.json bytes
    let raw_json_path = alias_dir.join("raw.json");
    let raw_json_bytes = match fs::read(&raw_json_path) {
        Ok(b) => b,
        Err(e) => {
            unfixed.push(format!("cannot read raw.json: {e}"));
            return Err(unfixed);
        }
    };

    let raw_value: serde_json::Value = match serde_json::from_slice(&raw_json_bytes) {
        Ok(v) => v,
        Err(e) => {
            unfixed.push(format!("raw.json unparseable: {e}"));
            return Err(unfixed);
        }
    };

    // Read raw.source if present, otherwise use raw.json bytes as source
    let raw_source_path = alias_dir.join("raw.source");
    let raw_source_bytes = fs::read(&raw_source_path).unwrap_or_else(|_| raw_json_bytes.clone());

    // Content hash is computed over the original source bytes.
    let content_hash = compute_hash(&raw_source_bytes);

    // Try to load existing meta for generation/url info
    let meta_path = alias_dir.join("meta.json");
    let existing_meta: Option<CacheMetadata> = fs::read(&meta_path)
        .ok()
        .and_then(|b| serde_json::from_slice(&b).ok());

    // Fall back to sensible defaults when meta.json is gone or unreadable.
    let generation = existing_meta.as_ref().map_or(1, |m| m.generation);
    let url = existing_meta.as_ref().and_then(|m| m.url.clone());
    let source_format = existing_meta
        .as_ref()
        .map_or("json".to_string(), |m| m.source_format.clone());

    // Rebuild index
    let new_index = match build_index(&raw_value, &content_hash, generation) {
        Ok(idx) => idx,
        Err(e) => {
            unfixed.push(format!("index rebuild failed: {e}"));
            return Err(unfixed);
        }
    };

    // Title/version come from the rebuilt index, not the (possibly lost) meta.
    let spec_title = new_index.info.title.clone();
    let spec_version = new_index.info.version.clone();

    // Write everything back through the public API
    match cm.write_cache(
        alias,
        &raw_source_bytes,
        &raw_json_bytes,
        &new_index,
        url,
        &spec_version,
        &spec_title,
        &source_format,
        existing_meta.as_ref().and_then(|m| m.etag.clone()),
        existing_meta.as_ref().and_then(|m| m.last_modified.clone()),
        Some(generation.saturating_sub(1)), // previous_generation so new = generation
    ) {
        Ok(_) => {
            fixed.push("rebuilt index and meta from raw data".to_string());
        }
        Err(e) => {
            unfixed.push(format!("cache write failed: {e}"));
            return Err(unfixed);
        }
    }

    if unfixed.is_empty() {
        Ok(fixed)
    } else {
        Err(unfixed)
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Execute
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Run the `doctor` command: audit every cached alias (or a single one via
/// `--alias`) for integrity problems, optionally repair fixable issues with
/// `--fix`, and report results in human or robot (JSON) form.
///
/// # Errors
/// Returns `AliasNotFound` when `--alias` names a directory that does not
/// exist, and propagates config-load failures; individual alias problems are
/// reported in the output rather than returned as errors.
pub async fn execute(args: &Args, robot_mode: bool) -> Result<(), SwaggerCliError> {
    let start = Instant::now();

    // Load config
    let cfg_path = config_path(None);
    let config = Config::load(&cfg_path)?;

    // Check config dir exists
    let mut warnings: Vec<String> = Vec::new();
    if let Some(parent) = cfg_path.parent()
        && !parent.exists()
    {
        warnings.push(format!(
            "config directory does not exist: {}",
            parent.display()
        ));
    }

    // Check cache dir
    let cache = cache_dir();
    if !cache.exists() {
        warnings.push(format!(
            "cache directory does not exist: {}",
            cache.display()
        ));
        // No aliases to check -- output empty result
        let output = DoctorOutput {
            health: HealthStatus::Warning.as_str().to_string(),
            aliases: Vec::new(),
            warnings: warnings.clone(),
            total_disk_bytes: 0,
            fixable_count: 0,
            unfixable_count: 0,
        };

        if robot_mode {
            robot::robot_success(output, "doctor", start.elapsed());
        } else {
            println!("{} no cache directory found", "warning:".yellow().bold());
            for w in &warnings {
                println!("  {w}");
            }
        }
        return Ok(());
    }

    let cm = CacheManager::new(cache.clone());

    // Discover aliases (including broken ones without meta.json)
    let alias_names: Vec<String> = if let Some(ref specific) = args.alias {
        // Verify the alias dir exists
        if !cm.alias_dir(specific).exists() {
            return Err(SwaggerCliError::AliasNotFound(specific.clone()));
        }
        vec![specific.clone()]
    } else {
        discover_alias_dirs(&cache)
    };

    // Check each alias
    let mut results: Vec<AliasCheckResult> = Vec::new();
    for alias in &alias_names {
        results.push(check_alias(&cm, alias, config.stale_threshold_days));
    }

    // Apply fixes if requested
    if args.fix {
        for result in &mut results {
            // Only attempt repair when the alias is both damaged and repairable.
            if result.fixable && result.status >= HealthStatus::Degraded {
                match try_fix_alias(&cm, &result.name) {
                    Ok(fixes) => {
                        for fix in &fixes {
                            result.issues.push(format!("FIXED: {fix}"));
                        }
                        // Re-check after fix
                        let rechecked = check_alias(&cm, &result.name, config.stale_threshold_days);
                        result.status = rechecked.status;
                        result.endpoint_count = rechecked.endpoint_count;
                        result.fixable = rechecked.fixable;
                        result.unfixable = rechecked.unfixable;
                    }
                    Err(errs) => {
                        for err in &errs {
                            result.issues.push(format!("FIX FAILED: {err}"));
                        }
                        result.unfixable = true;
                    }
                }
            }
        }
    }

    // Compute aggregates
    let total_disk_bytes: u64 = results.iter().map(|r| r.disk_bytes).sum();
    let fixable_count = results.iter().filter(|r| r.fixable).count();
    let unfixable_count = results.iter().filter(|r| r.unfixable).count();

    // Overall health is the worst per-alias status (relies on derived Ord).
    let overall_status = results
        .iter()
        .map(|r| r.status)
        .max()
        .unwrap_or(HealthStatus::Healthy);

    // Build output
    let alias_reports: Vec<AliasReport> = results
        .iter()
        .map(|r| AliasReport {
            name: r.name.clone(),
            status: r.status.as_str().to_string(),
            issues: r.issues.clone(),
            disk_bytes: r.disk_bytes,
            endpoint_count: r.endpoint_count,
        })
        .collect();

    let output = DoctorOutput {
        health: overall_status.as_str().to_string(),
        aliases: alias_reports,
        warnings: warnings.clone(),
        total_disk_bytes,
        fixable_count,
        unfixable_count,
    };

    if robot_mode {
        robot::robot_success(output, "doctor", start.elapsed());
    } else {
        // Human output
        println!(
            "{} {}",
            "Cache health:".bold(),
            overall_status.colored_str()
        );
        println!();

        if results.is_empty() {
            println!("  No cached specs found.");
        }

        for r in &results {
            let status_str = r.status.colored_str();
            let size_kb = r.disk_bytes as f64 / 1024.0;
            println!(
                "  {} [{}] {:.1} KB, {} endpoints",
                r.name.bold(),
                status_str,
                size_kb,
                r.endpoint_count,
            );
            for issue in &r.issues {
                println!("    - {issue}");
            }
        }

        if !warnings.is_empty() {
            println!();
            for w in &warnings {
                println!("{} {w}", "warning:".yellow().bold());
            }
        }

        println!();
        println!(
            "Total: {} alias(es), {:.1} KB on disk",
            results.len(),
            total_disk_bytes as f64 / 1024.0,
        );
        if fixable_count > 0 {
            println!(
                "  {} fixable issue(s) -- run with {} to repair",
                fixable_count,
                "--fix".bold(),
            );
        }
        if unfixable_count > 0 {
            println!(
                "  {} unfixable issue(s) -- re-fetch required",
                unfixable_count,
            );
        }
    }

    Ok(())
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for the doctor command's health checks, repair path, and
    //! file-system helpers, using a minimal OpenAPI spec in a temp cache.

    use super::*;
    use crate::core::cache::CacheManager;
    use crate::core::indexer::build_index;
    use tempfile::TempDir;

    /// Create a minimal valid OpenAPI spec JSON value.
    fn minimal_spec() -> serde_json::Value {
        serde_json::json!({
            "openapi": "3.0.3",
            "info": { "title": "Test API", "version": "1.0.0" },
            "paths": {
                "/pets": {
                    "get": {
                        "summary": "List pets",
                        "responses": { "200": { "description": "OK" } }
                    }
                }
            }
        })
    }

    /// Set up a healthy alias in a temp cache dir.
    fn setup_healthy_cache(tmp: &TempDir) -> (CacheManager, String) {
        let cache_path = tmp.path().join("cache");
        fs::create_dir_all(&cache_path).unwrap();

        let cm = CacheManager::new(cache_path);
        let spec = minimal_spec();
        let raw_bytes = serde_json::to_vec_pretty(&spec).unwrap();
        let content_hash = compute_hash(&raw_bytes);
        let index = build_index(&spec, &content_hash, 1).unwrap();

        cm.write_cache(
            "testapi",
            &raw_bytes,
            &raw_bytes,
            &index,
            Some("https://example.com/api.json".to_string()),
            "1.0.0",
            "Test API",
            "json",
            None,
            None,
            None,
        )
        .unwrap();

        (cm, "testapi".to_string())
    }

    // An intact cache entry must check out clean with no repair flags.
    #[test]
    fn test_healthy_cache_reports_healthy() {
        let tmp = TempDir::new().unwrap();
        let (cm, alias) = setup_healthy_cache(&tmp);

        let result = check_alias(&cm, &alias, 30);

        assert_eq!(result.status, HealthStatus::Healthy);
        assert!(
            result.issues.is_empty(),
            "expected no issues, got: {:?}",
            result.issues
        );
        assert_eq!(result.endpoint_count, 1);
        assert!(result.disk_bytes > 0);
        assert!(!result.fixable);
        assert!(!result.unfixable);
    }

    // Deleting meta.json should degrade the entry but leave it fixable,
    // since raw.json survives.
    #[test]
    fn test_missing_meta_detected() {
        let tmp = TempDir::new().unwrap();
        let (cm, alias) = setup_healthy_cache(&tmp);

        // Delete meta.json to simulate corruption
        let meta_path = cm.alias_dir(&alias).join("meta.json");
        fs::remove_file(&meta_path).unwrap();

        let result = check_alias(&cm, &alias, 30);

        assert!(
            result.status >= HealthStatus::Degraded,
            "expected Degraded or worse, got: {:?}",
            result.status,
        );
        assert!(
            result
                .issues
                .iter()
                .any(|i| i.contains("meta.json missing")),
            "expected 'meta.json missing' issue, got: {:?}",
            result.issues,
        );
        // raw.json still exists, so it should be fixable
        assert!(result.fixable);
    }

    // Garbage in index.json must be flagged as an integrity issue that a
    // rebuild from raw can repair.
    #[test]
    fn test_corrupt_index_detected() {
        let tmp = TempDir::new().unwrap();
        let (cm, alias) = setup_healthy_cache(&tmp);

        // Corrupt the index.json
        let index_path = cm.alias_dir(&alias).join("index.json");
        fs::write(&index_path, b"not valid json").unwrap();

        let result = check_alias(&cm, &alias, 30);

        assert!(
            result.status >= HealthStatus::Degraded,
            "expected Degraded or worse, got: {:?}",
            result.status,
        );
        assert!(
            result.issues.iter().any(|i| i.contains("index integrity")),
            "expected index integrity issue, got: {:?}",
            result.issues,
        );
        assert!(result.fixable);
    }

    // With both meta.json and raw.json gone there is nothing to rebuild from.
    #[test]
    fn test_missing_raw_is_unfixable() {
        let tmp = TempDir::new().unwrap();
        let (cm, alias) = setup_healthy_cache(&tmp);

        // Delete both meta.json and raw.json
        let meta_path = cm.alias_dir(&alias).join("meta.json");
        let raw_path = cm.alias_dir(&alias).join("raw.json");
        fs::remove_file(&meta_path).unwrap();
        fs::remove_file(&raw_path).unwrap();

        let result = check_alias(&cm, &alias, 30);

        assert!(
            result.status >= HealthStatus::Degraded,
            "expected Degraded or worse, got: {:?}",
            result.status,
        );
        assert!(result.unfixable);
    }

    // An old fetched_at past the threshold should surface a stale warning.
    #[test]
    fn test_stale_cache_warns() {
        let tmp = TempDir::new().unwrap();
        let (cm, alias) = setup_healthy_cache(&tmp);

        // Manually modify meta to have an old fetched_at
        let meta_path = cm.alias_dir(&alias).join("meta.json");
        let bytes = fs::read(&meta_path).unwrap();
        let mut meta: CacheMetadata = serde_json::from_slice(&bytes).unwrap();
        meta.fetched_at = chrono::Utc::now() - chrono::Duration::days(60);
        let updated = serde_json::to_vec_pretty(&meta).unwrap();
        fs::write(&meta_path, &updated).unwrap();

        let result = check_alias(&cm, &alias, 30);

        assert!(
            result.status >= HealthStatus::Warning,
            "expected Warning or worse for stale cache, got: {:?}",
            result.status,
        );
        assert!(
            result.issues.iter().any(|i| i.contains("stale")),
            "expected 'stale' issue, got: {:?}",
            result.issues,
        );
    }

    // try_fix_alias must restore a corrupted index back to Healthy.
    #[test]
    fn test_fix_rebuilds_index() {
        let tmp = TempDir::new().unwrap();
        let (cm, alias) = setup_healthy_cache(&tmp);

        // Corrupt the index.json
        let index_path = cm.alias_dir(&alias).join("index.json");
        fs::write(&index_path, b"corrupted data").unwrap();

        // Verify it's broken
        let before = check_alias(&cm, &alias, 30);
        assert!(before.status >= HealthStatus::Degraded);

        // Fix it
        let fix_result = try_fix_alias(&cm, &alias);
        assert!(
            fix_result.is_ok(),
            "fix should succeed, got: {fix_result:?}"
        );

        // Verify it's healthy now
        let after = check_alias(&cm, &alias, 30);
        assert_eq!(
            after.status,
            HealthStatus::Healthy,
            "expected healthy after fix, got: {:?}, issues: {:?}",
            after.status,
            after.issues,
        );
    }

    // Only visible directories count as aliases; hidden dirs and files are skipped.
    #[test]
    fn test_discover_alias_dirs() {
        let tmp = TempDir::new().unwrap();
        let cache_path = tmp.path().join("cache");
        fs::create_dir_all(cache_path.join("alpha")).unwrap();
        fs::create_dir_all(cache_path.join("beta")).unwrap();
        fs::create_dir_all(cache_path.join(".hidden")).unwrap();
        // Create a file (should be ignored)
        fs::write(cache_path.join("not-a-dir"), b"x").unwrap();

        let dirs = discover_alias_dirs(&cache_path);
        assert_eq!(dirs, vec!["alpha", "beta"]);
    }

    // dir_size sums only top-level file bytes.
    #[test]
    fn test_dir_size_computes_bytes() {
        let tmp = TempDir::new().unwrap();
        let dir = tmp.path().join("test");
        fs::create_dir_all(&dir).unwrap();
        fs::write(dir.join("a.txt"), b"hello").unwrap(); // 5 bytes
        fs::write(dir.join("b.txt"), b"world!").unwrap(); // 6 bytes

        let size = dir_size(&dir);
        assert_eq!(size, 11);
    }
}
|
||||
|
||||
853
src/cli/fetch.rs
853
src/cli/fetch.rs
@@ -1,26 +1,865 @@
|
||||
use clap::Args as ClapArgs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use clap::Args as ClapArgs;
|
||||
use serde::Serialize;
|
||||
use tokio::io::AsyncReadExt;
|
||||
|
||||
use crate::core::cache::{CacheManager, compute_hash, validate_alias};
|
||||
use crate::core::config::{AuthType, Config, CredentialSource, cache_dir, config_path};
|
||||
use crate::core::http::AsyncHttpClient;
|
||||
use crate::core::indexer::{Format, build_index, detect_format, normalize_to_json};
|
||||
use crate::errors::SwaggerCliError;
|
||||
use crate::output::robot;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// CLI arguments
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Fetch and cache an OpenAPI spec
|
||||
#[derive(Debug, ClapArgs)]
|
||||
pub struct Args {
|
||||
/// URL of the OpenAPI spec
|
||||
/// URL, file path, or "-" for stdin
|
||||
pub url: String,
|
||||
|
||||
/// Alias for the cached spec
|
||||
#[arg(long)]
|
||||
pub alias: Option<String>,
|
||||
pub alias: String,
|
||||
|
||||
/// Additional HTTP header (repeatable, format: "Name: Value")
|
||||
#[arg(long = "header", short = 'H')]
|
||||
pub header: Vec<String>,
|
||||
|
||||
/// Bearer token for Authorization header
|
||||
#[arg(long)]
|
||||
pub bearer: Option<String>,
|
||||
|
||||
/// Auth profile name from config
|
||||
#[arg(long)]
|
||||
pub auth_profile: Option<String>,
|
||||
|
||||
/// Overwrite existing alias
|
||||
#[arg(long)]
|
||||
pub force: bool,
|
||||
|
||||
/// Auth profile name from config
|
||||
/// HTTP request timeout in milliseconds
|
||||
#[arg(long, default_value = "10000")]
|
||||
pub timeout_ms: u64,
|
||||
|
||||
/// Maximum response size in bytes
|
||||
#[arg(long, default_value = "26214400")]
|
||||
pub max_bytes: u64,
|
||||
|
||||
/// Number of retries on transient errors
|
||||
#[arg(long, default_value = "2")]
|
||||
pub retries: u32,
|
||||
|
||||
/// Allow private/internal host (repeatable)
|
||||
#[arg(long = "allow-private-host")]
|
||||
pub allow_private_host: Vec<String>,
|
||||
|
||||
/// Allow plain HTTP (insecure)
|
||||
#[arg(long)]
|
||||
pub auth: Option<String>,
|
||||
pub allow_insecure_http: bool,
|
||||
}
|
||||
|
||||
pub async fn execute(_args: &Args, _robot: bool) -> Result<(), SwaggerCliError> {
|
||||
Err(SwaggerCliError::Usage("fetch not yet implemented".into()))
|
||||
// ---------------------------------------------------------------------------
|
||||
// Robot output data struct
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// JSON payload emitted in robot mode after a successful fetch.
///
/// Note: auth header values are deliberately absent — only non-sensitive
/// metadata about the cached spec is reported.
#[derive(Debug, Serialize)]
struct FetchOutput {
    /// Alias the spec was cached under.
    alias: String,
    /// Resolved source ("stdin" when read from standard input).
    url: String,
    /// Spec `info.title`.
    title: String,
    /// Spec `info.version`.
    version: String,
    /// Number of endpoints discovered by the indexer.
    endpoint_count: usize,
    /// Number of schemas discovered by the indexer.
    schema_count: usize,
    /// Timestamp recorded by the cache write.
    cached_at: DateTime<Utc>,
    /// Original on-disk/transport format: "json" or "yaml".
    source_format: String,
    /// Hash of the raw (pre-normalization) bytes.
    content_hash: String,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Source classification
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// The three kinds of places a spec can be loaded from.
#[derive(Debug, PartialEq)]
enum SourceKind {
    Stdin,
    LocalFile(String),
    Url(String),
}

/// Decide how to interpret the user-supplied source string.
///
/// Rules, in order: `-` means stdin; a `file://` prefix forces a local file;
/// any other explicit scheme (`…://`) is a URL; a bare path that exists on
/// disk is a local file; everything else falls back to being treated as a
/// URL, so the HTTP client can produce a helpful error for it.
fn classify_source(url: &str) -> SourceKind {
    if url == "-" {
        return SourceKind::Stdin;
    }

    match url.strip_prefix("file://") {
        // Explicit file:// scheme always wins.
        Some(path) => SourceKind::LocalFile(path.to_string()),
        None if url.contains("://") => SourceKind::Url(url.to_string()),
        // Bare string: prefer an existing on-disk path over a URL guess.
        None if Path::new(url).exists() => SourceKind::LocalFile(url.to_string()),
        None => SourceKind::Url(url.to_string()),
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Auth header resolution
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Resolve a credential source to its string value.
|
||||
fn resolve_credential(source: &CredentialSource) -> Result<String, SwaggerCliError> {
|
||||
match source {
|
||||
CredentialSource::Literal { value } => Ok(value.clone()),
|
||||
CredentialSource::EnvVar { name } => std::env::var(name).map_err(|_| {
|
||||
SwaggerCliError::Auth(format!(
|
||||
"environment variable '{name}' not set (required by auth profile)"
|
||||
))
|
||||
}),
|
||||
CredentialSource::Keyring { service, account } => Err(SwaggerCliError::Auth(format!(
|
||||
"keyring credential lookup not yet implemented (service={service}, account={account})"
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
/// Build the list of auth headers from CLI flags and config auth profile.
|
||||
///
|
||||
/// Precedence: --bearer and --header flags override auth profile values.
|
||||
/// Auth header values are never logged or included in output.
|
||||
fn resolve_auth_headers(
|
||||
args: &Args,
|
||||
config: &Config,
|
||||
) -> Result<Vec<(String, String)>, SwaggerCliError> {
|
||||
let mut headers: Vec<(String, String)> = Vec::new();
|
||||
|
||||
// 1. Auth profile from config (lowest precedence)
|
||||
if let Some(profile_name) = &args.auth_profile {
|
||||
let profile = config.auth_profiles.get(profile_name).ok_or_else(|| {
|
||||
SwaggerCliError::Auth(format!("auth profile '{profile_name}' not found in config"))
|
||||
})?;
|
||||
|
||||
let credential = resolve_credential(&profile.credential)?;
|
||||
|
||||
match &profile.auth_type {
|
||||
AuthType::Bearer => {
|
||||
headers.push(("Authorization".to_string(), format!("Bearer {credential}")));
|
||||
}
|
||||
AuthType::ApiKey { header } => {
|
||||
headers.push((header.clone(), credential));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 2. --bearer flag (overrides profile Authorization header)
|
||||
if let Some(token) = &args.bearer {
|
||||
headers.retain(|(name, _)| !name.eq_ignore_ascii_case("authorization"));
|
||||
headers.push(("Authorization".to_string(), format!("Bearer {token}")));
|
||||
}
|
||||
|
||||
// 3. --header flags (highest precedence, override matching names)
|
||||
for raw in &args.header {
|
||||
let (name, value) = parse_header(raw)?;
|
||||
headers.retain(|(n, _)| !n.eq_ignore_ascii_case(&name));
|
||||
headers.push((name, value));
|
||||
}
|
||||
|
||||
Ok(headers)
|
||||
}
|
||||
|
||||
/// Parse a "Name: Value" header string.
|
||||
fn parse_header(raw: &str) -> Result<(String, String), SwaggerCliError> {
|
||||
let Some((name, value)) = raw.split_once(':') else {
|
||||
return Err(SwaggerCliError::Usage(format!(
|
||||
"invalid header format: '{raw}'. Expected 'Name: Value'"
|
||||
)));
|
||||
};
|
||||
let name = name.trim().to_string();
|
||||
let value = value.trim().to_string();
|
||||
if name.is_empty() {
|
||||
return Err(SwaggerCliError::Usage(
|
||||
"header name cannot be empty".to_string(),
|
||||
));
|
||||
}
|
||||
Ok((name, value))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Core fetch pipeline (testable without env var mutation)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Run the full fetch pipeline with an explicit cache path.
///
/// Pipeline: validate alias → check for an existing alias (honoring
/// `--force`) → resolve auth → read bytes from stdin/file/URL → detect
/// format and normalize to JSON → build the search index → write everything
/// to the cache → print a human or robot summary.
///
/// Separated from `execute()` so integration tests can provide a temp cache
/// directory without mutating environment variables (which is unsafe in
/// Rust edition 2024 with `#![forbid(unsafe_code)]`).
async fn fetch_inner(
    args: &Args,
    cache_path: PathBuf,
    robot_mode: bool,
) -> Result<(), SwaggerCliError> {
    let start = Instant::now();

    // 1. Build cache manager and validate alias
    let cm = CacheManager::new(cache_path);
    validate_alias(&args.alias)?;

    // 2. Check alias exists (unless --force)
    if cm.alias_exists(&args.alias) && !args.force {
        return Err(SwaggerCliError::AliasExists(args.alias.clone()));
    }

    // 3. Load config and resolve auth headers
    let cfg = Config::load(&config_path(None))?;
    let auth_headers = resolve_auth_headers(args, &cfg)?;

    // 4. Fetch raw bytes based on source kind.
    //    Tuple: (raw bytes, Content-Type hint, canonical source URL,
    //    filename hint for extension-based format detection).
    let source = classify_source(&args.url);
    let (raw_bytes, content_type_hint, source_url, filename_hint): (
        Vec<u8>,
        Option<String>,
        Option<String>,
        Option<String>,
    ) = match &source {
        SourceKind::Stdin => {
            let mut buf = Vec::new();
            tokio::io::stdin().read_to_end(&mut buf).await?;
            // Stdin carries no content-type, URL, or filename hints.
            (buf, None, None, None)
        }
        SourceKind::LocalFile(path) => {
            // Wrap the IO error so the message names the offending path.
            let bytes = std::fs::read(path).map_err(|e| {
                SwaggerCliError::Io(std::io::Error::new(
                    e.kind(),
                    format!("failed to read file '{path}': {e}"),
                ))
            })?;
            let filename = Path::new(path)
                .file_name()
                .map(|f| f.to_string_lossy().to_string());
            (bytes, None, Some(format!("file://{path}")), filename)
        }
        SourceKind::Url(url) => {
            let mut builder = AsyncHttpClient::builder()
                .overall_timeout(Duration::from_millis(args.timeout_ms))
                .max_bytes(args.max_bytes)
                .max_retries(args.retries)
                .allow_insecure_http(args.allow_insecure_http)
                .allowed_private_hosts(args.allow_private_host.clone());

            for (name, value) in &auth_headers {
                builder = builder.auth_header(name.clone(), value.clone());
            }

            let client = builder.build();
            let result = client.fetch_spec(url).await?;
            // The URL doubles as the filename hint so extension-based
            // format detection (e.g. ".yaml") also works for remote specs.
            (
                result.bytes,
                result.content_type,
                Some(url.clone()),
                Some(url.clone()),
            )
        }
    };

    // 5. Detect format, normalize to JSON, parse, build index
    let format = detect_format(
        &raw_bytes,
        filename_hint.as_deref(),
        content_type_hint.as_deref(),
    );
    let format_str = match format {
        Format::Json => "json",
        Format::Yaml => "yaml",
    };

    let json_bytes = normalize_to_json(&raw_bytes, format)?;
    let value: serde_json::Value = serde_json::from_slice(&json_bytes)?;

    // Compute content hash for indexing (over the raw, pre-normalization bytes)
    let content_hash = compute_hash(&raw_bytes);

    // Determine generation: if overwriting, increment previous generation.
    // A load_index failure (corrupt cache) is treated as "no previous
    // generation" via .ok(), so --force still succeeds.
    let previous_generation = if args.force && cm.alias_exists(&args.alias) {
        cm.load_index(&args.alias)
            .ok()
            .map(|(_, meta)| meta.generation)
    } else {
        None
    };

    let index = build_index(&value, &content_hash, previous_generation.unwrap_or(0) + 1)?;

    // Capture title/version before the index is handed to write_cache.
    let spec_title = index.info.title.clone();
    let spec_version = index.info.version.clone();

    // 6. Write to cache
    let meta = cm.write_cache(
        &args.alias,
        &raw_bytes,
        &json_bytes,
        &index,
        source_url.clone(),
        &spec_version,
        &spec_title,
        format_str,
        None, // etag
        None, // last_modified
        previous_generation,
    )?;

    // 7. Output
    let duration = start.elapsed();

    if robot_mode {
        let output = FetchOutput {
            alias: args.alias.clone(),
            url: source_url.unwrap_or_else(|| "stdin".to_string()),
            title: spec_title,
            version: spec_version,
            endpoint_count: meta.endpoint_count,
            schema_count: meta.schema_count,
            cached_at: meta.fetched_at,
            source_format: format_str.to_string(),
            content_hash: meta.content_hash,
        };
        robot::robot_success(output, "fetch", duration);
    } else {
        println!("Fetched '{}' as alias '{}'", args.url, args.alias);
        println!(
            " {} v{} -- {} endpoints, {} schemas ({})",
            meta.spec_title, meta.spec_version, meta.endpoint_count, meta.schema_count, format_str,
        );
    }

    Ok(())
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public entry point
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
pub async fn execute(args: &Args, robot_mode: bool) -> Result<(), SwaggerCliError> {
|
||||
let cache = cache_dir();
|
||||
fetch_inner(args, cache, robot_mode).await
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for source classification, header parsing, and auth
    //! resolution, plus full-pipeline integration tests that drive
    //! `fetch_inner` against a temp cache directory.
    use super::*;

    // -- Source classification -----------------------------------------------

    #[test]
    fn test_classify_source_stdin() {
        assert_eq!(classify_source("-"), SourceKind::Stdin);
    }

    #[test]
    fn test_classify_source_file_prefix() {
        assert_eq!(
            classify_source("file:///tmp/spec.json"),
            SourceKind::LocalFile("/tmp/spec.json".to_string()),
        );
    }

    #[test]
    fn test_classify_source_url() {
        assert_eq!(
            classify_source("https://example.com/api.json"),
            SourceKind::Url("https://example.com/api.json".to_string()),
        );
        assert_eq!(
            classify_source("http://localhost:8080/spec.yaml"),
            SourceKind::Url("http://localhost:8080/spec.yaml".to_string()),
        );
    }

    #[test]
    fn test_classify_source_ambiguous_defaults_to_url() {
        // A bare string that is neither "-", a scheme, nor an existing path
        // falls through to the URL branch.
        let result = classify_source("not-a-real-file-on-disk-xyz.json");
        assert_eq!(
            result,
            SourceKind::Url("not-a-real-file-on-disk-xyz.json".to_string()),
        );
    }

    // -- Header parsing ------------------------------------------------------

    #[test]
    fn test_parse_header_valid() {
        let (name, value) = parse_header("X-Custom: my-value").unwrap();
        assert_eq!(name, "X-Custom");
        assert_eq!(value, "my-value");
    }

    #[test]
    fn test_parse_header_with_colons_in_value() {
        // Only the first ':' splits; the rest stays in the value.
        let (name, value) = parse_header("Authorization: Bearer abc:def:ghi").unwrap();
        assert_eq!(name, "Authorization");
        assert_eq!(value, "Bearer abc:def:ghi");
    }

    #[test]
    fn test_parse_header_invalid_no_colon() {
        let result = parse_header("NoColonHere");
        assert!(result.is_err());
    }

    #[test]
    fn test_parse_header_empty_name() {
        let result = parse_header(": value");
        assert!(result.is_err());
    }

    // -- Auth header resolution ----------------------------------------------

    // Minimal Args with no auth flags set; individual tests override fields.
    fn make_base_args() -> Args {
        Args {
            url: "https://example.com".to_string(),
            alias: "test".to_string(),
            header: vec![],
            bearer: None,
            auth_profile: None,
            force: false,
            timeout_ms: 10000,
            max_bytes: 26214400,
            retries: 2,
            allow_private_host: vec![],
            allow_insecure_http: false,
        }
    }

    #[test]
    fn test_resolve_auth_headers_bearer_flag() {
        let mut args = make_base_args();
        args.bearer = Some("my-token".to_string());

        let config = Config::default();
        let headers = resolve_auth_headers(&args, &config).unwrap();
        assert_eq!(headers.len(), 1);
        assert_eq!(headers[0].0, "Authorization");
        assert_eq!(headers[0].1, "Bearer my-token");
    }

    #[test]
    fn test_resolve_auth_headers_custom_header() {
        let mut args = make_base_args();
        args.header = vec!["X-Api-Key: secret123".to_string()];

        let config = Config::default();
        let headers = resolve_auth_headers(&args, &config).unwrap();
        assert_eq!(headers.len(), 1);
        assert_eq!(headers[0].0, "X-Api-Key");
        assert_eq!(headers[0].1, "secret123");
    }

    #[test]
    fn test_resolve_auth_headers_bearer_overrides_profile() {
        use crate::core::config::AuthConfig;
        use std::collections::BTreeMap;

        let mut profiles = BTreeMap::new();
        profiles.insert(
            "myprofile".to_string(),
            AuthConfig {
                auth_type: AuthType::Bearer,
                credential: CredentialSource::Literal {
                    value: "profile-token".to_string(),
                },
            },
        );

        let config = Config {
            auth_profiles: profiles,
            ..Config::default()
        };

        let mut args = make_base_args();
        args.bearer = Some("override-token".to_string());
        args.auth_profile = Some("myprofile".to_string());

        // --bearer must replace the profile's Authorization header, not add
        // a second one.
        let headers = resolve_auth_headers(&args, &config).unwrap();
        assert_eq!(headers.len(), 1);
        assert_eq!(headers[0].0, "Authorization");
        assert_eq!(headers[0].1, "Bearer override-token");
    }

    #[test]
    fn test_resolve_auth_headers_missing_profile() {
        let mut args = make_base_args();
        args.auth_profile = Some("nonexistent".to_string());

        let config = Config::default();
        let result = resolve_auth_headers(&args, &config);
        assert!(result.is_err());
        match result.unwrap_err() {
            SwaggerCliError::Auth(msg) => {
                assert!(msg.contains("nonexistent"));
            }
            other => panic!("expected Auth error, got: {other:?}"),
        }
    }

    #[test]
    fn test_resolve_auth_headers_literal_credential_profile() {
        use crate::core::config::AuthConfig;
        use std::collections::BTreeMap;

        let mut profiles = BTreeMap::new();
        profiles.insert(
            "literal-profile".to_string(),
            AuthConfig {
                auth_type: AuthType::ApiKey {
                    header: "X-Api-Key".to_string(),
                },
                credential: CredentialSource::Literal {
                    value: "my-api-key".to_string(),
                },
            },
        );

        let config = Config {
            auth_profiles: profiles,
            ..Config::default()
        };

        let mut args = make_base_args();
        args.auth_profile = Some("literal-profile".to_string());

        let headers = resolve_auth_headers(&args, &config).unwrap();
        assert_eq!(headers.len(), 1);
        assert_eq!(headers[0].0, "X-Api-Key");
        assert_eq!(headers[0].1, "my-api-key");
    }

    #[test]
    fn test_resolve_auth_headers_header_overrides_profile() {
        use crate::core::config::AuthConfig;
        use std::collections::BTreeMap;

        let mut profiles = BTreeMap::new();
        profiles.insert(
            "apikey-profile".to_string(),
            AuthConfig {
                auth_type: AuthType::ApiKey {
                    header: "X-Api-Key".to_string(),
                },
                credential: CredentialSource::Literal {
                    value: "profile-key".to_string(),
                },
            },
        );

        let config = Config {
            auth_profiles: profiles,
            ..Config::default()
        };

        let mut args = make_base_args();
        args.header = vec!["X-Api-Key: override-key".to_string()];
        args.auth_profile = Some("apikey-profile".to_string());

        // --header must replace the same-named header from the profile.
        let headers = resolve_auth_headers(&args, &config).unwrap();
        assert_eq!(headers.len(), 1);
        assert_eq!(headers[0].0, "X-Api-Key");
        assert_eq!(headers[0].1, "override-key");
    }

    #[test]
    fn test_resolve_auth_headers_no_auth() {
        let args = make_base_args();
        let config = Config::default();
        let headers = resolve_auth_headers(&args, &config).unwrap();
        assert!(headers.is_empty());
    }

    // -- Alias validation (integration) --------------------------------------

    #[test]
    fn test_alias_validation_integration() {
        assert!(validate_alias("petstore").is_ok());
        assert!(validate_alias("my-api").is_ok());
        assert!(validate_alias("v1.0").is_ok());
        assert!(validate_alias("API_2").is_ok());

        assert!(validate_alias("").is_err());
        assert!(validate_alias("../etc").is_err());
        assert!(validate_alias(".hidden").is_err());
        assert!(validate_alias("CON").is_err());
    }

    // -- Full pipeline integration tests (using fetch_inner) -----------------

    // Like make_base_args but with caller-chosen url and alias.
    fn make_test_args(url: &str, alias: &str) -> Args {
        Args {
            url: url.to_string(),
            alias: alias.to_string(),
            header: vec![],
            bearer: None,
            auth_profile: None,
            force: false,
            timeout_ms: 10000,
            max_bytes: 26214400,
            retries: 2,
            allow_private_host: vec![],
            allow_insecure_http: false,
        }
    }

    #[tokio::test]
    async fn test_execute_local_file() {
        let tmp = tempfile::tempdir().unwrap();
        let spec_path = tmp.path().join("petstore.json");

        let spec = serde_json::json!({
            "openapi": "3.0.3",
            "info": { "title": "Test API", "version": "1.0.0" },
            "paths": {
                "/pets": {
                    "get": {
                        "summary": "List pets",
                        "responses": { "200": { "description": "OK" } }
                    }
                }
            }
        });
        std::fs::write(&spec_path, serde_json::to_vec_pretty(&spec).unwrap()).unwrap();

        let cache_path = tmp.path().join("cache");
        std::fs::create_dir_all(&cache_path).unwrap();

        let args = make_test_args(spec_path.to_str().unwrap(), "localtest");

        let result = fetch_inner(&args, cache_path.clone(), false).await;
        assert!(result.is_ok(), "execute failed: {result:?}");

        let cm = CacheManager::new(cache_path);
        assert!(cm.alias_exists("localtest"));

        let (index, meta) = cm.load_index("localtest").unwrap();
        assert_eq!(meta.spec_title, "Test API");
        assert_eq!(meta.spec_version, "1.0.0");
        assert_eq!(index.endpoints.len(), 1);
        assert_eq!(meta.source_format, "json");
    }

    #[tokio::test]
    async fn test_execute_yaml_local_file() {
        let tmp = tempfile::tempdir().unwrap();
        let spec_path = tmp.path().join("api.yaml");

        let yaml = r#"
openapi: "3.0.3"
info:
  title: YAML API
  version: "2.0.0"
paths:
  /items:
    get:
      summary: List items
      responses:
        "200":
          description: OK
    post:
      summary: Create item
      responses:
        "201":
          description: Created
"#;
        std::fs::write(&spec_path, yaml).unwrap();

        let cache_path = tmp.path().join("cache");
        std::fs::create_dir_all(&cache_path).unwrap();

        let args = make_test_args(spec_path.to_str().unwrap(), "yamltest");

        let result = fetch_inner(&args, cache_path.clone(), false).await;
        assert!(result.is_ok(), "execute failed: {result:?}");

        let cm = CacheManager::new(cache_path);
        let (index, meta) = cm.load_index("yamltest").unwrap();
        assert_eq!(meta.spec_title, "YAML API");
        assert_eq!(meta.spec_version, "2.0.0");
        assert_eq!(meta.source_format, "yaml");
        assert_eq!(index.endpoints.len(), 2);
    }

    #[tokio::test]
    async fn test_execute_alias_exists_without_force() {
        let tmp = tempfile::tempdir().unwrap();
        let spec_path = tmp.path().join("spec.json");

        let spec = serde_json::json!({
            "openapi": "3.0.3",
            "info": { "title": "Test", "version": "1.0.0" },
            "paths": {}
        });
        std::fs::write(&spec_path, serde_json::to_vec(&spec).unwrap()).unwrap();

        let cache_path = tmp.path().join("cache");
        std::fs::create_dir_all(&cache_path).unwrap();

        let args = make_test_args(spec_path.to_str().unwrap(), "dupetest");

        assert!(fetch_inner(&args, cache_path.clone(), false).await.is_ok());

        // Second fetch without --force must refuse to overwrite.
        let result = fetch_inner(&args, cache_path, false).await;
        assert!(result.is_err());
        match result.unwrap_err() {
            SwaggerCliError::AliasExists(alias) => assert_eq!(alias, "dupetest"),
            other => panic!("expected AliasExists, got: {other:?}"),
        }
    }

    #[tokio::test]
    async fn test_execute_force_overwrites() {
        let tmp = tempfile::tempdir().unwrap();
        let spec_path = tmp.path().join("spec.json");

        let spec = serde_json::json!({
            "openapi": "3.0.3",
            "info": { "title": "Version 1", "version": "1.0.0" },
            "paths": {}
        });
        std::fs::write(&spec_path, serde_json::to_vec(&spec).unwrap()).unwrap();

        let cache_path = tmp.path().join("cache");
        std::fs::create_dir_all(&cache_path).unwrap();

        let args_v1 = make_test_args(spec_path.to_str().unwrap(), "forcetest");
        assert!(
            fetch_inner(&args_v1, cache_path.clone(), false)
                .await
                .is_ok()
        );

        let spec_v2 = serde_json::json!({
            "openapi": "3.0.3",
            "info": { "title": "Version 2", "version": "2.0.0" },
            "paths": {}
        });
        std::fs::write(&spec_path, serde_json::to_vec(&spec_v2).unwrap()).unwrap();

        let mut args_v2 = make_test_args(spec_path.to_str().unwrap(), "forcetest");
        args_v2.force = true;
        assert!(
            fetch_inner(&args_v2, cache_path.clone(), false)
                .await
                .is_ok()
        );

        // Overwrite keeps the alias but bumps the generation counter.
        let cm = CacheManager::new(cache_path);
        let (_, meta) = cm.load_index("forcetest").unwrap();
        assert_eq!(meta.spec_title, "Version 2");
        assert_eq!(meta.generation, 2);
    }

    #[tokio::test]
    async fn test_execute_robot_output() {
        let tmp = tempfile::tempdir().unwrap();
        let spec_path = tmp.path().join("robot.json");

        let spec = serde_json::json!({
            "openapi": "3.0.3",
            "info": { "title": "Robot Test", "version": "0.1.0" },
            "paths": {}
        });
        std::fs::write(&spec_path, serde_json::to_vec(&spec).unwrap()).unwrap();

        let cache_path = tmp.path().join("cache");
        std::fs::create_dir_all(&cache_path).unwrap();

        let args = make_test_args(spec_path.to_str().unwrap(), "robottest");

        let result = fetch_inner(&args, cache_path.clone(), true).await;
        assert!(result.is_ok(), "robot mode execute failed: {result:?}");

        let cm = CacheManager::new(cache_path);
        assert!(cm.alias_exists("robottest"));
    }

    #[tokio::test]
    async fn test_execute_invalid_alias() {
        let tmp = tempfile::tempdir().unwrap();
        let spec_path = tmp.path().join("spec.json");

        let spec = serde_json::json!({
            "openapi": "3.0.3",
            "info": { "title": "Test", "version": "1.0.0" },
            "paths": {}
        });
        std::fs::write(&spec_path, serde_json::to_vec(&spec).unwrap()).unwrap();

        let cache_path = tmp.path().join("cache");
        std::fs::create_dir_all(&cache_path).unwrap();

        let args = make_test_args(spec_path.to_str().unwrap(), "../bad-alias");

        let result = fetch_inner(&args, cache_path, false).await;
        assert!(result.is_err());
        match result.unwrap_err() {
            SwaggerCliError::Usage(msg) => {
                assert!(msg.contains("Invalid alias"));
            }
            other => panic!("expected Usage error, got: {other:?}"),
        }
    }

    #[tokio::test]
    async fn test_execute_file_prefix() {
        let tmp = tempfile::tempdir().unwrap();
        let spec_path = tmp.path().join("fileprefix.json");

        let spec = serde_json::json!({
            "openapi": "3.0.3",
            "info": { "title": "File Prefix Test", "version": "1.0.0" },
            "paths": {}
        });
        std::fs::write(&spec_path, serde_json::to_vec(&spec).unwrap()).unwrap();

        let cache_path = tmp.path().join("cache");
        std::fs::create_dir_all(&cache_path).unwrap();

        let url = format!("file://{}", spec_path.to_str().unwrap());
        let args = make_test_args(&url, "fileprefixtest");

        let result = fetch_inner(&args, cache_path.clone(), false).await;
        assert!(result.is_ok(), "file:// prefix failed: {result:?}");

        let cm = CacheManager::new(cache_path);
        let (_, meta) = cm.load_index("fileprefixtest").unwrap();
        assert_eq!(meta.spec_title, "File Prefix Test");
    }

    #[tokio::test]
    async fn test_execute_nonexistent_file() {
        let tmp = tempfile::tempdir().unwrap();
        let cache_path = tmp.path().join("cache");
        std::fs::create_dir_all(&cache_path).unwrap();

        let args = make_test_args("file:///nonexistent/path/spec.json", "nofile");

        let result = fetch_inner(&args, cache_path, false).await;
        assert!(result.is_err());
        assert!(
            matches!(result.unwrap_err(), SwaggerCliError::Io(_)),
            "expected Io error for missing file",
        );
    }
}
|
||||
|
||||
666
src/cli/list.rs
666
src/cli/list.rs
@@ -1,6 +1,16 @@
|
||||
use clap::Args as ClapArgs;
|
||||
use std::collections::BTreeMap;
|
||||
use std::time::Instant;
|
||||
|
||||
use clap::Args as ClapArgs;
|
||||
use regex::Regex;
|
||||
use serde::Serialize;
|
||||
use tabled::Tabled;
|
||||
|
||||
use crate::core::cache::CacheManager;
|
||||
use crate::core::config::cache_dir;
|
||||
use crate::errors::SwaggerCliError;
|
||||
use crate::output::robot;
|
||||
use crate::output::table::render_table_or_empty;
|
||||
|
||||
/// List endpoints from a cached spec
|
||||
#[derive(Debug, ClapArgs)]
|
||||
@@ -8,19 +18,657 @@ pub struct Args {
|
||||
/// Alias of the cached spec
|
||||
pub alias: String,
|
||||
|
||||
/// Filter by HTTP method
|
||||
#[arg(long)]
|
||||
/// Filter by HTTP method (case-insensitive)
|
||||
#[arg(long, short = 'm')]
|
||||
pub method: Option<String>,
|
||||
|
||||
/// Filter by tag
|
||||
#[arg(long)]
|
||||
/// Filter by tag (endpoints containing this tag)
|
||||
#[arg(long, short = 't')]
|
||||
pub tag: Option<String>,
|
||||
|
||||
/// Filter by path pattern
|
||||
#[arg(long)]
|
||||
/// Filter by path pattern (regex)
|
||||
#[arg(long, short = 'p')]
|
||||
pub path: Option<String>,
|
||||
|
||||
/// Sort order: path, method, or tag
|
||||
#[arg(long, default_value = "path", value_parser = ["path", "method", "tag"])]
|
||||
pub sort: String,
|
||||
|
||||
/// Maximum number of endpoints to show
|
||||
#[arg(long, short = 'n', default_value = "50")]
|
||||
pub limit: usize,
|
||||
|
||||
/// Show all endpoints (no limit)
|
||||
#[arg(long, short = 'a')]
|
||||
pub all: bool,
|
||||
}
|
||||
|
||||
pub async fn execute(_args: &Args, _robot: bool) -> Result<(), SwaggerCliError> {
|
||||
Err(SwaggerCliError::Usage("list not yet implemented".into()))
|
||||
// ---------------------------------------------------------------------------
|
||||
// Robot output structs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Robot-mode payload for the `list` command.
#[derive(Debug, Serialize)]
struct ListOutput {
    /// The endpoints included in this response.
    endpoints: Vec<EndpointEntry>,
    /// Total endpoints in the cached spec (before filtering).
    total: usize,
    /// Count after filters were applied — presumably may exceed
    /// `endpoints.len()` when a limit truncates output; TODO confirm
    /// against the execute body.
    filtered: usize,
    /// Filter name -> value for each filter that was actually applied.
    applied_filters: BTreeMap<String, String>,
    /// Alias/spec metadata plus timing for this invocation.
    meta: ListMeta,
}
|
||||
|
||||
/// One endpoint row in robot-mode `list` output.
#[derive(Debug, Serialize)]
struct EndpointEntry {
    /// Endpoint path template (e.g. "/pets/{id}").
    path: String,
    /// HTTP method.
    method: String,
    /// Operation summary, if the spec provides one.
    summary: Option<String>,
    /// `operationId` from the spec, if present.
    operation_id: Option<String>,
    /// Tags attached to the operation.
    tags: Vec<String>,
    /// Whether the operation is marked deprecated.
    deprecated: bool,
}
|
||||
|
||||
/// Invocation metadata attached to robot-mode `list` output.
#[derive(Debug, Serialize)]
struct ListMeta {
    /// Alias of the cached spec that was listed.
    alias: String,
    /// Spec version string from the cache metadata.
    spec_version: String,
    /// When the spec was cached (string-formatted timestamp).
    cached_at: String,
    /// Wall-clock duration of this command in milliseconds.
    duration_ms: u64,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Human output row
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// One row of the human-readable endpoint table (`tabled` renders the
/// column headers from the `rename` attributes).
#[derive(Tabled)]
struct EndpointRow {
    #[tabled(rename = "METHOD")]
    method: String,
    #[tabled(rename = "PATH")]
    path: String,
    /// Summary column; empty string when the spec has no summary.
    #[tabled(rename = "SUMMARY")]
    summary: String,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Map an HTTP method string to a sort rank.
///
/// Ranks follow the conventional CRUD reading order:
/// GET=0, POST=1, PUT=2, PATCH=3, DELETE=4; any other method sorts last (5).
/// Matching is case-insensitive.
fn method_rank(method: &str) -> u8 {
    // Position in this table *is* the rank; absent methods fall through to 5.
    const ORDER: [&str; 5] = ["GET", "POST", "PUT", "PATCH", "DELETE"];

    let normalized = method.to_uppercase();
    ORDER
        .iter()
        .position(|candidate| *candidate == normalized)
        .map_or(5, |rank| rank as u8)
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Execute
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Execute the `list` command.
///
/// Loads the cached index for `args.alias`, applies method/tag/path filters,
/// sorts per `--sort`, truncates to `--limit` (unless `--all`), then prints
/// either a JSON robot payload or a human-readable table.
///
/// # Errors
/// Returns `Usage` for an invalid `--path` regex, and propagates cache-load
/// failures from `CacheManager::load_index`.
pub async fn execute(args: &Args, robot_mode: bool) -> Result<(), SwaggerCliError> {
    let start = Instant::now();

    // Compile path regex early so we fail fast on invalid patterns
    let path_regex = match &args.path {
        Some(pattern) => {
            let re = Regex::new(pattern).map_err(|e| {
                SwaggerCliError::Usage(format!("Invalid path regex '{pattern}': {e}"))
            })?;
            Some(re)
        }
        None => None,
    };

    let cm = CacheManager::new(cache_dir());
    let (index, meta) = cm.load_index(&args.alias)?;

    // Pre-filter count, reported in both output modes.
    let total = index.endpoints.len();

    // ---- Filter ----
    // Normalize filter values once so the per-endpoint closure stays cheap.
    let method_upper = args.method.as_ref().map(|m| m.to_uppercase());
    let tag_lower = args.tag.as_ref().map(|t| t.to_lowercase());

    // NOTE: `into_iter()` partially moves `endpoints` out of `index`;
    // `index.info` remains usable below.
    let mut filtered: Vec<_> = index
        .endpoints
        .into_iter()
        .filter(|ep| {
            // Method filter: exact match, case-insensitive.
            if let Some(ref m) = method_upper
                && ep.method.to_uppercase() != *m
            {
                return false;
            }
            // Tag filter: substring match against any tag, case-insensitive.
            if let Some(ref t) = tag_lower
                && !ep
                    .tags
                    .iter()
                    .any(|tag| tag.to_lowercase().contains(t.as_str()))
            {
                return false;
            }
            // Path filter: regex match (not anchored unless the pattern is).
            if let Some(ref re) = path_regex
                && !re.is_match(&ep.path)
            {
                return false;
            }
            true
        })
        .collect();

    let filtered_count = filtered.len();

    // ---- Sort ----
    // Each mode uses a stable tiebreak chain so output is deterministic.
    match args.sort.as_str() {
        "method" => {
            filtered.sort_by(|a, b| {
                method_rank(&a.method)
                    .cmp(&method_rank(&b.method))
                    .then_with(|| a.path.cmp(&b.path))
            });
        }
        "tag" => {
            filtered.sort_by(|a, b| {
                // Sort by first tag only; untagged endpoints ("") sort first.
                let tag_a = a.tags.first().map(String::as_str).unwrap_or("");
                let tag_b = b.tags.first().map(String::as_str).unwrap_or("");
                tag_a
                    .cmp(tag_b)
                    .then_with(|| a.path.cmp(&b.path))
                    .then_with(|| method_rank(&a.method).cmp(&method_rank(&b.method)))
            });
        }
        // "path" or anything else: default sort
        // (clap's value_parser restricts --sort to path|method|tag, so the
        // catch-all only ever sees "path" in practice).
        _ => {
            filtered.sort_by(|a, b| {
                a.path
                    .cmp(&b.path)
                    .then_with(|| method_rank(&a.method).cmp(&method_rank(&b.method)))
            });
        }
    }

    // ---- Limit ----
    // --all disables truncation entirely.
    if !args.all {
        filtered.truncate(args.limit);
    }

    let duration = start.elapsed();

    // ---- Output ----
    if robot_mode {
        // Record only the filters the user actually provided, with their
        // original (un-normalized) values.
        let mut applied_filters = BTreeMap::new();
        if let Some(ref m) = args.method {
            applied_filters.insert("method".into(), m.clone());
        }
        if let Some(ref t) = args.tag {
            applied_filters.insert("tag".into(), t.clone());
        }
        if let Some(ref p) = args.path {
            applied_filters.insert("path".into(), p.clone());
        }

        let entries: Vec<EndpointEntry> = filtered
            .iter()
            .map(|ep| EndpointEntry {
                path: ep.path.clone(),
                method: ep.method.clone(),
                summary: ep.summary.clone(),
                operation_id: ep.operation_id.clone(),
                tags: ep.tags.clone(),
                deprecated: ep.deprecated,
            })
            .collect();

        let output = ListOutput {
            endpoints: entries,
            total,
            filtered: filtered_count,
            applied_filters,
            meta: ListMeta {
                alias: args.alias.clone(),
                spec_version: meta.spec_version.clone(),
                cached_at: meta.fetched_at.to_rfc3339(),
                // Clamp u128 millis into u64 rather than truncating silently.
                duration_ms: duration.as_millis().min(u64::MAX as u128) as u64,
            },
        };

        robot::robot_success(output, "list", duration);
    } else {
        println!("API: {} ({} endpoints)", index.info.title, total);
        println!();

        let rows: Vec<EndpointRow> = filtered
            .iter()
            .map(|ep| EndpointRow {
                method: ep.method.clone(),
                path: ep.path.clone(),
                summary: ep.summary.clone().unwrap_or_default(),
            })
            .collect();

        let table = render_table_or_empty(&rows, "No endpoints match the given filters.");
        println!("{table}");

        // Footer: tell the user whether the limit hid any filtered results.
        if !rows.is_empty() {
            println!();
            if filtered_count > rows.len() {
                println!(
                    "Showing {} of {} (filtered from {}). Use --all to show everything.",
                    rows.len(),
                    filtered_count,
                    total
                );
            } else {
                println!("Showing {} of {}", rows.len(), total);
            }
        }
    }

    Ok(())
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Unit tests for the list command's filtering, sorting, limiting, and
// serialization behavior, driven by a small hand-built Petstore-style index.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::spec::{
        IndexInfo, IndexedEndpoint, IndexedParam, IndexedSchema, IndexedTag, SpecIndex,
    };

    /// Build a fully-populated `IndexedEndpoint` fixture.
    /// Request-body fields are only populated for mutating verbs
    /// (anything other than GET/DELETE).
    fn make_test_endpoint(
        path: &str,
        method: &str,
        summary: Option<&str>,
        tags: &[&str],
        deprecated: bool,
    ) -> IndexedEndpoint {
        IndexedEndpoint {
            path: path.to_string(),
            method: method.to_string(),
            summary: summary.map(|s| s.to_string()),
            description: None,
            // Synthesized operationId, e.g. "get_pets" for GET /pets.
            operation_id: Some(format!(
                "{}{}",
                method.to_lowercase(),
                path.replace('/', "_")
            )),
            tags: tags.iter().map(|t| t.to_string()).collect(),
            deprecated,
            parameters: vec![IndexedParam {
                name: "id".into(),
                location: "path".into(),
                required: true,
                description: None,
            }],
            request_body_required: method != "GET" && method != "DELETE",
            request_body_content_types: if method != "GET" && method != "DELETE" {
                vec!["application/json".into()]
            } else {
                vec![]
            },
            security_schemes: vec!["bearerAuth".into()],
            security_required: true,
            // JSON Pointer to the operation, with '/' escaped as '~1'.
            operation_ptr: format!(
                "#/paths/~1{}/{}",
                path.trim_start_matches('/'),
                method.to_lowercase()
            ),
        }
    }

    /// Five-endpoint fixture index: four endpoints tagged "pets"
    /// (one deprecated) and one tagged "store".
    fn make_test_index() -> SpecIndex {
        SpecIndex {
            index_version: 1,
            generation: 1,
            content_hash: "sha256:test".into(),
            openapi: "3.0.3".into(),
            info: IndexInfo {
                title: "Petstore API".into(),
                version: "1.0.0".into(),
            },
            endpoints: vec![
                make_test_endpoint("/pets", "GET", Some("List all pets"), &["pets"], false),
                make_test_endpoint("/pets", "POST", Some("Create a pet"), &["pets"], false),
                make_test_endpoint(
                    "/pets/{petId}",
                    "GET",
                    Some("Get a pet by ID"),
                    &["pets"],
                    false,
                ),
                make_test_endpoint(
                    "/pets/{petId}",
                    "DELETE",
                    Some("Delete a pet"),
                    &["pets"],
                    true,
                ),
                make_test_endpoint(
                    "/store/inventory",
                    "GET",
                    Some("Get store inventory"),
                    &["store"],
                    false,
                ),
            ],
            schemas: vec![IndexedSchema {
                name: "Pet".into(),
                schema_ptr: "#/components/schemas/Pet".into(),
            }],
            tags: vec![
                IndexedTag {
                    name: "pets".into(),
                    description: Some("Pet operations".into()),
                    endpoint_count: 4,
                },
                IndexedTag {
                    name: "store".into(),
                    description: Some("Store operations".into()),
                    endpoint_count: 1,
                },
            ],
        }
    }

    /// Apply the same filtering logic used in execute() to the test index.
    /// Mirrors the production closure so filter semantics are covered even
    /// though execute() itself needs a real cache on disk.
    fn filter_endpoints<'a>(
        index: &'a SpecIndex,
        method: Option<&str>,
        tag: Option<&str>,
        path_pattern: Option<&str>,
    ) -> Result<Vec<&'a IndexedEndpoint>, SwaggerCliError> {
        let path_regex = match path_pattern {
            Some(p) => Some(
                Regex::new(p).map_err(|e| SwaggerCliError::Usage(format!("Invalid regex: {e}")))?,
            ),
            None => None,
        };

        let method_upper = method.map(|m| m.to_uppercase());
        let tag_lower = tag.map(|t| t.to_lowercase());

        let results: Vec<&IndexedEndpoint> = index
            .endpoints
            .iter()
            .filter(|ep| {
                if let Some(ref m) = method_upper
                    && ep.method.to_uppercase() != *m
                {
                    return false;
                }
                if let Some(ref t) = tag_lower
                    && !ep
                        .tags
                        .iter()
                        .any(|tag| tag.to_lowercase().contains(t.as_str()))
                {
                    return false;
                }
                if let Some(ref re) = path_regex
                    && !re.is_match(&ep.path)
                {
                    return false;
                }
                true
            })
            .collect();

        Ok(results)
    }

    #[test]
    fn test_filter_by_method() {
        let index = make_test_index();
        let results = filter_endpoints(&index, Some("GET"), None, None).unwrap();

        // Fixture has exactly three GET endpoints.
        assert_eq!(results.len(), 3);
        for ep in &results {
            assert_eq!(ep.method, "GET");
        }
    }

    #[test]
    fn test_filter_by_method_case_insensitive() {
        let index = make_test_index();
        let results = filter_endpoints(&index, Some("get"), None, None).unwrap();
        assert_eq!(results.len(), 3);

        let results = filter_endpoints(&index, Some("Post"), None, None).unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].method, "POST");
    }

    #[test]
    fn test_filter_by_tag() {
        let index = make_test_index();
        let results = filter_endpoints(&index, None, Some("store"), None).unwrap();

        assert_eq!(results.len(), 1);
        assert_eq!(results[0].path, "/store/inventory");
    }

    #[test]
    fn test_filter_by_tag_case_insensitive() {
        let index = make_test_index();
        let results = filter_endpoints(&index, None, Some("PETS"), None).unwrap();
        assert_eq!(results.len(), 4);
    }

    #[test]
    fn test_filter_by_path_regex() {
        let index = make_test_index();
        // Braces must be escaped since they are regex repetition syntax.
        let results = filter_endpoints(&index, None, None, Some(r"\{petId\}")).unwrap();

        assert_eq!(results.len(), 2);
        for ep in &results {
            assert!(ep.path.contains("{petId}"));
        }
    }

    #[test]
    fn test_filter_by_path_regex_prefix() {
        let index = make_test_index();
        let results = filter_endpoints(&index, None, None, Some("^/pets")).unwrap();
        assert_eq!(results.len(), 4);
    }

    #[test]
    fn test_invalid_regex_error() {
        let index = make_test_index();
        // Unclosed character class must surface as a Usage error, not a panic.
        let result = filter_endpoints(&index, None, None, Some("[invalid"));

        assert!(result.is_err());
        let err = result.unwrap_err();
        match err {
            SwaggerCliError::Usage(msg) => {
                assert!(msg.contains("Invalid regex"), "unexpected message: {msg}");
            }
            other => panic!("expected Usage error, got: {other:?}"),
        }
    }

    #[test]
    fn test_combined_filters() {
        let index = make_test_index();
        let results = filter_endpoints(&index, Some("GET"), Some("pets"), None).unwrap();

        // Filters are AND-ed: GET endpoints that also carry the "pets" tag.
        assert_eq!(results.len(), 2);
        for ep in &results {
            assert_eq!(ep.method, "GET");
            assert!(ep.tags.contains(&"pets".to_string()));
        }
    }

    #[test]
    fn test_no_matches() {
        let index = make_test_index();
        let results = filter_endpoints(&index, Some("PATCH"), None, None).unwrap();
        assert!(results.is_empty());
    }

    #[test]
    fn test_sort_by_method() {
        let index = make_test_index();
        let mut endpoints: Vec<IndexedEndpoint> = index.endpoints.clone();

        // Same comparator as execute()'s "method" branch.
        endpoints.sort_by(|a, b| {
            method_rank(&a.method)
                .cmp(&method_rank(&b.method))
                .then_with(|| a.path.cmp(&b.path))
        });

        // All GETs come first, then POST, then DELETE
        assert_eq!(endpoints[0].method, "GET");
        assert_eq!(endpoints[1].method, "GET");
        assert_eq!(endpoints[2].method, "GET");
        assert_eq!(endpoints[3].method, "POST");
        assert_eq!(endpoints[4].method, "DELETE");
    }

    #[test]
    fn test_sort_by_path_default() {
        let index = make_test_index();
        let mut endpoints: Vec<IndexedEndpoint> = index.endpoints.clone();

        // Same comparator as execute()'s default ("path") branch.
        endpoints.sort_by(|a, b| {
            a.path
                .cmp(&b.path)
                .then_with(|| method_rank(&a.method).cmp(&method_rank(&b.method)))
        });

        assert_eq!(endpoints[0].path, "/pets");
        assert_eq!(endpoints[0].method, "GET");
        assert_eq!(endpoints[1].path, "/pets");
        assert_eq!(endpoints[1].method, "POST");
        assert_eq!(endpoints[2].path, "/pets/{petId}");
        assert_eq!(endpoints[2].method, "GET");
        assert_eq!(endpoints[3].path, "/pets/{petId}");
        assert_eq!(endpoints[3].method, "DELETE");
        assert_eq!(endpoints[4].path, "/store/inventory");
    }

    #[test]
    fn test_sort_by_tag() {
        let index = make_test_index();
        let mut endpoints: Vec<IndexedEndpoint> = index.endpoints.clone();

        // Same comparator as execute()'s "tag" branch (first tag only).
        endpoints.sort_by(|a, b| {
            let tag_a = a.tags.first().map(String::as_str).unwrap_or("");
            let tag_b = b.tags.first().map(String::as_str).unwrap_or("");
            tag_a
                .cmp(tag_b)
                .then_with(|| a.path.cmp(&b.path))
                .then_with(|| method_rank(&a.method).cmp(&method_rank(&b.method)))
        });

        // "pets" < "store" alphabetically
        assert!(endpoints[0].tags.contains(&"pets".to_string()));
        assert!(endpoints[1].tags.contains(&"pets".to_string()));
        assert!(endpoints[2].tags.contains(&"pets".to_string()));
        assert!(endpoints[3].tags.contains(&"pets".to_string()));
        assert!(endpoints[4].tags.contains(&"store".to_string()));
    }

    #[test]
    fn test_limit_applied() {
        let index = make_test_index();
        let mut endpoints = index.endpoints.clone();

        let limit: usize = 2;
        endpoints.truncate(limit);

        assert_eq!(endpoints.len(), 2);
    }

    #[test]
    fn test_limit_larger_than_count() {
        let index = make_test_index();
        let mut endpoints = index.endpoints.clone();

        // truncate() is a no-op when the limit exceeds the length.
        endpoints.truncate(100);
        assert_eq!(endpoints.len(), 5);
    }

    #[test]
    fn test_method_rank_ordering() {
        assert_eq!(method_rank("GET"), 0);
        assert_eq!(method_rank("POST"), 1);
        assert_eq!(method_rank("PUT"), 2);
        assert_eq!(method_rank("PATCH"), 3);
        assert_eq!(method_rank("DELETE"), 4);
        assert_eq!(method_rank("OPTIONS"), 5);
        assert_eq!(method_rank("HEAD"), 5);
    }

    #[test]
    fn test_method_rank_case_insensitive() {
        assert_eq!(method_rank("get"), 0);
        assert_eq!(method_rank("Post"), 1);
        assert_eq!(method_rank("delete"), 4);
    }

    #[test]
    fn test_endpoint_entry_serialization() {
        let entry = EndpointEntry {
            path: "/pets".into(),
            method: "GET".into(),
            summary: Some("List pets".into()),
            operation_id: Some("listPets".into()),
            tags: vec!["pets".into()],
            deprecated: false,
        };

        let json = serde_json::to_string(&entry).unwrap();
        assert!(json.contains("\"path\":\"/pets\""));
        assert!(json.contains("\"method\":\"GET\""));
        assert!(json.contains("\"deprecated\":false"));
    }

    #[test]
    fn test_list_output_serialization() {
        let output = ListOutput {
            endpoints: vec![],
            total: 5,
            filtered: 0,
            applied_filters: BTreeMap::new(),
            meta: ListMeta {
                alias: "petstore".into(),
                spec_version: "1.0.0".into(),
                cached_at: "2025-01-01T00:00:00+00:00".into(),
                duration_ms: 42,
            },
        };

        let json = serde_json::to_string(&output).unwrap();
        assert!(json.contains("\"total\":5"));
        assert!(json.contains("\"filtered\":0"));
        assert!(json.contains("\"alias\":\"petstore\""));
    }

    #[test]
    fn test_applied_filters_populated() {
        let mut filters: BTreeMap<String, String> = BTreeMap::new();
        filters.insert("method".into(), "GET".into());
        filters.insert("tag".into(), "pets".into());
        filters.insert("path".into(), "^/pets".into());

        assert_eq!(filters.len(), 3);
        assert_eq!(filters.get("method").unwrap(), "GET");
    }

    #[test]
    fn test_make_test_index_structure() {
        // Sanity check of the fixture itself so other tests' counts are trustworthy.
        let index = make_test_index();

        assert_eq!(index.info.title, "Petstore API");
        assert_eq!(index.info.version, "1.0.0");
        assert_eq!(index.endpoints.len(), 5);
        assert_eq!(index.schemas.len(), 1);
        assert_eq!(index.tags.len(), 2);
    }
}
|
||||
|
||||
@@ -1,8 +1,15 @@
|
||||
use std::time::Instant;
|
||||
|
||||
use clap::Args as ClapArgs;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::core::cache::CacheManager;
|
||||
use crate::core::config::cache_dir;
|
||||
use crate::core::search::{SearchEngine, SearchOptions, SearchResult, SearchResultType};
|
||||
use crate::errors::SwaggerCliError;
|
||||
use crate::output::robot;
|
||||
|
||||
/// Search endpoints by keyword
|
||||
/// Search endpoints and schemas by keyword
|
||||
#[derive(Debug, ClapArgs)]
|
||||
pub struct Args {
|
||||
/// Alias of the cached spec
|
||||
@@ -11,11 +18,173 @@ pub struct Args {
|
||||
/// Search query
|
||||
pub query: String,
|
||||
|
||||
/// Case-sensitive matching
|
||||
#[arg(long)]
|
||||
pub case_sensitive: bool,
|
||||
|
||||
/// Match query as exact phrase
|
||||
#[arg(long)]
|
||||
pub exact: bool,
|
||||
|
||||
/// Fields to search (comma-separated: all, paths, descriptions, schemas)
|
||||
#[arg(long = "in")]
|
||||
pub in_fields: Option<String>,
|
||||
|
||||
/// Maximum number of results
|
||||
#[arg(long, default_value = "20")]
|
||||
pub limit: usize,
|
||||
}
|
||||
|
||||
pub async fn execute(_args: &Args, _robot: bool) -> Result<(), SwaggerCliError> {
|
||||
Err(SwaggerCliError::Usage("search not yet implemented".into()))
|
||||
// ---------------------------------------------------------------------------
|
||||
// Robot-mode output types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Top-level JSON payload emitted by `search` in robot mode.
#[derive(Debug, Serialize)]
struct RobotOutput {
    /// Ranked search hits (already limited by the engine).
    results: Vec<RobotResult>,
    /// Number of results returned (after the limit was applied).
    total: usize,
}
|
||||
|
||||
/// One search hit in the robot-mode `search` payload.
#[derive(Debug, Serialize)]
struct RobotResult {
    /// Hit kind, serialized as "type": "endpoint" or "schema".
    #[serde(rename = "type")]
    result_type: &'static str,
    /// Endpoint path or schema name, depending on `result_type`.
    name: String,
    /// HTTP method for endpoint hits; omitted for schemas.
    #[serde(skip_serializing_if = "Option::is_none")]
    method: Option<String>,
    /// Operation summary, if any; omitted when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    summary: Option<String>,
    /// 1-based position in the ranked result list.
    rank: usize,
    /// Relevance score assigned by the search engine.
    score: u32,
    /// Field-level matches with context snippets.
    matches: Vec<RobotMatch>,
}
|
||||
|
||||
/// Where a query matched within a search hit.
#[derive(Debug, Serialize)]
struct RobotMatch {
    /// Name of the field the match occurred in (e.g. path, description).
    field: String,
    /// Excerpt of the matched text with surrounding context.
    snippet: String,
}
|
||||
|
||||
impl From<&SearchResult> for RobotResult {
|
||||
fn from(r: &SearchResult) -> Self {
|
||||
Self {
|
||||
result_type: match r.result_type {
|
||||
SearchResultType::Endpoint => "endpoint",
|
||||
SearchResultType::Schema => "schema",
|
||||
},
|
||||
name: r.name.clone(),
|
||||
method: r.method.clone(),
|
||||
summary: r.summary.clone(),
|
||||
rank: r.rank,
|
||||
score: r.score,
|
||||
matches: r
|
||||
.matches
|
||||
.iter()
|
||||
.map(|m| RobotMatch {
|
||||
field: m.field.clone(),
|
||||
snippet: m.snippet.clone(),
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Field parsing
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn parse_in_fields(raw: &str) -> Result<(bool, bool, bool), SwaggerCliError> {
|
||||
let mut search_paths = false;
|
||||
let mut search_descriptions = false;
|
||||
let mut search_schemas = false;
|
||||
|
||||
for field in raw.split(',').map(str::trim) {
|
||||
match field.to_lowercase().as_str() {
|
||||
"all" => {
|
||||
search_paths = true;
|
||||
search_descriptions = true;
|
||||
search_schemas = true;
|
||||
}
|
||||
"paths" => search_paths = true,
|
||||
"descriptions" => search_descriptions = true,
|
||||
"schemas" => search_schemas = true,
|
||||
other => {
|
||||
return Err(SwaggerCliError::Usage(format!(
|
||||
"Unknown --in field '{other}'. Valid values: all, paths, descriptions, schemas"
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok((search_paths, search_descriptions, search_schemas))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Execute
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Execute the `search` command.
///
/// Resolves the `--in` field selection (defaulting to all scopes), loads the
/// cached index for `args.alias`, runs the search engine, and prints either
/// a JSON robot payload or a ranked human-readable list.
///
/// # Errors
/// Returns `Usage` for an unknown `--in` field and propagates cache-load
/// failures from `CacheManager::load_index`.
pub async fn execute(args: &Args, robot_mode: bool) -> Result<(), SwaggerCliError> {
    let start = Instant::now();

    // Default to searching everything when --in is not supplied.
    let (search_paths, search_descriptions, search_schemas) = match &args.in_fields {
        Some(fields) => parse_in_fields(fields)?,
        None => (true, true, true),
    };

    let cm = CacheManager::new(cache_dir());
    let (index, _meta) = cm.load_index(&args.alias)?;

    let opts = SearchOptions {
        search_paths,
        search_descriptions,
        search_schemas,
        case_sensitive: args.case_sensitive,
        exact: args.exact,
        limit: args.limit,
    };

    let engine = SearchEngine::new(&index);
    // Engine applies ranking and the limit; results arrive ready to print.
    let results = engine.search(&args.query, &opts);

    if robot_mode {
        let output = RobotOutput {
            total: results.len(),
            results: results.iter().map(RobotResult::from).collect(),
        };
        robot::robot_success(output, "search", start.elapsed());
    } else if results.is_empty() {
        println!("No results found for '{}'", args.query);
    } else {
        println!(
            "Found {} result{} for '{}':\n",
            results.len(),
            // Pluralize "result" only when there are multiple hits.
            if results.len() == 1 { "" } else { "s" },
            args.query,
        );
        for r in &results {
            let type_label = match r.result_type {
                SearchResultType::Endpoint => "endpoint",
                SearchResultType::Schema => "schema",
            };
            // Optional fragments collapse to "" so the format line stays flat.
            let method_str = r
                .method
                .as_deref()
                .map(|m| format!("{m} "))
                .unwrap_or_default();
            let summary_str = r
                .summary
                .as_deref()
                .map(|s| format!(" - {s}"))
                .unwrap_or_default();

            println!(
                " {rank}. [{type_label}] {method_str}{name}{summary_str} (score: {score})",
                rank = r.rank,
                name = r.name,
                score = r.score,
            );
        }
    }

    Ok(())
}
|
||||
|
||||
425
src/cli/show.rs
425
src/cli/show.rs
@@ -1,6 +1,14 @@
|
||||
use clap::Args as ClapArgs;
|
||||
use std::time::Instant;
|
||||
|
||||
use clap::Args as ClapArgs;
|
||||
use serde::Serialize;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::core::cache::CacheManager;
|
||||
use crate::core::config::cache_dir;
|
||||
use crate::core::refs::expand_refs;
|
||||
use crate::errors::SwaggerCliError;
|
||||
use crate::output::robot::robot_success;
|
||||
|
||||
/// Show details of a specific endpoint
|
||||
#[derive(Debug, ClapArgs)]
|
||||
@@ -8,10 +16,417 @@ pub struct Args {
|
||||
/// Alias of the cached spec
|
||||
pub alias: String,
|
||||
|
||||
/// Operation ID or path to show
|
||||
pub endpoint: String,
|
||||
/// Endpoint path (e.g., "/pets/{petId}")
|
||||
pub path: String,
|
||||
|
||||
/// HTTP method to show (GET, POST, etc.). Required when path has multiple methods.
|
||||
#[arg(long, short)]
|
||||
pub method: Option<String>,
|
||||
|
||||
/// Expand $ref entries inline
|
||||
#[arg(long)]
|
||||
pub expand_refs: bool,
|
||||
|
||||
/// Maximum depth for ref expansion
|
||||
#[arg(long, default_value = "3")]
|
||||
pub max_depth: u32,
|
||||
}
|
||||
|
||||
pub async fn execute(_args: &Args, _robot: bool) -> Result<(), SwaggerCliError> {
|
||||
Err(SwaggerCliError::Usage("show not yet implemented".into()))
|
||||
/// Full detail view of one operation, used for both robot JSON output and
/// as the input to the human renderer.
#[derive(Debug, Serialize)]
pub struct ShowOutput {
    /// URL template path of the operation.
    pub path: String,
    /// HTTP method as stored in the index.
    pub method: String,
    /// Operation summary, if present in the spec.
    pub summary: Option<String>,
    /// Longer description, if present.
    pub description: Option<String>,
    /// `operationId`, if present.
    pub operation_id: Option<String>,
    /// Tags attached to the operation.
    pub tags: Vec<String>,
    /// Whether the spec marks this operation deprecated.
    pub deprecated: bool,
    /// Raw `parameters` subtree (JSON array; empty array when absent).
    pub parameters: Value,
    /// Raw `requestBody` subtree, when the operation declares one.
    pub request_body: Option<Value>,
    /// Raw `responses` subtree (JSON object; empty object when absent).
    pub responses: Value,
    /// Raw `security` subtree (JSON array; empty array when absent).
    pub security: Value,
}
|
||||
|
||||
/// Navigate a JSON value using a JSON Pointer (RFC 6901).
|
||||
///
|
||||
/// Unescapes `~1` -> `/` and `~0` -> `~` (decode ~1 first per spec).
|
||||
fn navigate_pointer(root: &Value, pointer: &str) -> Option<Value> {
|
||||
if pointer.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let stripped = pointer.strip_prefix('/')?;
|
||||
|
||||
let mut current = root;
|
||||
for token in stripped.split('/') {
|
||||
let unescaped = token.replace("~1", "/").replace("~0", "~");
|
||||
match current {
|
||||
Value::Object(map) => {
|
||||
current = map.get(&unescaped)?;
|
||||
}
|
||||
Value::Array(arr) => {
|
||||
let idx: usize = unescaped.parse().ok()?;
|
||||
current = arr.get(idx)?;
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
|
||||
Some(current.clone())
|
||||
}
|
||||
|
||||
/// Execute the `show` command.
///
/// Resolves the requested path (and optional `--method`) against the cached
/// index, extracts the matching operation subtree from the raw spec via its
/// stored JSON Pointer, optionally expands `$ref`s, and prints robot JSON or
/// human-readable detail.
///
/// # Errors
/// `Usage` when the path is unknown, the method is unknown for the path, or
/// the path has multiple methods and none was specified; `Cache` when the
/// stored pointer does not resolve in the raw spec.
pub async fn execute(args: &Args, robot: bool) -> Result<(), SwaggerCliError> {
    let start = Instant::now();

    let cm = CacheManager::new(cache_dir());
    let (index, meta) = cm.load_index(&args.alias)?;

    // Find endpoints matching the requested path
    let matching: Vec<_> = index
        .endpoints
        .iter()
        .filter(|ep| ep.path == args.path)
        .collect();

    if matching.is_empty() {
        return Err(SwaggerCliError::Usage(format!(
            "No endpoint found for path '{}' in alias '{}'",
            args.path, args.alias
        )));
    }

    // If a method is specified, filter to that method
    let endpoint = if let Some(method) = &args.method {
        let method_upper = method.to_uppercase();
        matching
            .iter()
            .find(|ep| ep.method.to_uppercase() == method_upper)
            .ok_or_else(|| {
                // List the methods that DO exist to make the error actionable.
                let available: Vec<&str> = matching.iter().map(|ep| ep.method.as_str()).collect();
                SwaggerCliError::Usage(format!(
                    "Method '{}' not found for path '{}'. Available methods: {}",
                    method,
                    args.path,
                    available.join(", ")
                ))
            })?
    } else if matching.len() == 1 {
        // Unambiguous: the path has exactly one method.
        &matching[0]
    } else {
        // Ambiguous without --method: refuse rather than guess.
        let available: Vec<&str> = matching.iter().map(|ep| ep.method.as_str()).collect();
        return Err(SwaggerCliError::Usage(format!(
            "Multiple methods available for path '{}': {}. Use --method to specify one.",
            args.path,
            available.join(", ")
        )));
    };

    // Load raw spec
    let raw = cm.load_raw(&args.alias, &meta)?;

    // Navigate to operation subtree
    let operation = navigate_pointer(&raw, &endpoint.operation_ptr).ok_or_else(|| {
        // Pointer failure means the cached index and raw spec disagree —
        // a cache integrity problem, not a user error.
        SwaggerCliError::Cache(format!(
            "Failed to navigate to operation at pointer '{}' in raw spec for alias '{}'",
            endpoint.operation_ptr, args.alias
        ))
    })?;

    // Rebind mutably so $ref expansion can edit the subtree in place.
    let mut operation = operation;

    if args.expand_refs {
        expand_refs(&mut operation, &raw, args.max_depth);
    }

    // Missing sections are normalized to empty containers so the output
    // shape is stable for robot consumers.
    let parameters = operation
        .get("parameters")
        .cloned()
        .unwrap_or(Value::Array(vec![]));

    let request_body = operation.get("requestBody").cloned();

    let responses = operation
        .get("responses")
        .cloned()
        .unwrap_or(Value::Object(serde_json::Map::new()));

    let security = operation
        .get("security")
        .cloned()
        .unwrap_or(Value::Array(vec![]));

    let output = ShowOutput {
        path: endpoint.path.clone(),
        method: endpoint.method.clone(),
        summary: endpoint.summary.clone(),
        description: endpoint.description.clone(),
        operation_id: endpoint.operation_id.clone(),
        tags: endpoint.tags.clone(),
        deprecated: endpoint.deprecated,
        parameters,
        request_body,
        responses,
        security,
    };

    if robot {
        robot_success(&output, "show", start.elapsed());
    } else {
        print_human(&output);
    }

    Ok(())
}
|
||||
|
||||
/// Render a `ShowOutput` as human-readable text on stdout.
///
/// Prints the method/path header line, then each optional section
/// (summary, description, operation id, tags, deprecation flag, parameters,
/// request body, response status codes) only when it is present/non-empty.
fn print_human(output: &ShowOutput) {
    println!("{} {}", output.method.to_uppercase(), output.path);

    if let Some(summary) = &output.summary {
        println!(" Summary: {summary}");
    }

    if let Some(description) = &output.description {
        println!(" Description: {description}");
    }

    if let Some(op_id) = &output.operation_id {
        println!(" Operation ID: {op_id}");
    }

    if !output.tags.is_empty() {
        println!(" Tags: {}", output.tags.join(", "));
    }

    if output.deprecated {
        println!(" DEPRECATED");
    }

    // Parameters: one bullet per entry; unknown fields degrade to "?".
    if let Value::Array(params) = &output.parameters
        && !params.is_empty()
    {
        println!(" Parameters:");
        for param in params {
            let name = param.get("name").and_then(|v| v.as_str()).unwrap_or("?");
            let location = param.get("in").and_then(|v| v.as_str()).unwrap_or("?");
            let required = param
                .get("required")
                .and_then(|v| v.as_bool())
                .unwrap_or(false);
            let req_marker = if required { " (required)" } else { "" };
            println!(" - {name} [{location}]{req_marker}");
        }
    }

    // Request body: pretty-printed JSON, indented under the heading.
    // Serialization failure silently skips the body (best-effort display).
    if let Some(body) = &output.request_body {
        println!(" Request Body:");
        if let Ok(pretty) = serde_json::to_string_pretty(body) {
            for line in pretty.lines() {
                println!(" {line}");
            }
        }
    }

    // Responses: status codes only; bodies are not expanded here.
    if let Value::Object(responses) = &output.responses
        && !responses.is_empty()
    {
        println!(" Responses:");
        for (status, _) in responses {
            println!(" - {status}");
        }
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::core::spec::{IndexedEndpoint, IndexedParam};
|
||||
use serde_json::json;
|
||||
|
||||
fn make_test_endpoints() -> Vec<IndexedEndpoint> {
|
||||
vec![
|
||||
IndexedEndpoint {
|
||||
path: "/pets/{petId}".into(),
|
||||
method: "GET".into(),
|
||||
summary: Some("Get a pet".into()),
|
||||
description: Some("Returns a single pet".into()),
|
||||
operation_id: Some("getPet".into()),
|
||||
tags: vec!["pets".into()],
|
||||
deprecated: false,
|
||||
parameters: vec![IndexedParam {
|
||||
name: "petId".into(),
|
||||
location: "path".into(),
|
||||
required: true,
|
||||
description: Some("ID of pet".into()),
|
||||
}],
|
||||
request_body_required: false,
|
||||
request_body_content_types: vec![],
|
||||
security_schemes: vec!["api_key".into()],
|
||||
security_required: true,
|
||||
operation_ptr: "/paths/~1pets~1{petId}/get".into(),
|
||||
},
|
||||
IndexedEndpoint {
|
||||
path: "/pets/{petId}".into(),
|
||||
method: "DELETE".into(),
|
||||
summary: Some("Delete a pet".into()),
|
||||
description: None,
|
||||
operation_id: Some("deletePet".into()),
|
||||
tags: vec!["pets".into()],
|
||||
deprecated: false,
|
||||
parameters: vec![IndexedParam {
|
||||
name: "petId".into(),
|
||||
location: "path".into(),
|
||||
required: true,
|
||||
description: Some("ID of pet".into()),
|
||||
}],
|
||||
request_body_required: false,
|
||||
request_body_content_types: vec![],
|
||||
security_schemes: vec!["api_key".into()],
|
||||
security_required: true,
|
||||
operation_ptr: "/paths/~1pets~1{petId}/delete".into(),
|
||||
},
|
||||
IndexedEndpoint {
|
||||
path: "/pets".into(),
|
||||
method: "GET".into(),
|
||||
summary: Some("List pets".into()),
|
||||
description: None,
|
||||
operation_id: Some("listPets".into()),
|
||||
tags: vec!["pets".into()],
|
||||
deprecated: false,
|
||||
parameters: vec![],
|
||||
request_body_required: false,
|
||||
request_body_content_types: vec![],
|
||||
security_schemes: vec![],
|
||||
security_required: false,
|
||||
operation_ptr: "/paths/~1pets/get".into(),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_find_endpoint_by_path() {
|
||||
let endpoints = make_test_endpoints();
|
||||
let path = "/pets";
|
||||
|
||||
let matching: Vec<_> = endpoints.iter().filter(|ep| ep.path == path).collect();
|
||||
assert_eq!(matching.len(), 1);
|
||||
assert_eq!(matching[0].method, "GET");
|
||||
assert_eq!(matching[0].operation_id, Some("listPets".into()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiple_methods_error() {
|
||||
let endpoints = make_test_endpoints();
|
||||
let path = "/pets/{petId}";
|
||||
|
||||
let matching: Vec<_> = endpoints.iter().filter(|ep| ep.path == path).collect();
|
||||
assert_eq!(matching.len(), 2);
|
||||
|
||||
let available: Vec<&str> = matching.iter().map(|ep| ep.method.as_str()).collect();
|
||||
assert!(available.contains(&"GET"));
|
||||
assert!(available.contains(&"DELETE"));
|
||||
|
||||
// With method filter, should find exactly one
|
||||
let method = "GET";
|
||||
let method_upper = method.to_uppercase();
|
||||
let found: Vec<_> = matching
|
||||
.iter()
|
||||
.filter(|ep| ep.method.to_uppercase() == method_upper)
|
||||
.collect();
|
||||
assert_eq!(found.len(), 1);
|
||||
assert_eq!(found[0].operation_id, Some("getPet".into()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pointer_navigation() {
|
||||
let raw = json!({
|
||||
"paths": {
|
||||
"/pets/{petId}": {
|
||||
"get": {
|
||||
"summary": "Get a pet",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "petId",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": { "description": "A pet" },
|
||||
"404": { "description": "Not found" }
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"summary": "Delete a pet",
|
||||
"responses": {
|
||||
"204": { "description": "Deleted" }
|
||||
}
|
||||
}
|
||||
},
|
||||
"/pets": {
|
||||
"get": {
|
||||
"summary": "List pets",
|
||||
"responses": {
|
||||
"200": { "description": "A list of pets" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Navigate to GET /pets/{petId}
|
||||
let result = navigate_pointer(&raw, "/paths/~1pets~1{petId}/get");
|
||||
assert!(result.is_some());
|
||||
let op = result.unwrap();
|
||||
assert_eq!(op["summary"], "Get a pet");
|
||||
assert!(op["parameters"].is_array());
|
||||
assert_eq!(op["parameters"][0]["name"], "petId");
|
||||
|
||||
// Navigate to DELETE /pets/{petId}
|
||||
let result = navigate_pointer(&raw, "/paths/~1pets~1{petId}/delete");
|
||||
assert!(result.is_some());
|
||||
let op = result.unwrap();
|
||||
assert_eq!(op["summary"], "Delete a pet");
|
||||
|
||||
// Navigate to GET /pets
|
||||
let result = navigate_pointer(&raw, "/paths/~1pets/get");
|
||||
assert!(result.is_some());
|
||||
let op = result.unwrap();
|
||||
assert_eq!(op["summary"], "List pets");
|
||||
|
||||
// Invalid pointer
|
||||
let result = navigate_pointer(&raw, "/paths/~1nonexistent/get");
|
||||
assert!(result.is_none());
|
||||
|
||||
// Empty pointer
|
||||
let result = navigate_pointer(&raw, "");
|
||||
assert!(result.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_show_output_serialization() {
|
||||
let output = ShowOutput {
|
||||
path: "/pets/{petId}".into(),
|
||||
method: "GET".into(),
|
||||
summary: Some("Get a pet".into()),
|
||||
description: Some("Returns a single pet".into()),
|
||||
operation_id: Some("getPet".into()),
|
||||
tags: vec!["pets".into()],
|
||||
deprecated: false,
|
||||
parameters: json!([
|
||||
{ "name": "petId", "in": "path", "required": true }
|
||||
]),
|
||||
request_body: None,
|
||||
responses: json!({
|
||||
"200": { "description": "A pet" }
|
||||
}),
|
||||
security: json!([]),
|
||||
};
|
||||
|
||||
let serialized = serde_json::to_value(&output).unwrap();
|
||||
assert_eq!(serialized["path"], "/pets/{petId}");
|
||||
assert_eq!(serialized["method"], "GET");
|
||||
assert_eq!(serialized["deprecated"], false);
|
||||
assert!(serialized["request_body"].is_null());
|
||||
assert!(serialized["tags"].is_array());
|
||||
}
|
||||
}
|
||||
|
||||
160
src/cli/tags.rs
160
src/cli/tags.rs
@@ -1,6 +1,15 @@
|
||||
use clap::Args as ClapArgs;
|
||||
use std::time::Instant;
|
||||
|
||||
use clap::Args as ClapArgs;
|
||||
use serde::Serialize;
|
||||
use tabled::Tabled;
|
||||
|
||||
use crate::core::cache::CacheManager;
|
||||
use crate::core::config::cache_dir;
|
||||
use crate::core::spec::SpecIndex;
|
||||
use crate::errors::SwaggerCliError;
|
||||
use crate::output::robot;
|
||||
use crate::output::table::render_table_or_empty;
|
||||
|
||||
/// List tags from a cached spec
|
||||
#[derive(Debug, ClapArgs)]
|
||||
@@ -9,6 +18,151 @@ pub struct Args {
|
||||
pub alias: String,
|
||||
}
|
||||
|
||||
pub async fn execute(_args: &Args, _robot: bool) -> Result<(), SwaggerCliError> {
|
||||
Err(SwaggerCliError::Usage("tags not yet implemented".into()))
|
||||
#[derive(Debug, Serialize)]
|
||||
struct TagsOutput {
|
||||
tags: Vec<TagEntry>,
|
||||
total: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct TagEntry {
|
||||
name: String,
|
||||
description: Option<String>,
|
||||
endpoint_count: usize,
|
||||
}
|
||||
|
||||
#[derive(Tabled)]
|
||||
struct TagRow {
|
||||
#[tabled(rename = "NAME")]
|
||||
name: String,
|
||||
#[tabled(rename = "ENDPOINTS")]
|
||||
endpoints: usize,
|
||||
#[tabled(rename = "DESCRIPTION")]
|
||||
description: String,
|
||||
}
|
||||
|
||||
fn build_output(index: &SpecIndex) -> TagsOutput {
|
||||
let tags: Vec<TagEntry> = index
|
||||
.tags
|
||||
.iter()
|
||||
.map(|t| TagEntry {
|
||||
name: t.name.clone(),
|
||||
description: t.description.clone(),
|
||||
endpoint_count: t.endpoint_count,
|
||||
})
|
||||
.collect();
|
||||
let total = tags.len();
|
||||
TagsOutput { tags, total }
|
||||
}
|
||||
|
||||
pub async fn execute(args: &Args, robot_mode: bool) -> Result<(), SwaggerCliError> {
|
||||
let start = Instant::now();
|
||||
let cm = CacheManager::new(cache_dir());
|
||||
let (index, meta) = cm.load_index(&args.alias)?;
|
||||
let output = build_output(&index);
|
||||
|
||||
if robot_mode {
|
||||
robot::robot_success(output, "tags", start.elapsed());
|
||||
} else {
|
||||
let rows: Vec<TagRow> = output
|
||||
.tags
|
||||
.iter()
|
||||
.map(|t| TagRow {
|
||||
name: t.name.clone(),
|
||||
endpoints: t.endpoint_count,
|
||||
description: t.description.clone().unwrap_or_default(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
println!(
|
||||
"{} {} -- {} tags",
|
||||
meta.spec_title, meta.spec_version, output.total
|
||||
);
|
||||
println!(
|
||||
"{}",
|
||||
render_table_or_empty(&rows, "No tags defined in this spec.")
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::core::spec::{IndexInfo, IndexedTag, SpecIndex};
|
||||
|
||||
fn make_test_index() -> SpecIndex {
|
||||
SpecIndex {
|
||||
index_version: 1,
|
||||
generation: 1,
|
||||
content_hash: "sha256:test".into(),
|
||||
openapi: "3.0.3".into(),
|
||||
info: IndexInfo {
|
||||
title: "Test API".into(),
|
||||
version: "2.0.0".into(),
|
||||
},
|
||||
endpoints: vec![],
|
||||
schemas: vec![],
|
||||
tags: vec![
|
||||
IndexedTag {
|
||||
name: "auth".into(),
|
||||
description: Some("Authentication endpoints".into()),
|
||||
endpoint_count: 3,
|
||||
},
|
||||
IndexedTag {
|
||||
name: "users".into(),
|
||||
description: None,
|
||||
endpoint_count: 5,
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tags_output() {
|
||||
let index = make_test_index();
|
||||
let output = build_output(&index);
|
||||
|
||||
assert_eq!(output.total, 2);
|
||||
assert_eq!(output.tags.len(), 2);
|
||||
|
||||
assert_eq!(output.tags[0].name, "auth");
|
||||
assert_eq!(
|
||||
output.tags[0].description.as_deref(),
|
||||
Some("Authentication endpoints")
|
||||
);
|
||||
assert_eq!(output.tags[0].endpoint_count, 3);
|
||||
|
||||
assert_eq!(output.tags[1].name, "users");
|
||||
assert!(output.tags[1].description.is_none());
|
||||
assert_eq!(output.tags[1].endpoint_count, 5);
|
||||
|
||||
// Verify serialization roundtrip
|
||||
let json = serde_json::to_string(&output).unwrap();
|
||||
let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(parsed["total"], 2);
|
||||
assert_eq!(parsed["tags"][0]["name"], "auth");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tags_empty() {
|
||||
let index = SpecIndex {
|
||||
index_version: 1,
|
||||
generation: 1,
|
||||
content_hash: "sha256:empty".into(),
|
||||
openapi: "3.1.0".into(),
|
||||
info: IndexInfo {
|
||||
title: "Empty".into(),
|
||||
version: "0.1.0".into(),
|
||||
},
|
||||
endpoints: vec![],
|
||||
schemas: vec![],
|
||||
tags: vec![],
|
||||
};
|
||||
let output = build_output(&index);
|
||||
|
||||
assert_eq!(output.total, 0);
|
||||
assert!(output.tags.is_empty());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -236,10 +236,7 @@ impl CacheManager {
|
||||
/// Validates that index_version, generation, and index_hash all match
|
||||
/// between meta and the on-disk index. Returns `AliasNotFound` if
|
||||
/// meta.json is missing, `CacheIntegrity` on any mismatch.
|
||||
pub fn load_index(
|
||||
&self,
|
||||
alias: &str,
|
||||
) -> Result<(SpecIndex, CacheMetadata), SwaggerCliError> {
|
||||
pub fn load_index(&self, alias: &str) -> Result<(SpecIndex, CacheMetadata), SwaggerCliError> {
|
||||
validate_alias(alias)?;
|
||||
let dir = self.alias_dir(alias);
|
||||
|
||||
@@ -248,24 +245,16 @@ impl CacheManager {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
SwaggerCliError::AliasNotFound(alias.to_string())
|
||||
} else {
|
||||
SwaggerCliError::Cache(format!(
|
||||
"Failed to read {}: {e}",
|
||||
meta_path.display()
|
||||
))
|
||||
SwaggerCliError::Cache(format!("Failed to read {}: {e}", meta_path.display()))
|
||||
}
|
||||
})?;
|
||||
let meta: CacheMetadata = serde_json::from_slice(&meta_bytes).map_err(|e| {
|
||||
SwaggerCliError::CacheIntegrity(format!(
|
||||
"Corrupt meta.json for alias '{alias}': {e}"
|
||||
))
|
||||
SwaggerCliError::CacheIntegrity(format!("Corrupt meta.json for alias '{alias}': {e}"))
|
||||
})?;
|
||||
|
||||
let index_path = dir.join("index.json");
|
||||
let index_bytes = fs::read(&index_path).map_err(|e| {
|
||||
SwaggerCliError::Cache(format!(
|
||||
"Failed to read {}: {e}",
|
||||
index_path.display()
|
||||
))
|
||||
SwaggerCliError::Cache(format!("Failed to read {}: {e}", index_path.display()))
|
||||
})?;
|
||||
|
||||
let actual_hash = compute_hash(&index_bytes);
|
||||
@@ -277,9 +266,7 @@ impl CacheManager {
|
||||
}
|
||||
|
||||
let index: SpecIndex = serde_json::from_slice(&index_bytes).map_err(|e| {
|
||||
SwaggerCliError::CacheIntegrity(format!(
|
||||
"Corrupt index.json for alias '{alias}': {e}"
|
||||
))
|
||||
SwaggerCliError::CacheIntegrity(format!("Corrupt index.json for alias '{alias}': {e}"))
|
||||
})?;
|
||||
|
||||
if meta.index_version != index.index_version {
|
||||
@@ -317,10 +304,7 @@ impl CacheManager {
|
||||
) -> Result<serde_json::Value, SwaggerCliError> {
|
||||
let raw_path = self.alias_dir(alias).join("raw.json");
|
||||
let raw_bytes = fs::read(&raw_path).map_err(|e| {
|
||||
SwaggerCliError::Cache(format!(
|
||||
"Failed to read {}: {e}",
|
||||
raw_path.display()
|
||||
))
|
||||
SwaggerCliError::Cache(format!("Failed to read {}: {e}", raw_path.display()))
|
||||
})?;
|
||||
|
||||
let actual_hash = compute_hash(&raw_bytes);
|
||||
@@ -331,13 +315,9 @@ impl CacheManager {
|
||||
)));
|
||||
}
|
||||
|
||||
let value: serde_json::Value =
|
||||
serde_json::from_slice(&raw_bytes).map_err(|e| {
|
||||
SwaggerCliError::Cache(format!(
|
||||
"Failed to parse raw.json for '{}': {e}",
|
||||
alias
|
||||
))
|
||||
})?;
|
||||
let value: serde_json::Value = serde_json::from_slice(&raw_bytes).map_err(|e| {
|
||||
SwaggerCliError::Cache(format!("Failed to parse raw.json for '{}': {e}", alias))
|
||||
})?;
|
||||
|
||||
Ok(value)
|
||||
}
|
||||
@@ -697,14 +677,12 @@ mod tests {
|
||||
|
||||
manager
|
||||
.write_cache(
|
||||
"api1", b"src1", b"{}", &index, None, "1.0", "API 1", "json",
|
||||
None, None, None,
|
||||
"api1", b"src1", b"{}", &index, None, "1.0", "API 1", "json", None, None, None,
|
||||
)
|
||||
.unwrap();
|
||||
manager
|
||||
.write_cache(
|
||||
"api2", b"src2", b"{}", &index, None, "2.0", "API 2", "yaml",
|
||||
None, None, None,
|
||||
"api2", b"src2", b"{}", &index, None, "2.0", "API 2", "yaml", None, None, None,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
|
||||
@@ -72,19 +72,18 @@ fn is_blocked_mapped_v4(v6: &std::net::Ipv6Addr) -> bool {
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn validate_url(url: &str, allow_insecure_http: bool) -> Result<Url, SwaggerCliError> {
|
||||
let parsed = Url::parse(url).map_err(|e| {
|
||||
SwaggerCliError::InvalidSpec(format!("invalid URL '{url}': {e}"))
|
||||
})?;
|
||||
let parsed = Url::parse(url)
|
||||
.map_err(|e| SwaggerCliError::InvalidSpec(format!("invalid URL '{url}': {e}")))?;
|
||||
|
||||
match parsed.scheme() {
|
||||
"https" => Ok(parsed),
|
||||
"http" if allow_insecure_http => Ok(parsed),
|
||||
"http" => Err(SwaggerCliError::PolicyBlocked(
|
||||
format!("HTTP is not allowed for '{url}'. Use --allow-insecure-http to override."),
|
||||
)),
|
||||
other => Err(SwaggerCliError::InvalidSpec(
|
||||
format!("unsupported scheme '{other}' in URL '{url}'"),
|
||||
)),
|
||||
"http" => Err(SwaggerCliError::PolicyBlocked(format!(
|
||||
"HTTP is not allowed for '{url}'. Use --allow-insecure-http to override."
|
||||
))),
|
||||
other => Err(SwaggerCliError::InvalidSpec(format!(
|
||||
"unsupported scheme '{other}' in URL '{url}'"
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -105,16 +104,16 @@ async fn resolve_and_check(
|
||||
let addrs: Vec<_> = match lookup_host(&addr).await {
|
||||
Ok(iter) => iter.collect(),
|
||||
Err(e) => {
|
||||
return Err(SwaggerCliError::InvalidSpec(
|
||||
format!("DNS resolution failed for '{host}': {e}"),
|
||||
));
|
||||
return Err(SwaggerCliError::InvalidSpec(format!(
|
||||
"DNS resolution failed for '{host}': {e}"
|
||||
)));
|
||||
}
|
||||
};
|
||||
|
||||
if addrs.is_empty() {
|
||||
return Err(SwaggerCliError::InvalidSpec(
|
||||
format!("DNS resolution returned no addresses for '{host}'"),
|
||||
));
|
||||
return Err(SwaggerCliError::InvalidSpec(format!(
|
||||
"DNS resolution returned no addresses for '{host}'"
|
||||
)));
|
||||
}
|
||||
|
||||
for socket_addr in &addrs {
|
||||
@@ -178,9 +177,9 @@ impl AsyncHttpClient {
|
||||
pub async fn fetch_spec(&self, url: &str) -> Result<FetchResult, SwaggerCliError> {
|
||||
let parsed = validate_url(url, self.allow_insecure_http)?;
|
||||
|
||||
let host = parsed.host_str().ok_or_else(|| {
|
||||
SwaggerCliError::InvalidSpec(format!("URL '{url}' has no host"))
|
||||
})?;
|
||||
let host = parsed
|
||||
.host_str()
|
||||
.ok_or_else(|| SwaggerCliError::InvalidSpec(format!("URL '{url}' has no host")))?;
|
||||
let port = parsed.port_or_known_default().unwrap_or(443);
|
||||
|
||||
resolve_and_check(host, port, &self.allowed_private_hosts).await?;
|
||||
@@ -215,11 +214,7 @@ impl AsyncHttpClient {
|
||||
attempts += 1;
|
||||
if attempts > self.max_retries {
|
||||
return Err(SwaggerCliError::Network(
|
||||
client
|
||||
.get(url)
|
||||
.send()
|
||||
.await
|
||||
.unwrap_err(),
|
||||
client.get(url).send().await.unwrap_err(),
|
||||
));
|
||||
}
|
||||
let delay = self.retry_delay(&response, attempts);
|
||||
@@ -370,7 +365,9 @@ mod tests {
|
||||
#[test]
|
||||
fn test_ssrf_blocks_loopback() {
|
||||
assert!(is_ip_blocked(&IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))));
|
||||
assert!(is_ip_blocked(&IpAddr::V4(Ipv4Addr::new(127, 255, 255, 254))));
|
||||
assert!(is_ip_blocked(&IpAddr::V4(Ipv4Addr::new(
|
||||
127, 255, 255, 254
|
||||
))));
|
||||
assert!(is_ip_blocked(&IpAddr::V6(Ipv6Addr::LOCALHOST)));
|
||||
}
|
||||
|
||||
@@ -392,7 +389,9 @@ mod tests {
|
||||
#[test]
|
||||
fn test_ssrf_blocks_link_local() {
|
||||
// IPv4 link-local (169.254.x.x) -- includes the AWS metadata endpoint
|
||||
assert!(is_ip_blocked(&IpAddr::V4(Ipv4Addr::new(169, 254, 169, 254))));
|
||||
assert!(is_ip_blocked(&IpAddr::V4(Ipv4Addr::new(
|
||||
169, 254, 169, 254
|
||||
))));
|
||||
assert!(is_ip_blocked(&IpAddr::V4(Ipv4Addr::new(169, 254, 0, 1))));
|
||||
|
||||
// IPv6 link-local (fe80::/10)
|
||||
@@ -441,10 +440,7 @@ mod tests {
|
||||
fn test_url_allows_https() {
|
||||
let result = validate_url("https://example.com/spec.json", false);
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(
|
||||
result.unwrap().as_str(),
|
||||
"https://example.com/spec.json"
|
||||
);
|
||||
assert_eq!(result.unwrap().as_str(), "https://example.com/spec.json");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -457,7 +453,10 @@ mod tests {
|
||||
fn test_url_rejects_unsupported_scheme() {
|
||||
let result = validate_url("ftp://example.com/spec.json", false);
|
||||
assert!(result.is_err());
|
||||
assert!(matches!(result.unwrap_err(), SwaggerCliError::InvalidSpec(_)));
|
||||
assert!(matches!(
|
||||
result.unwrap_err(),
|
||||
SwaggerCliError::InvalidSpec(_)
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -505,8 +504,7 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_and_check_skips_allowed_host() {
|
||||
let result =
|
||||
resolve_and_check("localhost", 80, &["localhost".into()]).await;
|
||||
let result = resolve_and_check("localhost", 80, &["localhost".into()]).await;
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,4 +2,6 @@ pub mod cache;
|
||||
pub mod config;
|
||||
pub mod http;
|
||||
pub mod indexer;
|
||||
pub mod refs;
|
||||
pub mod search;
|
||||
pub mod spec;
|
||||
|
||||
295
src/core/refs.rs
Normal file
295
src/core/refs.rs
Normal file
@@ -0,0 +1,295 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
/// Resolve a JSON Pointer (RFC 6901) against a root value.
|
||||
///
|
||||
/// Unescapes `~1` -> `/` and `~0` -> `~` (in that order per spec).
|
||||
/// Returns `None` if the pointer is empty, malformed, or the path does not exist.
|
||||
pub fn resolve_json_pointer<'a>(root: &'a Value, pointer: &str) -> Option<&'a Value> {
|
||||
if pointer.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let stripped = pointer.strip_prefix('/')?;
|
||||
|
||||
let mut current = root;
|
||||
for token in stripped.split('/') {
|
||||
let unescaped = token.replace("~1", "/").replace("~0", "~");
|
||||
match current {
|
||||
Value::Object(map) => {
|
||||
current = map.get(&unescaped)?;
|
||||
}
|
||||
Value::Array(arr) => {
|
||||
let idx: usize = unescaped.parse().ok()?;
|
||||
current = arr.get(idx)?;
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
|
||||
Some(current)
|
||||
}
|
||||
|
||||
/// Expand all `$ref` entries in `value` by inlining the referenced content from `root`.
|
||||
///
|
||||
/// - Internal refs (starting with `#/`) are resolved via JSON pointer navigation.
|
||||
/// - External refs (not starting with `#/`) are replaced with `{"$external_ref": "..."}`.
|
||||
/// - Circular refs (already visited in the current path) are replaced with `{"$circular_ref": "..."}`.
|
||||
/// - Expansion stops at `max_depth` to prevent unbounded recursion.
|
||||
pub fn expand_refs(value: &mut Value, root: &Value, max_depth: u32) {
|
||||
let mut visited = HashSet::new();
|
||||
expand_recursive(value, root, max_depth, 0, &mut visited);
|
||||
}
|
||||
|
||||
fn expand_recursive(
|
||||
value: &mut Value,
|
||||
root: &Value,
|
||||
max_depth: u32,
|
||||
depth: u32,
|
||||
visited: &mut HashSet<String>,
|
||||
) {
|
||||
if let Some(ref_str) = extract_ref_if_present(value) {
|
||||
if !ref_str.starts_with("#/") {
|
||||
*value = serde_json::json!({ "$external_ref": ref_str });
|
||||
return;
|
||||
}
|
||||
|
||||
if depth >= max_depth || visited.contains(&ref_str) {
|
||||
*value = serde_json::json!({ "$circular_ref": ref_str });
|
||||
return;
|
||||
}
|
||||
|
||||
let pointer = &ref_str[1..]; // strip leading '#'
|
||||
if let Some(resolved) = resolve_json_pointer(root, pointer) {
|
||||
let mut expanded = resolved.clone();
|
||||
visited.insert(ref_str);
|
||||
expand_recursive(&mut expanded, root, max_depth, depth + 1, visited);
|
||||
// Do not remove from visited: keep it for sibling detection within the same
|
||||
// subtree path. The caller manages the visited set across siblings.
|
||||
*value = expanded;
|
||||
}
|
||||
// If pointer doesn't resolve, leave the $ref as-is (broken ref)
|
||||
return;
|
||||
}
|
||||
|
||||
match value {
|
||||
Value::Object(map) => {
|
||||
for val in map.values_mut() {
|
||||
expand_recursive(val, root, max_depth, depth, visited);
|
||||
}
|
||||
}
|
||||
Value::Array(arr) => {
|
||||
for item in arr.iter_mut() {
|
||||
expand_recursive(item, root, max_depth, depth, visited);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_ref_if_present(value: &Value) -> Option<String> {
|
||||
let map = value.as_object()?;
|
||||
let ref_val = map.get("$ref")?;
|
||||
Some(ref_val.as_str()?.to_string())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
|
||||
fn test_resolve_json_pointer() {
|
||||
let root = json!({
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Pet": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": { "type": "string" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let result = resolve_json_pointer(&root, "/components/schemas/Pet");
|
||||
assert!(result.is_some());
|
||||
let pet = result.unwrap();
|
||||
assert_eq!(pet["type"], "object");
|
||||
assert_eq!(pet["properties"]["name"]["type"], "string");
|
||||
|
||||
// Empty pointer
|
||||
assert!(resolve_json_pointer(&root, "").is_none());
|
||||
|
||||
// Missing path
|
||||
assert!(resolve_json_pointer(&root, "/components/schemas/Dog").is_none());
|
||||
|
||||
// No leading slash
|
||||
assert!(resolve_json_pointer(&root, "components").is_none());
|
||||
|
||||
// Escaped path segments: ~1 -> /
|
||||
let root_with_slash = json!({
|
||||
"paths": {
|
||||
"/pets/{petId}": {
|
||||
"get": { "summary": "Get pet" }
|
||||
}
|
||||
}
|
||||
});
|
||||
let result = resolve_json_pointer(&root_with_slash, "/paths/~1pets~1{petId}/get");
|
||||
assert!(result.is_some());
|
||||
assert_eq!(result.unwrap()["summary"], "Get pet");
|
||||
|
||||
// Escaped: ~0 -> ~
|
||||
let root_with_tilde = json!({
|
||||
"x~y": "found"
|
||||
});
|
||||
let result = resolve_json_pointer(&root_with_tilde, "/x~0y");
|
||||
assert!(result.is_some());
|
||||
assert_eq!(result.unwrap(), "found");
|
||||
|
||||
// Array indexing
|
||||
let root_with_array = json!({
|
||||
"items": ["a", "b", "c"]
|
||||
});
|
||||
let result = resolve_json_pointer(&root_with_array, "/items/1");
|
||||
assert!(result.is_some());
|
||||
assert_eq!(result.unwrap(), "b");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_expand_basic_ref() {
|
||||
let root = json!({
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Pet": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": { "type": "string" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let mut value = json!({
|
||||
"schema": { "$ref": "#/components/schemas/Pet" }
|
||||
});
|
||||
|
||||
expand_refs(&mut value, &root, 10);
|
||||
|
||||
assert_eq!(value["schema"]["type"], "object");
|
||||
assert_eq!(value["schema"]["properties"]["name"]["type"], "string");
|
||||
// $ref key should be gone (replaced with inlined content)
|
||||
assert!(value["schema"]["$ref"].is_null());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_expand_circular_ref() {
|
||||
let root = json!({
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Node": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"child": { "$ref": "#/components/schemas/Node" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let mut value = json!({
|
||||
"schema": { "$ref": "#/components/schemas/Node" }
|
||||
});
|
||||
|
||||
expand_refs(&mut value, &root, 5);
|
||||
|
||||
// The first expansion should succeed
|
||||
assert_eq!(value["schema"]["type"], "object");
|
||||
|
||||
// The recursive child ref should be replaced with $circular_ref
|
||||
let child = &value["schema"]["properties"]["child"];
|
||||
assert_eq!(child["$circular_ref"], "#/components/schemas/Node");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_expand_external_ref() {
|
||||
let root = json!({});
|
||||
|
||||
let mut value = json!({
|
||||
"schema": { "$ref": "https://example.com/schemas/Pet.json" }
|
||||
});
|
||||
|
||||
expand_refs(&mut value, &root, 5);
|
||||
|
||||
assert_eq!(
|
||||
value["schema"]["$external_ref"],
|
||||
"https://example.com/schemas/Pet.json"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_expand_max_depth() {
|
||||
let root = json!({
|
||||
"components": {
|
||||
"schemas": {
|
||||
"A": {
|
||||
"nested": { "$ref": "#/components/schemas/B" }
|
||||
},
|
||||
"B": {
|
||||
"value": "deep"
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// With max_depth=1, the first ref resolves but nested ref hits depth limit
|
||||
let mut value = json!({ "$ref": "#/components/schemas/A" });
|
||||
expand_refs(&mut value, &root, 1);
|
||||
|
||||
// A should be expanded
|
||||
assert!(value.get("nested").is_some());
|
||||
// B ref should be left as $circular_ref due to max_depth
|
||||
assert_eq!(value["nested"]["$circular_ref"], "#/components/schemas/B");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_expand_array_refs() {
|
||||
let root = json!({
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Tag": { "type": "string" }
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let mut value = json!({
|
||||
"items": [
|
||||
{ "$ref": "#/components/schemas/Tag" },
|
||||
{ "type": "integer" }
|
||||
]
|
||||
});
|
||||
|
||||
expand_refs(&mut value, &root, 5);
|
||||
|
||||
assert_eq!(value["items"][0]["type"], "string");
|
||||
assert_eq!(value["items"][1]["type"], "integer");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_expand_broken_ref_left_as_is() {
|
||||
let root = json!({});
|
||||
|
||||
let mut value = json!({
|
||||
"schema": { "$ref": "#/components/schemas/Missing" }
|
||||
});
|
||||
|
||||
let original = value.clone();
|
||||
expand_refs(&mut value, &root, 5);
|
||||
|
||||
// Broken internal ref left untouched
|
||||
assert_eq!(value, original);
|
||||
}
|
||||
}
|
||||
634
src/core/search.rs
Normal file
634
src/core/search.rs
Normal file
@@ -0,0 +1,634 @@
|
||||
use serde::Serialize;
|
||||
|
||||
use super::indexer::method_rank;
|
||||
use super::spec::SpecIndex;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
pub struct SearchResult {
|
||||
pub result_type: SearchResultType,
|
||||
pub name: String,
|
||||
pub method: Option<String>,
|
||||
pub summary: Option<String>,
|
||||
pub rank: usize,
|
||||
pub score: u32,
|
||||
pub matches: Vec<Match>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum SearchResultType {
|
||||
Endpoint,
|
||||
Schema,
|
||||
}
|
||||
|
||||
impl SearchResultType {
|
||||
fn ordinal(self) -> u8 {
|
||||
match self {
|
||||
Self::Endpoint => 0,
|
||||
Self::Schema => 1,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
pub struct Match {
|
||||
pub field: String,
|
||||
pub snippet: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SearchOptions {
|
||||
pub search_paths: bool,
|
||||
pub search_descriptions: bool,
|
||||
pub search_schemas: bool,
|
||||
pub case_sensitive: bool,
|
||||
pub exact: bool,
|
||||
pub limit: usize,
|
||||
}
|
||||
|
||||
impl Default for SearchOptions {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
search_paths: true,
|
||||
search_descriptions: true,
|
||||
search_schemas: true,
|
||||
case_sensitive: false,
|
||||
exact: false,
|
||||
limit: 20,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Field weights
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const WEIGHT_PATH: f64 = 10.0;
|
||||
const WEIGHT_SUMMARY: f64 = 5.0;
|
||||
const WEIGHT_DESCRIPTION: f64 = 2.0;
|
||||
const WEIGHT_SCHEMA_NAME: f64 = 8.0;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Search engine
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
pub struct SearchEngine<'a> {
|
||||
index: &'a SpecIndex,
|
||||
}
|
||||
|
||||
impl<'a> SearchEngine<'a> {
    /// Create a search engine borrowing the given index.
    pub fn new(index: &'a SpecIndex) -> Self {
        Self { index }
    }

    /// Run a weighted substring search over endpoints and schemas.
    ///
    /// The query is trimmed first; an empty or whitespace-only query returns
    /// no results. Each matched field adds its `WEIGHT_*` constant to a raw
    /// score, which is then boosted by the fraction of query terms that
    /// matched, multiplied by 100 and rounded to an integer so that sorting
    /// and equality are deterministic across runs. Results are sorted by
    /// score (desc), type ordinal, name, then HTTP method rank; the list is
    /// truncated to `opts.limit` and given 1-based ranks.
    pub fn search(&self, query: &str, opts: &SearchOptions) -> Vec<SearchResult> {
        let query = query.trim();
        if query.is_empty() {
            return Vec::new();
        }

        // Exact mode keeps the whole query as one phrase; otherwise split
        // on whitespace so each term scores independently.
        let terms = tokenize(query, opts.exact);
        let total_terms = terms.len();

        let mut results: Vec<SearchResult> = Vec::new();

        // Search endpoints
        if opts.search_paths || opts.search_descriptions {
            for ep in &self.index.endpoints {
                let mut raw_score: f64 = 0.0;
                let mut matched_terms: usize = 0;
                let mut matches: Vec<Match> = Vec::new();

                for term in &terms {
                    // A term counts once toward coverage even if it hits
                    // several fields of the same endpoint.
                    let mut term_matched = false;

                    if opts.search_paths && contains_term(&ep.path, term, opts.case_sensitive) {
                        raw_score += WEIGHT_PATH;
                        matches.push(Match {
                            field: "path".into(),
                            snippet: safe_snippet(&ep.path, term, opts.case_sensitive),
                        });
                        term_matched = true;
                    }

                    // NOTE(review): summaries are searched whenever paths OR
                    // descriptions are enabled, so a paths-only search still
                    // matches summaries. Looks deliberate (summaries serve
                    // both views) — confirm against the CLI's search flags.
                    if (opts.search_descriptions || opts.search_paths)
                        && let Some(ref summary) = ep.summary
                        && contains_term(summary, term, opts.case_sensitive)
                    {
                        raw_score += WEIGHT_SUMMARY;
                        matches.push(Match {
                            field: "summary".into(),
                            snippet: safe_snippet(summary, term, opts.case_sensitive),
                        });
                        term_matched = true;
                    }

                    if opts.search_descriptions
                        && let Some(ref desc) = ep.description
                        && contains_term(desc, term, opts.case_sensitive)
                    {
                        raw_score += WEIGHT_DESCRIPTION;
                        matches.push(Match {
                            field: "description".into(),
                            snippet: safe_snippet(desc, term, opts.case_sensitive),
                        });
                        term_matched = true;
                    }

                    if term_matched {
                        matched_terms += 1;
                    }
                }

                if raw_score > 0.0 {
                    // Boost is 1.0..=2.0 depending on how many of the query
                    // terms matched; max(1) guards a division by zero.
                    let coverage_boost = 1.0 + (matched_terms as f64 / total_terms.max(1) as f64);
                    let final_score = raw_score * coverage_boost;
                    // Quantize (x100, rounded) so ordering is stable and
                    // float noise cannot reorder equal-scoring results.
                    let quantized = (final_score * 100.0).round() as u32;

                    results.push(SearchResult {
                        result_type: SearchResultType::Endpoint,
                        name: ep.path.clone(),
                        method: Some(ep.method.clone()),
                        summary: ep.summary.clone(),
                        rank: 0, // assigned after sort
                        score: quantized,
                        matches,
                    });
                }
            }
        }

        // Search schemas
        if opts.search_schemas {
            for schema in &self.index.schemas {
                let mut raw_score: f64 = 0.0;
                let mut matched_terms: usize = 0;
                let mut matches: Vec<Match> = Vec::new();

                for term in &terms {
                    if contains_term(&schema.name, term, opts.case_sensitive) {
                        raw_score += WEIGHT_SCHEMA_NAME;
                        matches.push(Match {
                            field: "schema_name".into(),
                            snippet: safe_snippet(&schema.name, term, opts.case_sensitive),
                        });
                        matched_terms += 1;
                    }
                }

                if raw_score > 0.0 {
                    // Same scoring pipeline as endpoints (see above).
                    let coverage_boost = 1.0 + (matched_terms as f64 / total_terms.max(1) as f64);
                    let final_score = raw_score * coverage_boost;
                    let quantized = (final_score * 100.0).round() as u32;

                    results.push(SearchResult {
                        result_type: SearchResultType::Schema,
                        name: schema.name.clone(),
                        method: None,
                        summary: None,
                        rank: 0,
                        score: quantized,
                        matches,
                    });
                }
            }
        }

        // Deterministic sort: score DESC, type ordinal ASC, name ASC, method_rank ASC
        results.sort_by(|a, b| {
            b.score
                .cmp(&a.score)
                .then_with(|| a.result_type.ordinal().cmp(&b.result_type.ordinal()))
                .then_with(|| a.name.cmp(&b.name))
                .then_with(|| {
                    // Results without a method (schemas) sort after all
                    // known HTTP methods.
                    let a_rank = a.method.as_deref().map(method_rank).unwrap_or(u8::MAX);
                    let b_rank = b.method.as_deref().map(method_rank).unwrap_or(u8::MAX);
                    a_rank.cmp(&b_rank)
                })
        });

        // Assign 1-based ranks and apply limit
        results.truncate(opts.limit);
        for (i, result) in results.iter_mut().enumerate() {
            result.rank = i + 1;
        }

        results
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Split `query` into search terms. In exact mode the entire query is kept
/// as a single phrase; otherwise it is split on runs of whitespace.
fn tokenize(query: &str, exact: bool) -> Vec<String> {
    match exact {
        true => vec![query.to_owned()],
        false => query.split_whitespace().map(str::to_owned).collect(),
    }
}
|
||||
|
||||
/// Case-aware substring test. The insensitive path lowercases both sides,
/// which allocates per call — acceptable for interactive search volumes.
fn contains_term(haystack: &str, needle: &str, case_sensitive: bool) -> bool {
    if case_sensitive {
        return haystack.contains(needle);
    }
    haystack.to_lowercase().contains(&needle.to_lowercase())
}
|
||||
|
||||
/// Build a Unicode-safe snippet around the first occurrence of `needle` in
/// `haystack`. The context window is 50 characters. Ellipses are added when
/// the snippet is truncated.
///
/// When `needle` does not occur, the first 50 characters of `haystack` are
/// returned unchanged.
fn safe_snippet(haystack: &str, needle: &str, case_sensitive: bool) -> String {
    const WINDOW: usize = 50;

    // Search in a case-folded copy when insensitive.
    let (h_search, n_search) = if case_sensitive {
        (haystack.to_string(), needle.to_string())
    } else {
        (haystack.to_lowercase(), needle.to_lowercase())
    };

    let byte_pos = match h_search.find(&n_search) {
        Some(pos) => pos,
        None => return haystack.chars().take(WINDOW).collect(),
    };

    let haystack_chars: Vec<char> = haystack.chars().collect();
    let total_chars = haystack_chars.len();
    let needle_char_len = needle.chars().count();

    // BUG FIX: `byte_pos` is an offset into `h_search`, not `haystack`.
    // Unicode lowercasing can change byte lengths (e.g. 'İ' -> "i\u{307}"),
    // so slicing `haystack[..byte_pos]` could land on a non-char-boundary
    // and panic. Instead count chars within `h_search` (where `byte_pos` is
    // always a valid boundary) and clamp to the original char count.
    let char_start = h_search[..byte_pos].chars().count().min(total_chars);

    // Centre the window around the match.
    let context_budget = WINDOW.saturating_sub(needle_char_len);
    let left_context = context_budget / 2;

    let snippet_start = char_start.saturating_sub(left_context);
    let snippet_end = (snippet_start + WINDOW).min(total_chars);

    let prefix = if snippet_start > 0 { "..." } else { "" };
    let suffix = if snippet_end < total_chars { "..." } else { "" };

    let snippet_body: String = haystack_chars[snippet_start..snippet_end].iter().collect();

    format!("{prefix}{snippet_body}{suffix}")
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::spec::{
        IndexInfo, IndexedEndpoint, IndexedParam, IndexedSchema, IndexedTag, SpecIndex,
    };

    /// Hand-built petstore-style index fixture: 4 endpoints (3 tagged
    /// "pets", 1 tagged "store"), 3 schemas, 2 tags.
    fn petstore_index() -> SpecIndex {
        SpecIndex {
            index_version: 1,
            generation: 1,
            content_hash: "sha256:test".into(),
            openapi: "3.0.3".into(),
            info: IndexInfo {
                title: "Petstore".into(),
                version: "1.0.0".into(),
            },
            endpoints: vec![
                IndexedEndpoint {
                    path: "/pets".into(),
                    method: "GET".into(),
                    summary: Some("List all pets".into()),
                    description: Some("Returns a list of pets from the store".into()),
                    operation_id: Some("listPets".into()),
                    tags: vec!["pets".into()],
                    deprecated: false,
                    parameters: vec![IndexedParam {
                        name: "limit".into(),
                        location: "query".into(),
                        required: false,
                        description: Some("Max items".into()),
                    }],
                    request_body_required: false,
                    request_body_content_types: vec![],
                    security_schemes: vec![],
                    security_required: false,
                    operation_ptr: "/paths/~1pets/get".into(),
                },
                IndexedEndpoint {
                    path: "/pets".into(),
                    method: "POST".into(),
                    summary: Some("Create a pet".into()),
                    description: None,
                    operation_id: Some("createPet".into()),
                    tags: vec!["pets".into()],
                    deprecated: false,
                    parameters: vec![],
                    request_body_required: true,
                    request_body_content_types: vec!["application/json".into()],
                    security_schemes: vec![],
                    security_required: false,
                    operation_ptr: "/paths/~1pets/post".into(),
                },
                IndexedEndpoint {
                    path: "/pets/{petId}".into(),
                    method: "GET".into(),
                    summary: Some("Info for a specific pet".into()),
                    description: Some("Detailed information about a single pet".into()),
                    operation_id: Some("showPetById".into()),
                    tags: vec!["pets".into()],
                    deprecated: false,
                    parameters: vec![IndexedParam {
                        name: "petId".into(),
                        location: "path".into(),
                        required: true,
                        description: Some("The id of the pet".into()),
                    }],
                    request_body_required: false,
                    request_body_content_types: vec![],
                    security_schemes: vec![],
                    security_required: false,
                    operation_ptr: "/paths/~1pets~1{petId}/get".into(),
                },
                IndexedEndpoint {
                    path: "/store/inventory".into(),
                    method: "GET".into(),
                    summary: Some("Returns store inventory".into()),
                    description: None,
                    operation_id: Some("getInventory".into()),
                    tags: vec!["store".into()],
                    deprecated: false,
                    parameters: vec![],
                    request_body_required: false,
                    request_body_content_types: vec![],
                    security_schemes: vec![],
                    security_required: false,
                    operation_ptr: "/paths/~1store~1inventory/get".into(),
                },
            ],
            schemas: vec![
                IndexedSchema {
                    name: "Pet".into(),
                    schema_ptr: "/components/schemas/Pet".into(),
                },
                IndexedSchema {
                    name: "Error".into(),
                    schema_ptr: "/components/schemas/Error".into(),
                },
                IndexedSchema {
                    name: "PetList".into(),
                    schema_ptr: "/components/schemas/PetList".into(),
                },
            ],
            tags: vec![
                IndexedTag {
                    name: "pets".into(),
                    description: Some("Pet operations".into()),
                    endpoint_count: 3,
                },
                IndexedTag {
                    name: "store".into(),
                    description: Some("Store operations".into()),
                    endpoint_count: 1,
                },
            ],
        }
    }

    // Default options: matches found, every snippet really contains the
    // term, and ranks are 1-based sequential.
    #[test]
    fn test_search_basic() {
        let index = petstore_index();
        let engine = SearchEngine::new(&index);
        let opts = SearchOptions::default();

        let results = engine.search("pet", &opts);
        assert!(
            !results.is_empty(),
            "should find 'pet' in petstore endpoints"
        );

        // All results should mention pet somewhere
        for r in &results {
            let has_pet = r
                .matches
                .iter()
                .any(|m| m.snippet.to_lowercase().contains("pet"));
            assert!(has_pet, "result {:?} should match 'pet'", r.name);
        }

        // Ranks should be sequential 1-based
        for (i, r) in results.iter().enumerate() {
            assert_eq!(r.rank, i + 1, "rank should be 1-based sequential");
        }
    }

    // Two identical runs must produce identical scores, ranks, names and
    // methods (guards the quantized-score deterministic sort).
    #[test]
    fn test_search_scores_deterministic() {
        let index = petstore_index();
        let engine = SearchEngine::new(&index);
        let opts = SearchOptions::default();

        let run1 = engine.search("pet", &opts);
        let run2 = engine.search("pet", &opts);

        assert_eq!(run1.len(), run2.len());
        for (a, b) in run1.iter().zip(run2.iter()) {
            assert_eq!(a.score, b.score, "scores should be identical across runs");
            assert_eq!(a.rank, b.rank, "ranks should be identical across runs");
            assert_eq!(a.name, b.name, "names should be identical across runs");
            assert_eq!(
                a.method, b.method,
                "methods should be identical across runs"
            );
        }
    }

    // Exact-phrase mode must be no broader than tokenised mode and must
    // still find the literal phrase "list all" in "List all pets".
    #[test]
    fn test_search_exact_mode() {
        let index = petstore_index();
        let engine = SearchEngine::new(&index);

        // "list all" as two tokens: should match broadly
        let loose_opts = SearchOptions {
            exact: false,
            ..SearchOptions::default()
        };
        let loose = engine.search("list all", &loose_opts);

        // "list all" as exact phrase: only matches if that exact phrase appears
        let exact_opts = SearchOptions {
            exact: true,
            ..SearchOptions::default()
        };
        let exact = engine.search("list all", &exact_opts);

        // Exact should be a subset of (or equal to) loose results
        assert!(
            exact.len() <= loose.len(),
            "exact mode should return fewer or equal results"
        );

        // The exact match should find "List all pets" summary
        assert!(
            !exact.is_empty(),
            "exact 'list all' should match 'List all pets'"
        );
    }

    // Case-sensitive "PET" must match strictly fewer results than the
    // case-insensitive run (fixture uses lowercase/mixed-case "pet").
    #[test]
    fn test_search_case_sensitive() {
        let index = petstore_index();
        let engine = SearchEngine::new(&index);

        // Case-insensitive (default): "PET" matches "pet", "/pets", etc.
        let insensitive = SearchOptions {
            case_sensitive: false,
            ..SearchOptions::default()
        };
        let results_insensitive = engine.search("PET", &insensitive);

        // Case-sensitive: "PET" should NOT match lowercase "pet" or "/pets"
        let sensitive = SearchOptions {
            case_sensitive: true,
            ..SearchOptions::default()
        };
        let results_sensitive = engine.search("PET", &sensitive);

        assert!(
            results_sensitive.len() < results_insensitive.len(),
            "case-sensitive 'PET' should match fewer results than case-insensitive"
        );
    }

    #[test]
    fn test_safe_snippet_unicode() {
        // Emoji and multi-byte characters
        let haystack = "Hello \u{1F600} world of pets and \u{1F431} cats everywhere";
        let snippet = safe_snippet(haystack, "pets", false);
        assert!(
            snippet.contains("pets"),
            "snippet should contain the search term"
        );
        // Must not panic on multi-byte boundaries
    }

    // A match buried in a 400+ char string must yield a <=50-char body
    // with ellipsis markers on the truncated sides.
    #[test]
    fn test_safe_snippet_truncation() {
        let long = "a".repeat(200);
        let haystack = format!("{long}needle{long}");
        let snippet = safe_snippet(&haystack, "needle", false);
        assert!(snippet.contains("needle"));
        assert!(
            snippet.contains("..."),
            "should have ellipsis for truncation"
        );
        // Snippet should be around 50 chars + ellipsis markers
        let body_len = snippet.replace("...", "").chars().count();
        assert!(body_len <= 50, "snippet body should be at most 50 chars");
    }

    #[test]
    fn test_empty_query_returns_empty() {
        let index = petstore_index();
        let engine = SearchEngine::new(&index);
        let opts = SearchOptions::default();

        assert!(engine.search("", &opts).is_empty());
        assert!(engine.search("   ", &opts).is_empty());
    }

    #[test]
    fn test_search_limit() {
        let index = petstore_index();
        let engine = SearchEngine::new(&index);
        let opts = SearchOptions {
            limit: 2,
            ..SearchOptions::default()
        };

        let results = engine.search("pet", &opts);
        assert!(results.len() <= 2, "should respect limit");
    }

    // With paths and descriptions disabled, only Schema results may appear.
    #[test]
    fn test_search_schemas_only() {
        let index = petstore_index();
        let engine = SearchEngine::new(&index);
        let opts = SearchOptions {
            search_paths: false,
            search_descriptions: false,
            search_schemas: true,
            ..SearchOptions::default()
        };

        let results = engine.search("Pet", &opts);
        assert!(!results.is_empty());
        for r in &results {
            assert_eq!(
                r.result_type,
                SearchResultType::Schema,
                "should only return schemas"
            );
        }
    }

    // With schemas disabled, only Endpoint results may appear. (Note:
    // summaries can still match in paths-only mode — see the NOTE(review)
    // in `SearchEngine::search`.)
    #[test]
    fn test_search_paths_only() {
        let index = petstore_index();
        let engine = SearchEngine::new(&index);
        let opts = SearchOptions {
            search_paths: true,
            search_descriptions: false,
            search_schemas: false,
            ..SearchOptions::default()
        };

        let results = engine.search("store", &opts);
        assert!(!results.is_empty());
        for r in &results {
            assert_eq!(
                r.result_type,
                SearchResultType::Endpoint,
                "should only return endpoints"
            );
        }
    }

    #[test]
    fn test_multi_term_coverage_boost() {
        let index = petstore_index();
        let engine = SearchEngine::new(&index);
        let opts = SearchOptions::default();

        // "pets store" has two terms; an endpoint matching both gets higher coverage
        let results = engine.search("pets list", &opts);
        if results.len() >= 2 {
            // The first result should have a higher score due to more term matches
            assert!(
                results[0].score >= results[1].score,
                "results should be sorted by score descending"
            );
        }
    }

    #[test]
    fn test_no_match_returns_empty() {
        let index = petstore_index();
        let engine = SearchEngine::new(&index);
        let opts = SearchOptions::default();

        let results = engine.search("zzzznotfound", &opts);
        assert!(
            results.is_empty(),
            "gibberish query should return no results"
        );
    }
}
|
||||
Reference in New Issue
Block a user