refactor: remove built-in apidocs and schemars
sunng87 committed Nov 28, 2024
1 parent 797dfad commit 86eafd8
Showing 23 changed files with 45 additions and 237 deletions.
24 changes: 0 additions & 24 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion Cargo.toml
@@ -167,7 +167,6 @@ rstest = "0.21"
rstest_reuse = "0.7"
rust_decimal = "1.33"
rustc-hash = "2.0"
schemars = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["float_roundtrip"] }
serde_with = "3"
3 changes: 1 addition & 2 deletions src/common/version/Cargo.toml
@@ -8,11 +8,10 @@ license.workspace = true
workspace = true

[features]
codec = ["dep:serde", "dep:schemars"]
codec = ["dep:serde"]

[dependencies]
const_format = "0.2"
schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
shadow-rs.workspace = true

5 changes: 1 addition & 4 deletions src/common/version/src/lib.rs
@@ -49,10 +49,7 @@ impl Display for BuildInfo {
}

#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(
feature = "codec",
derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema)
)]
#[cfg_attr(feature = "codec", derive(serde::Serialize, serde::Deserialize))]
pub struct OwnedBuildInfo {
pub branch: String,
pub commit: String,
4 changes: 0 additions & 4 deletions src/servers/Cargo.toml
@@ -10,15 +10,12 @@ dashboard = []
mem-prof = ["dep:common-mem-prof"]
pprof = ["dep:common-pprof"]
testing = []
apidocs = ["dep:aide"]


[lints]
workspace = true

[dependencies]
ahash = "0.8"
aide = { version = "0.9", features = ["axum"], optional = true }
api.workspace = true
arrow.workspace = true
arrow-flight.workspace = true
@@ -95,7 +92,6 @@ rust-embed = { version = "6.6", features = ["debug-embed"] }
rustls = { version = "0.23", default-features = false, features = ["ring", "logging", "std", "tls12"] }
rustls-pemfile = "2.0"
rustls-pki-types = "1.0"
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
session.workspace = true
99 changes: 5 additions & 94 deletions src/servers/src/http.rs
@@ -18,20 +18,10 @@ use std::net::SocketAddr;
use std::sync::Mutex as StdMutex;
use std::time::Duration;

#[cfg(feature = "apidocs")]
use aide::{
axum::{routing as apirouting, ApiRouter, IntoApiResponse},
openapi::{Info, OpenApi, Server as OpenAPIServer},
OperationOutput,
};
use async_trait::async_trait;
use auth::UserProviderRef;
#[cfg(feature = "apidocs")]
use axum::Extension;
use axum::error_handling::HandleErrorLayer;
use axum::extract::DefaultBodyLimit;
#[cfg(feature = "apidocs")]
use axum::response::Html;
use axum::response::{IntoResponse, Json, Response};
use axum::{middleware, routing, BoxError, Router};
use common_base::readable_size::ReadableSize;
@@ -46,7 +36,6 @@ use datatypes::schema::SchemaRef;
use datatypes::value::transform_value_ref_to_json_value;
use event::{LogState, LogValidatorRef};
use futures::FutureExt;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use snafu::{ensure, ResultExt};
@@ -155,7 +144,7 @@ impl Default for HttpOptions {
}
}

#[derive(Debug, Serialize, Deserialize, JsonSchema, Eq, PartialEq)]
#[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]
pub struct ColumnSchema {
name: String,
data_type: String,
@@ -167,7 +156,7 @@ impl ColumnSchema {
}
}

#[derive(Debug, Serialize, Deserialize, JsonSchema, Eq, PartialEq)]
#[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]
pub struct OutputSchema {
column_schemas: Vec<ColumnSchema>,
}
@@ -195,7 +184,7 @@ impl From<SchemaRef> for OutputSchema {
}
}

#[derive(Debug, Serialize, Deserialize, JsonSchema, Eq, PartialEq)]
#[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]
pub struct HttpRecordsOutput {
schema: OutputSchema,
rows: Vec<Vec<Value>>,
@@ -271,7 +260,7 @@ impl HttpRecordsOutput {
}
}

#[derive(Serialize, Deserialize, Debug, JsonSchema, Eq, PartialEq)]
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum GreptimeQueryOutput {
AffectedRows(usize),
@@ -359,7 +348,7 @@ impl Display for Epoch {
}
}

#[derive(Serialize, Deserialize, Debug, JsonSchema)]
#[derive(Serialize, Deserialize, Debug)]
pub enum HttpResponse {
Arrow(ArrowResponse),
Csv(CsvResponse),
@@ -427,11 +416,6 @@ impl IntoResponse for HttpResponse {
}
}

#[cfg(feature = "apidocs")]
impl OperationOutput for HttpResponse {
type Inner = Response;
}

impl From<ArrowResponse> for HttpResponse {
fn from(value: ArrowResponse) -> Self {
HttpResponse::Arrow(value)
@@ -474,16 +458,6 @@ impl From<JsonResponse> for HttpResponse {
}
}

#[cfg(feature = "apidocs")]
async fn serve_api(Extension(api): Extension<OpenApi>) -> impl IntoApiResponse {
Json(api)
}

#[cfg(feature = "apidocs")]
async fn serve_docs() -> Html<String> {
Html(include_str!("http/redoc.html").to_owned())
}

#[derive(Clone)]
pub struct ApiState {
pub sql_handler: ServerSqlQueryHandlerRef,
@@ -500,61 +474,19 @@ pub struct HttpServerBuilder {
options: HttpOptions,
plugins: Plugins,
user_provider: Option<UserProviderRef>,
#[cfg(feature = "apidocs")]
api: OpenApi,
router: Router,
}

impl HttpServerBuilder {
pub fn new(options: HttpOptions) -> Self {
#[cfg(feature = "apidocs")]
let api = OpenApi {
info: Info {
title: "GreptimeDB HTTP API".to_string(),
description: Some("HTTP APIs to interact with GreptimeDB".to_string()),
version: HTTP_API_VERSION.to_string(),
..Info::default()
},
servers: vec![OpenAPIServer {
url: format!("/{HTTP_API_VERSION}"),
..OpenAPIServer::default()
}],
..OpenApi::default()
};
Self {
options,
plugins: Plugins::default(),
user_provider: None,
#[cfg(feature = "apidocs")]
api,
router: Router::new(),
}
}

#[cfg(feature = "apidocs")]
pub fn with_sql_handler(
mut self,
sql_handler: ServerSqlQueryHandlerRef,
script_handler: Option<ScriptHandlerRef>,
) -> Self {
let sql_router = HttpServer::route_sql(ApiState {
sql_handler,
script_handler,
});

let sql_router = sql_router
.finish_api(&mut self.api)
.layer(Extension(self.api.clone()));

Self {
router: self
.router
.nest(&format!("/{HTTP_API_VERSION}"), sql_router),
..self
}
}

#[cfg(not(feature = "apidocs"))]
pub fn with_sql_handler(
self,
sql_handler: ServerSqlQueryHandlerRef,
@@ -825,27 +757,6 @@ impl HttpServer {
.with_state(log_state)
}

#[cfg(feature = "apidocs")]
fn route_sql<S>(api_state: ApiState) -> ApiRouter<S> {
ApiRouter::new()
.api_route(
"/sql",
apirouting::get_with(handler::sql, handler::sql_docs)
.post_with(handler::sql, handler::sql_docs),
)
.api_route(
"/promql",
apirouting::get_with(handler::promql, handler::sql_docs)
.post_with(handler::promql, handler::sql_docs),
)
.api_route("/scripts", apirouting::post(script::scripts))
.api_route("/run-script", apirouting::post(script::run_script))
.route("/private/api.json", apirouting::get(serve_api))
.route("/private/docs", apirouting::get(serve_docs))
.with_state(api_state)
}

#[cfg(not(feature = "apidocs"))]
fn route_sql<S>(api_state: ApiState) -> Router<S> {
Router::new()
.route("/sql", routing::get(handler::sql).post(handler::sql))
3 changes: 1 addition & 2 deletions src/servers/src/http/event.rs
@@ -40,7 +40,6 @@ use pipeline::error::PipelineTransformSnafu;
use pipeline::util::to_pipeline_version;
use pipeline::PipelineVersion;
use prost::Message;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::{Deserializer, Map, Value};
use session::context::{Channel, QueryContext, QueryContextRef};
@@ -89,7 +88,7 @@ lazy_static! {
];
}

#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct LogIngesterQueryParams {
pub table: Option<String>,
pub db: Option<String>,
18 changes: 5 additions & 13 deletions src/servers/src/http/handler.rs
@@ -16,8 +16,6 @@ use std::collections::HashMap;
use std::sync::Arc;
use std::time::Instant;

#[cfg(feature = "apidocs")]
use aide::transform::TransformOperation;
use axum::extract::{Json, Query, State};
use axum::response::{IntoResponse, Response};
use axum::{Extension, Form};
@@ -29,7 +27,6 @@ use common_query::{Output, OutputData};
use common_recordbatch::util;
use common_telemetry::tracing;
use query::parser::{PromQuery, DEFAULT_LOOKBACK_STRING};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use session::context::{Channel, QueryContext, QueryContextRef};
Expand All @@ -49,7 +46,7 @@ use crate::http::{
use crate::metrics_handler::MetricsHandler;
use crate::query_handler::sql::ServerSqlQueryHandlerRef;

#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct SqlQuery {
pub db: Option<String>,
pub sql: Option<String>,
@@ -220,7 +217,7 @@ pub async fn from_output(
Ok((results, merge_map))
}

#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct PromqlQuery {
pub query: String,
pub start: String,
@@ -278,11 +275,6 @@ pub async fn promql(
.into_response()
}

#[cfg(feature = "apidocs")]
pub(crate) fn sql_docs(op: TransformOperation) -> TransformOperation {
op.response::<200, Json<HttpResponse>>()
}

/// Handler to export metrics
#[axum_macros::debug_handler]
pub async fn metrics(
@@ -302,10 +294,10 @@ pub async fn metrics(
state.render()
}

#[derive(Debug, Serialize, Deserialize, JsonSchema)]
#[derive(Debug, Serialize, Deserialize)]
pub struct HealthQuery {}

#[derive(Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct HealthResponse {}

/// Handler to export healthy check
@@ -316,7 +308,7 @@ pub async fn health(Query(_params): Query<HealthQuery>) -> Json<HealthResponse>
Json(HealthResponse {})
}

#[derive(Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct StatusResponse<'a> {
pub source_time: &'a str,
pub commit: &'a str,