diff --git a/Cargo.lock b/Cargo.lock
index 5d6750643035..65612d90514e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -74,26 +74,6 @@ dependencies = [
  "memchr",
 ]
 
-[[package]]
-name = "aide"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "befdff0b4683a0824fc8719ce639a252d9d62cd89c8d0004c39e2417128c1eb8"
-dependencies = [
- "axum",
- "bytes",
- "cfg-if",
- "http 0.2.12",
- "indexmap 1.9.3",
- "schemars",
- "serde",
- "serde_json",
- "thiserror",
- "tower-layer",
- "tower-service",
- "tracing",
-]
-
 [[package]]
 name = "alloc-no-stdlib"
 version = "2.0.4"
@@ -2443,7 +2423,6 @@ version = "0.11.0"
 dependencies = [
  "build-data",
  "const_format",
- "schemars",
  "serde",
  "shadow-rs",
 ]
@@ -10651,7 +10630,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92"
 dependencies = [
  "dyn-clone",
- "indexmap 1.9.3",
  "schemars_derive",
  "serde",
  "serde_json",
@@ -10974,7 +10952,6 @@ name = "servers"
 version = "0.11.0"
 dependencies = [
  "ahash 0.8.11",
- "aide",
  "api",
  "arrow",
  "arrow-flight",
@@ -11056,7 +11033,6 @@ dependencies = [
  "rustls 0.23.13",
  "rustls-pemfile 2.2.0",
  "rustls-pki-types",
- "schemars",
  "script",
  "serde",
  "serde_json",
diff --git a/Cargo.toml b/Cargo.toml
index 9f3a4ff59e76..2082d873866d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -167,7 +167,6 @@ rstest = "0.21"
 rstest_reuse = "0.7"
 rust_decimal = "1.33"
 rustc-hash = "2.0"
-schemars = "0.8"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = { version = "1.0", features = ["float_roundtrip"] }
 serde_with = "3"
diff --git a/src/common/version/Cargo.toml b/src/common/version/Cargo.toml
index 830f5a757f39..63d7395f0702 100644
--- a/src/common/version/Cargo.toml
+++ b/src/common/version/Cargo.toml
@@ -8,11 +8,10 @@ license.workspace = true
 workspace = true
 
 [features]
-codec = ["dep:serde", "dep:schemars"]
+codec = ["dep:serde"]
 
 [dependencies]
 const_format = "0.2"
-schemars = { workspace = true, optional = true }
 serde = { workspace = true, optional = true }
 shadow-rs.workspace = true
diff --git a/src/common/version/src/lib.rs b/src/common/version/src/lib.rs
index 151ad079e8b7..e4cee2d3f482 100644
--- a/src/common/version/src/lib.rs
+++ b/src/common/version/src/lib.rs
@@ -49,10 +49,7 @@ impl Display for BuildInfo {
 }
 
 #[derive(Clone, Debug, PartialEq)]
-#[cfg_attr(
-    feature = "codec",
-    derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema)
-)]
+#[cfg_attr(feature = "codec", derive(serde::Serialize, serde::Deserialize))]
 pub struct OwnedBuildInfo {
     pub branch: String,
     pub commit: String,
diff --git a/src/servers/Cargo.toml b/src/servers/Cargo.toml
index 96ec28f2578b..6365bbc8d041 100644
--- a/src/servers/Cargo.toml
+++ b/src/servers/Cargo.toml
@@ -10,15 +10,12 @@ dashboard = []
 mem-prof = ["dep:common-mem-prof"]
 pprof = ["dep:common-pprof"]
 testing = []
-apidocs = ["dep:aide"]
-
 [lints]
 workspace = true
 
 [dependencies]
 ahash = "0.8"
-aide = { version = "0.9", features = ["axum"], optional = true }
 api.workspace = true
 arrow.workspace = true
 arrow-flight.workspace = true
@@ -95,7 +92,6 @@ rust-embed = { version = "6.6", features = ["debug-embed"] }
 rustls = { version = "0.23", default-features = false, features = ["ring", "logging", "std", "tls12"] }
 rustls-pemfile = "2.0"
 rustls-pki-types = "1.0"
-schemars.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 session.workspace = true
diff --git a/src/servers/src/http.rs b/src/servers/src/http.rs
index d62de607ca35..e2d5fbce4754 100644
--- a/src/servers/src/http.rs
+++ b/src/servers/src/http.rs
@@ -18,20 +18,10 @@ use std::net::SocketAddr;
 use std::sync::Mutex as StdMutex;
 use std::time::Duration;
 
-#[cfg(feature = "apidocs")]
-use aide::{
-    axum::{routing as apirouting, ApiRouter, IntoApiResponse},
-    openapi::{Info, OpenApi, Server as OpenAPIServer},
-    OperationOutput,
-};
 use async_trait::async_trait;
 use auth::UserProviderRef;
-#[cfg(feature = "apidocs")]
-use axum::Extension;
 use axum::error_handling::HandleErrorLayer;
 use axum::extract::DefaultBodyLimit;
-#[cfg(feature = "apidocs")]
-use axum::response::Html;
 use axum::response::{IntoResponse, Json, Response};
 use axum::{middleware, routing, BoxError, Router};
 use common_base::readable_size::ReadableSize;
@@ -46,7 +36,6 @@ use datatypes::schema::SchemaRef;
 use datatypes::value::transform_value_ref_to_json_value;
 use event::{LogState, LogValidatorRef};
 use futures::FutureExt;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use snafu::{ensure, ResultExt};
@@ -155,7 +144,7 @@ impl Default for HttpOptions {
     }
 }
 
-#[derive(Debug, Serialize, Deserialize, JsonSchema, Eq, PartialEq)]
+#[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]
 pub struct ColumnSchema {
     name: String,
     data_type: String,
@@ -167,7 +156,7 @@ impl ColumnSchema {
     }
 }
 
-#[derive(Debug, Serialize, Deserialize, JsonSchema, Eq, PartialEq)]
+#[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]
 pub struct OutputSchema {
     column_schemas: Vec<ColumnSchema>,
 }
@@ -195,7 +184,7 @@ impl From<SchemaRef> for OutputSchema {
     }
 }
 
-#[derive(Debug, Serialize, Deserialize, JsonSchema, Eq, PartialEq)]
+#[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]
 pub struct HttpRecordsOutput {
     schema: OutputSchema,
     rows: Vec<Vec<Value>>,
@@ -271,7 +260,7 @@ impl HttpRecordsOutput {
     }
 }
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema, Eq, PartialEq)]
+#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
 #[serde(rename_all = "lowercase")]
 pub enum GreptimeQueryOutput {
     AffectedRows(usize),
@@ -359,7 +348,7 @@ impl Display for Epoch {
     }
 }
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 pub enum HttpResponse {
     Arrow(ArrowResponse),
     Csv(CsvResponse),
@@ -427,11 +416,6 @@ impl IntoResponse for HttpResponse {
     }
 }
 
-#[cfg(feature = "apidocs")]
-impl OperationOutput for HttpResponse {
-    type Inner = Response;
-}
-
 impl From<ArrowResponse> for HttpResponse {
     fn from(value: ArrowResponse) -> Self {
         HttpResponse::Arrow(value)
@@ -474,16 +458,6 @@ impl From for HttpResponse {
     }
 }
 
-#[cfg(feature = "apidocs")]
-async fn serve_api(Extension(api): Extension<OpenApi>) -> impl IntoApiResponse {
-    Json(api)
-}
-
-#[cfg(feature = "apidocs")]
-async fn serve_docs() -> Html<String> {
-    Html(include_str!("http/redoc.html").to_owned())
-}
-
 #[derive(Clone)]
 pub struct ApiState {
     pub sql_handler: ServerSqlQueryHandlerRef,
@@ -500,61 +474,19 @@ pub struct HttpServerBuilder {
     options: HttpOptions,
     plugins: Plugins,
     user_provider: Option<UserProviderRef>,
-    #[cfg(feature = "apidocs")]
-    api: OpenApi,
     router: Router,
 }
 
 impl HttpServerBuilder {
     pub fn new(options: HttpOptions) -> Self {
-        #[cfg(feature = "apidocs")]
-        let api = OpenApi {
-            info: Info {
-                title: "GreptimeDB HTTP API".to_string(),
-                description: Some("HTTP APIs to interact with GreptimeDB".to_string()),
-                version: HTTP_API_VERSION.to_string(),
-                ..Info::default()
-            },
-            servers: vec![OpenAPIServer {
-                url: format!("/{HTTP_API_VERSION}"),
-                ..OpenAPIServer::default()
-            }],
-            ..OpenApi::default()
-        };
         Self {
             options,
             plugins: Plugins::default(),
             user_provider: None,
-            #[cfg(feature = "apidocs")]
-            api,
             router: Router::new(),
         }
     }
 
-    #[cfg(feature = "apidocs")]
-    pub fn with_sql_handler(
-        mut self,
-        sql_handler: ServerSqlQueryHandlerRef,
-        script_handler: Option<ScriptHandlerRef>,
-    ) -> Self {
-        let sql_router = HttpServer::route_sql(ApiState {
-            sql_handler,
-            script_handler,
-        });
-
-        let sql_router = sql_router
-            .finish_api(&mut self.api)
-            .layer(Extension(self.api.clone()));
-
-        Self {
-            router: self
-                .router
-                .nest(&format!("/{HTTP_API_VERSION}"), sql_router),
-            ..self
-        }
-    }
-
-    #[cfg(not(feature = "apidocs"))]
     pub fn with_sql_handler(
         self,
         sql_handler: ServerSqlQueryHandlerRef,
@@ -825,27 +757,6 @@ impl HttpServer {
             .with_state(log_state)
     }
 
-    #[cfg(feature = "apidocs")]
-    fn route_sql(api_state: ApiState) -> ApiRouter {
-        ApiRouter::new()
-            .api_route(
-                "/sql",
-                apirouting::get_with(handler::sql, handler::sql_docs)
-                    .post_with(handler::sql, handler::sql_docs),
-            )
-            .api_route(
-                "/promql",
-                apirouting::get_with(handler::promql, handler::sql_docs)
-                    .post_with(handler::promql, handler::sql_docs),
-            )
-            .api_route("/scripts", apirouting::post(script::scripts))
-            .api_route("/run-script", apirouting::post(script::run_script))
-            .route("/private/api.json", apirouting::get(serve_api))
-            .route("/private/docs", apirouting::get(serve_docs))
-            .with_state(api_state)
-    }
-
-    #[cfg(not(feature = "apidocs"))]
     fn route_sql(api_state: ApiState) -> Router {
         Router::new()
             .route("/sql", routing::get(handler::sql).post(handler::sql))
diff --git a/src/servers/src/http/event.rs b/src/servers/src/http/event.rs
index 7f227bdc6f6e..2f318d14a059 100644
--- a/src/servers/src/http/event.rs
+++ b/src/servers/src/http/event.rs
@@ -40,7 +40,6 @@ use pipeline::error::PipelineTransformSnafu;
 use pipeline::util::to_pipeline_version;
 use pipeline::PipelineVersion;
 use prost::Message;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use serde_json::{Deserializer, Map, Value};
 use session::context::{Channel, QueryContext, QueryContextRef};
@@ -89,7 +88,7 @@ lazy_static! {
     ];
 }
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct LogIngesterQueryParams {
     pub table: Option<String>,
     pub db: Option<String>,
diff --git a/src/servers/src/http/handler.rs b/src/servers/src/http/handler.rs
index 5b3aef989c43..15a1a0e16c73 100644
--- a/src/servers/src/http/handler.rs
+++ b/src/servers/src/http/handler.rs
@@ -16,8 +16,6 @@ use std::collections::HashMap;
 use std::sync::Arc;
 use std::time::Instant;
 
-#[cfg(feature = "apidocs")]
-use aide::transform::TransformOperation;
 use axum::extract::{Json, Query, State};
 use axum::response::{IntoResponse, Response};
 use axum::{Extension, Form};
@@ -29,7 +27,6 @@ use common_query::{Output, OutputData};
 use common_recordbatch::util;
 use common_telemetry::tracing;
 use query::parser::{PromQuery, DEFAULT_LOOKBACK_STRING};
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use session::context::{Channel, QueryContext, QueryContextRef};
@@ -49,7 +46,7 @@ use crate::http::{
 use crate::metrics_handler::MetricsHandler;
 use crate::query_handler::sql::ServerSqlQueryHandlerRef;
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct SqlQuery {
     pub db: Option<String>,
     pub sql: Option<String>,
@@ -220,7 +217,7 @@ pub async fn from_output(
     Ok((results, merge_map))
 }
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct PromqlQuery {
     pub query: String,
     pub start: String,
@@ -278,11 +275,6 @@ pub async fn promql(
     .into_response()
 }
 
-#[cfg(feature = "apidocs")]
-pub(crate) fn sql_docs(op: TransformOperation) -> TransformOperation {
-    op.response::<200, Json<HttpResponse>>()
-}
-
 /// Handler to export metrics
 #[axum_macros::debug_handler]
 pub async fn metrics(
@@ -302,10 +294,10 @@ pub async fn metrics(
     state.render()
 }
 
-#[derive(Debug, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Serialize, Deserialize)]
 pub struct HealthQuery {}
 
-#[derive(Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
 pub struct HealthResponse {}
 
 /// Handler to export healthy check
@@ -316,7 +308,7 @@ pub async fn health(Query(_params): Query<HealthQuery>) -> Json<HealthResponse> {
     Json(HealthResponse {})
 }
 
-#[derive(Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
 pub struct StatusResponse<'a> {
     pub source_time: &'a str,
     pub commit: &'a str,
diff --git a/src/servers/src/http/pprof.rs b/src/servers/src/http/pprof.rs
index 7994c9ace3d8..5d0dee0f90d7 100644
--- a/src/servers/src/http/pprof.rs
+++ b/src/servers/src/http/pprof.rs
@@ -22,14 +22,13 @@ pub mod handler {
     use axum::response::IntoResponse;
     use common_pprof::Profiling;
     use common_telemetry::info;
-    use schemars::JsonSchema;
     use serde::{Deserialize, Serialize};
     use snafu::ResultExt;
 
     use crate::error::{DumpPprofSnafu, Result};
 
     /// Output format.
-    #[derive(Debug, Serialize, Deserialize, JsonSchema)]
+    #[derive(Debug, Serialize, Deserialize)]
     #[serde(rename_all = "snake_case")]
     pub enum Output {
         /// google’s pprof format report in protobuf.
@@ -40,7 +39,7 @@ pub mod handler {
         Flamegraph,
     }
 
-    #[derive(Serialize, Deserialize, Debug, JsonSchema)]
+    #[derive(Serialize, Deserialize, Debug)]
     #[serde(default)]
     pub struct PprofQuery {
         seconds: u64,
diff --git a/src/servers/src/http/prom_store.rs b/src/servers/src/http/prom_store.rs
index 953160de5b31..a429725819c0 100644
--- a/src/servers/src/http/prom_store.rs
+++ b/src/servers/src/http/prom_store.rs
@@ -28,7 +28,6 @@ use hyper::{Body, HeaderMap};
 use lazy_static::lazy_static;
 use object_pool::Pool;
 use prost::Message;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use session::context::{Channel, QueryContext};
 use snafu::prelude::*;
@@ -49,7 +48,7 @@ pub const DEFAULT_ENCODING: &str = "snappy";
 pub const VM_ENCODING: &str = "zstd";
 pub const VM_PROTO_VERSION: &str = "1";
 
-#[derive(Debug, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Serialize, Deserialize)]
 pub struct RemoteWriteQuery {
     pub db: Option<String>,
     /// Specify which physical table to use for storing metrics.
diff --git a/src/servers/src/http/prometheus.rs b/src/servers/src/http/prometheus.rs
index 684845e69abc..fa7f67f6fff4 100644
--- a/src/servers/src/http/prometheus.rs
+++ b/src/servers/src/http/prometheus.rs
@@ -38,7 +38,6 @@ use promql_parser::parser::{
     UnaryExpr, VectorSelector,
 };
 use query::parser::{PromQuery, DEFAULT_LOOKBACK_STRING};
-use schemars::JsonSchema;
 use serde::de::{self, MapAccess, Visitor};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
@@ -55,7 +54,7 @@ use crate::prom_store::{FIELD_NAME_LABEL, METRIC_NAME_LABEL};
 use crate::prometheus_handler::PrometheusHandlerRef;
 
 /// For [ValueType::Vector] result type
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)]
+#[derive(Debug, Default, Serialize, Deserialize, PartialEq)]
 pub struct PromSeriesVector {
     pub metric: HashMap<String, String>,
     #[serde(skip_serializing_if = "Option::is_none")]
@@ -63,14 +62,14 @@ pub struct PromSeriesVector {
 }
 
 /// For [ValueType::Matrix] result type
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)]
+#[derive(Debug, Default, Serialize, Deserialize, PartialEq)]
 pub struct PromSeriesMatrix {
     pub metric: HashMap<String, String>,
     pub values: Vec<(f64, String)>,
 }
 
 /// Variants corresponding to [ValueType]
-#[derive(Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
+#[derive(Debug, Serialize, Deserialize, PartialEq)]
 #[serde(untagged)]
 pub enum PromQueryResult {
     Matrix(Vec<PromSeriesMatrix>),
@@ -85,14 +84,14 @@ impl Default for PromQueryResult {
     }
 }
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)]
+#[derive(Debug, Default, Serialize, Deserialize, PartialEq)]
 pub struct PromData {
     #[serde(rename = "resultType")]
     pub result_type: String,
     pub result: PromQueryResult,
 }
 
-#[derive(Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
+#[derive(Debug, Serialize, Deserialize, PartialEq)]
 #[serde(untagged)]
 pub enum PrometheusResponse {
     PromData(PromData),
@@ -101,7 +100,6 @@ pub enum PrometheusResponse {
     LabelValues(Vec<String>),
     FormatQuery(String),
     BuildInfo(OwnedBuildInfo),
-    #[schemars(skip)]
     #[serde(skip_deserializing)]
     ParseResult(promql_parser::parser::Expr),
 }
@@ -112,7 +110,7 @@ impl Default for PrometheusResponse {
     }
 }
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct FormatQuery {
     query: Option<String>,
 }
@@ -141,7 +139,7 @@ pub async fn format_query(
     }
 }
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct BuildInfoQuery {}
 
 #[axum_macros::debug_handler]
@@ -154,7 +152,7 @@ pub async fn build_info_query() -> PrometheusJsonResponse {
     PrometheusJsonResponse::success(PrometheusResponse::BuildInfo(build_info.into()))
 }
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct InstantQuery {
     query: Option<String>,
     lookback: Option<String>,
@@ -209,7 +207,7 @@ pub async fn instant_query(
     PrometheusJsonResponse::from_query_result(result, metric_name, result_type).await
 }
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct RangeQuery {
     query: Option<String>,
     start: Option<String>,
@@ -261,10 +259,10 @@ pub async fn range_query(
     PrometheusJsonResponse::from_query_result(result, metric_name, ValueType::Matrix).await
 }
 
-#[derive(Debug, Default, Serialize, JsonSchema)]
+#[derive(Debug, Default, Serialize)]
 struct Matches(Vec<String>);
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct LabelsQuery {
     start: Option<String>,
     end: Option<String>,
@@ -663,7 +661,7 @@ fn promql_expr_to_metric_name(expr: &PromqlExpr) -> Option<String> {
     }
 }
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct LabelValueQuery {
     start: Option<String>,
     end: Option<String>,
@@ -927,7 +925,7 @@ fn retrieve_metric_name_from_promql(query: &str) -> Option<String> {
     visitor.metric_name
 }
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct SeriesQuery {
     start: Option<String>,
     end: Option<String>,
@@ -1018,7 +1016,7 @@ pub async fn series_query(
     resp
 }
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct ParseQuery {
     query: Option<String>,
     db: Option<String>,
diff --git a/src/servers/src/http/result/arrow_result.rs b/src/servers/src/http/result/arrow_result.rs
index e6e9abe6d398..bb865d9c000c 100644
--- a/src/servers/src/http/result/arrow_result.rs
+++ b/src/servers/src/http/result/arrow_result.rs
@@ -24,7 +24,6 @@ use common_error::status_code::StatusCode;
 use common_query::{Output, OutputData};
 use common_recordbatch::RecordBatchStream;
 use futures::StreamExt;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use snafu::ResultExt;
 
@@ -33,7 +32,7 @@ use crate::http::header::{GREPTIME_DB_HEADER_EXECUTION_TIME, GREPTIME_DB_HEADER_
 use crate::http::result::error_result::ErrorResponse;
 use crate::http::{HttpResponse, ResponseFormat};
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 pub struct ArrowResponse {
     pub(crate) data: Vec<u8>,
     pub(crate) execution_time_ms: u64,
diff --git a/src/servers/src/http/result/csv_result.rs b/src/servers/src/http/result/csv_result.rs
index 4f78eb8da7b5..b6b997ef2e4d 100644
--- a/src/servers/src/http/result/csv_result.rs
+++ b/src/servers/src/http/result/csv_result.rs
@@ -20,7 +20,6 @@ use common_error::status_code::StatusCode;
 use common_query::Output;
 use itertools::Itertools;
 use mime_guess::mime;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 
 use crate::http::header::{GREPTIME_DB_HEADER_EXECUTION_TIME, GREPTIME_DB_HEADER_FORMAT};
@@ -28,7 +27,7 @@ use crate::http::header::{GREPTIME_DB_HEADER_EXECUTION_TIME, GREPTIME_DB_HEADER_
 use crate::http::result::error_result::ErrorResponse;
 use crate::http::{handler, process_with_limit, GreptimeQueryOutput, HttpResponse, ResponseFormat};
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 pub struct CsvResponse {
     output: Vec<GreptimeQueryOutput>,
     execution_time_ms: u64,
diff --git a/src/servers/src/http/result/error_result.rs b/src/servers/src/http/result/error_result.rs
index bbc488e56250..be5f01f9e950 100644
--- a/src/servers/src/http/result/error_result.rs
+++ b/src/servers/src/http/result/error_result.rs
@@ -18,14 +18,13 @@ use axum::Json;
 use common_error::ext::ErrorExt;
 use common_error::status_code::StatusCode;
 use common_telemetry::{debug, error};
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 
 use crate::error::status_code_to_http_status;
 use crate::http::header::constants::GREPTIME_DB_HEADER_ERROR_CODE;
 use crate::http::header::GREPTIME_DB_HEADER_EXECUTION_TIME;
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 pub struct ErrorResponse {
     code: u32,
     error: String,
diff --git a/src/servers/src/http/result/greptime_manage_resp.rs b/src/servers/src/http/result/greptime_manage_resp.rs
index d2f61715b5e3..668a12f48021 100644
--- a/src/servers/src/http/result/greptime_manage_resp.rs
+++ b/src/servers/src/http/result/greptime_manage_resp.rs
@@ -16,14 +16,13 @@ use axum::response::IntoResponse;
 use axum::Json;
 use http::header::CONTENT_TYPE;
 use http::HeaderValue;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 
 use crate::http::header::{GREPTIME_DB_HEADER_EXECUTION_TIME, GREPTIME_DB_HEADER_FORMAT};
 
 /// Greptimedb Manage Api Response struct
 /// Currently we have `Pipelines` and `Scripts` as control panel api
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 pub struct GreptimedbManageResponse {
     #[serde(flatten)]
     pub(crate) manage_result: ManageResult,
@@ -57,7 +56,7 @@ impl GreptimedbManageResponse {
     }
 }
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 #[serde(untagged)]
 pub enum ManageResult {
     Pipelines { pipelines: Vec<PipelineOutput> },
@@ -65,7 +64,7 @@ pub enum ManageResult {
     Scripts(),
 }
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 pub struct PipelineOutput {
     name: String,
     version: String,
diff --git a/src/servers/src/http/result/greptime_result_v1.rs b/src/servers/src/http/result/greptime_result_v1.rs
index ba90b6b329b7..794308b1f924 100644
--- a/src/servers/src/http/result/greptime_result_v1.rs
+++ b/src/servers/src/http/result/greptime_result_v1.rs
@@ -18,7 +18,6 @@ use axum::headers::HeaderValue;
 use axum::response::{IntoResponse, Response};
 use axum::Json;
 use common_query::Output;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 
@@ -27,7 +26,7 @@ use crate::http::header::{
 };
 use crate::http::{handler, process_with_limit, GreptimeQueryOutput, HttpResponse, ResponseFormat};
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 pub struct GreptimedbV1Response {
     #[serde(skip_serializing_if = "Vec::is_empty", default)]
     pub(crate) output: Vec<GreptimeQueryOutput>,
diff --git a/src/servers/src/http/result/influxdb_result_v1.rs b/src/servers/src/http/result/influxdb_result_v1.rs
index ae65b8fa0acb..6ddf8fbcf766 100644
--- a/src/servers/src/http/result/influxdb_result_v1.rs
+++ b/src/servers/src/http/result/influxdb_result_v1.rs
@@ -17,7 +17,6 @@ use axum::response::{IntoResponse, Response};
 use axum::Json;
 use common_query::{Output, OutputData};
 use common_recordbatch::{util, RecordBatch};
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use snafu::ResultExt;
@@ -27,7 +26,7 @@ use crate::http::header::{GREPTIME_DB_HEADER_EXECUTION_TIME, GREPTIME_DB_HEADER_
 use crate::http::result::error_result::ErrorResponse;
 use crate::http::{Epoch, HttpResponse, ResponseFormat};
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct SqlQuery {
     pub db: Option<String>,
     // Returns epoch timestamps with the specified precision.
@@ -37,7 +36,7 @@ pub struct SqlQuery {
     pub sql: Option<String>,
 }
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema, Eq, PartialEq)]
+#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
 pub struct InfluxdbRecordsOutput {
     // The SQL query does not return the table name, but in InfluxDB,
     // we require the table name, so we set it to an empty string “”.
@@ -106,7 +105,7 @@ impl TryFrom<(Option<Epoch>, Vec<RecordBatch>)> for InfluxdbRecordsOutput {
     }
 }
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema, Eq, PartialEq)]
+#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
 pub struct InfluxdbOutput {
     pub statement_id: u32,
     pub series: Vec<InfluxdbRecordsOutput>,
@@ -125,7 +124,7 @@ impl InfluxdbOutput {
     }
 }
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 pub struct InfluxdbV1Response {
     results: Vec<InfluxdbOutput>,
     execution_time_ms: u64,
diff --git a/src/servers/src/http/result/json_result.rs b/src/servers/src/http/result/json_result.rs
index 71546c570d8a..4b84f0de8123 100644
--- a/src/servers/src/http/result/json_result.rs
+++ b/src/servers/src/http/result/json_result.rs
@@ -17,7 +17,6 @@ use axum::response::{IntoResponse, Response};
 use common_error::status_code::StatusCode;
 use common_query::Output;
 use mime_guess::mime;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use serde_json::{json, Map, Value};
 
@@ -27,7 +26,7 @@ use crate::http::{handler, process_with_limit, GreptimeQueryOutput, HttpResponse
 
 /// The json format here is different from the default json output of `GreptimedbV1` result.
 /// `JsonResponse` is intended to make it easier for user to consume data.
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 pub struct JsonResponse {
     output: Vec<GreptimeQueryOutput>,
     execution_time_ms: u64,
diff --git a/src/servers/src/http/result/prometheus_resp.rs b/src/servers/src/http/result/prometheus_resp.rs
index 84497c1533df..d4ab0480dabf 100644
--- a/src/servers/src/http/result/prometheus_resp.rs
+++ b/src/servers/src/http/result/prometheus_resp.rs
@@ -27,7 +27,6 @@ use datatypes::scalars::ScalarVector;
 use datatypes::vectors::{Float64Vector, StringVector, TimestampMillisecondVector};
 use promql_parser::label::METRIC_NAME;
 use promql_parser::parser::value::ValueType;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use snafu::{OptionExt, ResultExt};
@@ -40,7 +39,7 @@ use crate::http::prometheus::{
     PromData, PromQueryResult, PromSeriesMatrix, PromSeriesVector, PrometheusResponse,
 };
 
-#[derive(Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)]
+#[derive(Debug, Default, Serialize, Deserialize, PartialEq)]
 pub struct PrometheusJsonResponse {
     pub status: String,
     pub data: PrometheusResponse,
diff --git a/src/servers/src/http/result/table_result.rs b/src/servers/src/http/result/table_result.rs
index 9a69b08c0a5e..91780ed6b6ba 100644
--- a/src/servers/src/http/result/table_result.rs
+++ b/src/servers/src/http/result/table_result.rs
@@ -21,14 +21,13 @@ use common_error::status_code::StatusCode;
 use common_query::Output;
 use itertools::Itertools;
 use mime_guess::mime;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 
 use crate::http::header::{GREPTIME_DB_HEADER_EXECUTION_TIME, GREPTIME_DB_HEADER_FORMAT};
 use crate::http::result::error_result::ErrorResponse;
 use crate::http::{handler, process_with_limit, GreptimeQueryOutput, HttpResponse, ResponseFormat};
 
-#[derive(Serialize, Deserialize, Debug, JsonSchema)]
+#[derive(Serialize, Deserialize, Debug)]
 pub struct TableResponse {
     output: Vec<GreptimeQueryOutput>,
     execution_time_ms: u64,
diff --git a/src/servers/src/http/script.rs b/src/servers/src/http/script.rs
index 278e54457a85..c981aca88c73 100644
--- a/src/servers/src/http/script.rs
+++ b/src/servers/src/http/script.rs
@@ -20,7 +20,6 @@ use axum::extract::{Query, RawBody, State};
 use common_catalog::consts::DEFAULT_CATALOG_NAME;
 use common_error::ext::ErrorExt;
 use common_error::status_code::StatusCode;
-use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use session::context::QueryContext;
 use snafu::ResultExt;
@@ -96,7 +95,7 @@ pub async fn scripts(
     }
 }
 
-#[derive(Debug, Serialize, Deserialize, JsonSchema, Default)]
+#[derive(Debug, Serialize, Deserialize, Default)]
 pub struct ScriptQuery {
     pub catalog: Option<String>,
     pub db: Option<String>,
diff --git a/src/servers/tests/http/http_test.rs b/src/servers/tests/http/http_test.rs
deleted file mode 100644
index 071d0f6548cb..000000000000
--- a/src/servers/tests/http/http_test.rs
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright 2023 Greptime Team
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use axum::Router;
-use common_test_util::ports;
-use servers::http::test_helpers::TestClient;
-use servers::http::{HttpOptions, HttpServerBuilder};
-use table::test_util::MemTable;
-
-use crate::create_testing_sql_query_handler;
-
-fn make_test_app() -> Router {
-    let http_opts = HttpOptions {
-        addr: format!("127.0.0.1:{}", ports::get_port()),
-        ..Default::default()
-    };
-
-    let server = HttpServerBuilder::new(http_opts)
-        .with_sql_handler(
-            create_testing_sql_query_handler(MemTable::default_numbers_table()),
-            None,
-        )
-        .build();
-    server.build(server.make_app())
-}
-
-#[tokio::test]
-async fn test_api_and_doc() {
-    let app = make_test_app();
-    let client = TestClient::new(app);
-    let result = client.get("/v1/private/api.json").send().await;
-    assert_eq!(result.status(), 200);
-    let result = client.get("/v1/private/docs").send().await;
-    assert_eq!(result.status(), 200);
-}
diff --git a/src/servers/tests/http/mod.rs b/src/servers/tests/http/mod.rs
index 338ea3f87c3d..cca2dfd78737 100644
--- a/src/servers/tests/http/mod.rs
+++ b/src/servers/tests/http/mod.rs
@@ -14,7 +14,6 @@
 
 mod authorize;
 mod http_handler_test;
-mod http_test;
 mod influxdb_test;
 mod opentsdb_test;
 mod prom_store_test;