Commit c65735f: wip

benthecarman committed Sep 23, 2023
1 parent 45833ac
Showing 15 changed files with 629 additions and 546 deletions.
458 changes: 263 additions & 195 deletions Cargo.lock

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions Cargo.toml
@@ -8,8 +8,6 @@ anyhow = "1.0"
 axum = { version = "0.6.16", features = ["headers"] }
 base64 = "0.13.1"
 chrono = { version = "0.4.26", features = ["serde"] }
-diesel = { version = "2.1", features = ["postgres", "chrono", "numeric"] }
-diesel_migrations = "2.1.0"
 dotenv = "0.15.0"
 futures = "0.3.28"
 hex = "0.4.3"
@@ -21,6 +19,9 @@ sha2 = { version = "0.10", default-features = false }
 serde = { version = "^1.0", features = ["derive"] }
 serde_json = "1.0.67"
 tokio = { version = "1.12.0", features = ["full"] }
+tokio-postgres = { version = "0.7.10", features = ["with-chrono-0_4"] }
+tower-http = { version = "0.4.0", features = ["cors"] }
+
 ureq = { version = "2.5.0", features = ["json"] }
 postgres-openssl = "0.5.0"
 openssl = "0.10.57"
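This commit swaps diesel for plain tokio-postgres. The `with-chrono-0_4` feature maps Postgres TIMESTAMP columns onto chrono types, which the vss_db schema added later in this commit uses for created_date/updated_date. A minimal sketch of what a read looks like under that feature; `fetch_item` is a hypothetical helper, while the table and column names come from migration_baseline.sql in this commit:

use chrono::NaiveDateTime;
use tokio_postgres::Client;

// Hypothetical helper: reads one vss_db row, letting tokio-postgres map
// bytea -> Vec<u8>, BIGINT -> i64, and TIMESTAMP -> chrono::NaiveDateTime.
async fn fetch_item(
    client: &Client,
    store_id: &str,
    key: &str,
) -> Result<Option<(Option<Vec<u8>>, i64, NaiveDateTime)>, tokio_postgres::Error> {
    let row = client
        .query_opt(
            "SELECT value, version, created_date FROM vss_db WHERE store_id = $1 AND key = $2",
            &[&store_id, &key],
        )
        .await?;

    // `value` is a nullable bytea, hence the Option<Vec<u8>>.
    // `Row::get` panics on a type mismatch; `try_get` returns an error instead.
    Ok(row.map(|r| (r.get("value"), r.get("version"), r.get("created_date"))))
}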
4 changes: 2 additions & 2 deletions Dockerfile
@@ -8,9 +8,9 @@ RUN --mount=type=cache,target=/usr/local/cargo,from=rust:latest,source=/usr/loca
     cargo build --release && mv ./target/release/vss-rs ./vss-rs

 # Runtime image
-FROM debian:bookworm-slim
+FROM debian:bullseye-slim

-RUN apt update && apt install -y openssl libpq-dev pkg-config libc6
+RUN apt update && apt install -y openssl libpq-dev pkg-config libc6 openssl libssl-dev libpq5 ca-certificates

 # Run as "app" user
 RUN useradd -ms /bin/bash app
Empty file removed migrations/.keep
6 changes: 0 additions & 6 deletions migrations/00000000000000_diesel_initial_setup/down.sql

This file was deleted.

36 changes: 0 additions & 36 deletions migrations/00000000000000_diesel_initial_setup/up.sql

This file was deleted.

9 changes: 0 additions & 9 deletions migrations/2023-09-18-225828_baseline/down.sql

This file was deleted.

43 changes: 0 additions & 43 deletions migrations/2023-09-18-225828_baseline/up.sql

This file was deleted.

14 changes: 3 additions & 11 deletions src/auth.rs
@@ -1,26 +1,18 @@
 use crate::State;
-use axum::http::{HeaderMap, StatusCode};
+use axum::http::StatusCode;
 use jwt_compact::alg::Es256k;
 use jwt_compact::{AlgorithmExt, TimeOptions, Token, UntrustedToken};
 use log::error;
 use secp256k1::PublicKey;
 use serde::{Deserialize, Serialize};
 use sha2::Sha256;

-pub(crate) fn verify_token(
-    token: &str,
-    state: &State,
-    headers: &HeaderMap,
-) -> Result<String, (StatusCode, HeaderMap, String)> {
+pub(crate) fn verify_token(token: &str, state: &State) -> Result<String, (StatusCode, String)> {
     let es256k1 = Es256k::<Sha256>::new(state.secp.clone());

     validate_jwt_from_user(token, state.auth_key, &es256k1).map_err(|e| {
         error!("Unauthorized: {e}");
-        (
-            StatusCode::UNAUTHORIZED,
-            headers.clone(),
-            format!("Unauthorized: {e}"),
-        )
+        (StatusCode::UNAUTHORIZED, format!("Unauthorized: {e}"))
     })
 }
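The body of validate_jwt_from_user is not part of this diff. Based on the imports above, a plausible sketch follows; the CustomClaims shape and the exact jwt-compact calls (validate_integrity, validate_expiration) are assumptions, not confirmed by this commit:

use jwt_compact::{alg::Es256k, AlgorithmExt, TimeOptions, Token, UntrustedToken};
use secp256k1::PublicKey;
use serde::{Deserialize, Serialize};
use sha2::Sha256;

// Hypothetical claims payload; the real struct lives outside this diff.
#[derive(Serialize, Deserialize)]
struct CustomClaims {
    sub: String,
}

fn validate_jwt_from_user(
    token_str: &str,
    public_key: PublicKey,
    es256k1: &Es256k<Sha256>,
) -> anyhow::Result<String> {
    let untrusted = UntrustedToken::new(token_str)?;
    // Verify the ES256K signature against the server's configured auth key.
    let token: Token<CustomClaims> = es256k1.validate_integrity(&untrusted, &public_key)?;
    // Reject expired tokens using jwt-compact's time checks.
    token.claims().validate_expiration(&TimeOptions::default())?;
    Ok(token.claims().custom.sub.clone())
}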
77 changes: 47 additions & 30 deletions src/main.rs
@@ -1,12 +1,14 @@
-use crate::models::MIGRATIONS;
 use crate::routes::*;
 use axum::headers::Origin;
-use axum::http::{HeaderMap, StatusCode, Uri};
+use axum::http::{request::Parts, HeaderValue, Method, StatusCode, Uri};
 use axum::routing::{get, post, put};
-use axum::{Extension, Router, TypedHeader};
-use diesel::{Connection, PgConnection};
-use diesel_migrations::MigrationHarness;
+use axum::{http, Extension, Router, TypedHeader};
+use openssl::ssl::{SslConnector, SslMethod};
+use postgres_openssl::MakeTlsConnector;
 use secp256k1::{All, PublicKey, Secp256k1};
+use std::sync::Arc;
+use tokio_postgres::Client;
+use tower_http::cors::{AllowOrigin, CorsLayer};

 mod auth;
 mod kv;
@@ -28,9 +30,9 @@ const ALLOWED_LOCALHOST: &str = "http://127.0.0.1:";

 #[derive(Clone)]
 pub struct State {
-    pg_url: String,
-    auth_key: PublicKey,
-    secp: Secp256k1<All>,
+    pub client: Arc<Client>,
+    pub auth_key: PublicKey,
+    pub secp: Secp256k1<All>,
 }

 #[tokio::main]
@@ -51,19 +53,24 @@ async fn main() -> anyhow::Result<()> {
     let auth_key_bytes = hex::decode(auth_key)?;
     let auth_key = PublicKey::from_slice(&auth_key_bytes)?;

-    // DB management
-    let mut connection = PgConnection::establish(&pg_url).unwrap();
+    let builder = SslConnector::builder(SslMethod::tls())?;
+    let connector = MakeTlsConnector::new(builder.build());

-    // TODO not sure if code should handle the migration, could be dangerous with multiple instances
-    // run migrations
-    connection
-        .run_pending_migrations(MIGRATIONS)
-        .expect("migrations could not run");
+    // Connect to the database.
+    let (client, connection) = tokio_postgres::connect(&pg_url, connector).await?;
+
+    // The connection object performs the actual communication with the database,
+    // so spawn it off to run on its own.
+    tokio::spawn(async move {
+        if let Err(e) = connection.await {
+            eprintln!("db connection error: {e}");
+        }
+    });

     let secp = Secp256k1::new();

     let state = State {
-        pg_url,
+        client: Arc::new(client),
         auth_key,
         secp,
     };
@@ -82,6 +89,26 @@
         .route("/v2/listKeyVersions", post(list_key_versions))
         .route("/migration", get(migration::migration))
         .fallback(fallback)
+        .layer(
+            CorsLayer::new()
+                .allow_origin(AllowOrigin::predicate(
+                    |origin: &HeaderValue, _request_parts: &Parts| {
+                        let Ok(origin) = origin.to_str() else {
+                            return false;
+                        };
+
+                        valid_origin(origin)
+                    },
+                ))
+                .allow_headers([http::header::CONTENT_TYPE, http::header::AUTHORIZATION])
+                .allow_methods([
+                    Method::GET,
+                    Method::POST,
+                    Method::PUT,
+                    Method::DELETE,
+                    Method::OPTIONS,
+                ]),
+        )
         .layer(Extension(state));

     let server = axum::Server::bind(&addr).serve(server_router.into_make_service());
@@ -102,20 +129,10 @@
     Ok(())
 }

-async fn fallback(
-    origin: Option<TypedHeader<Origin>>,
-    uri: Uri,
-) -> (StatusCode, HeaderMap, String) {
-    let origin = match validate_cors(origin) {
-        Ok(origin) => origin,
-        Err((status, headers, msg)) => return (status, headers, msg),
+async fn fallback(origin: Option<TypedHeader<Origin>>, uri: Uri) -> (StatusCode, String) {
+    if let Err((status, msg)) = validate_cors(origin) {
+        return (status, msg);
     };

-    let headers = create_cors_headers(&origin);
-
-    (
-        StatusCode::NOT_FOUND,
-        headers,
-        format!("No route for {uri}"),
-    )
+    (StatusCode::NOT_FOUND, format!("No route for {uri}"))
 }
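The valid_origin function called by the CORS predicate above is defined elsewhere in main.rs and is not shown in this diff. Given the ALLOWED_LOCALHOST constant visible in the hunk headers, a sketch of its likely shape; the ALLOWED_ORIGINS list here is a stand-in, not the real allowlist:

const ALLOWED_LOCALHOST: &str = "http://127.0.0.1:";
// Stand-in allowlist; the real origins are not part of this diff.
const ALLOWED_ORIGINS: [&str; 1] = ["https://app.example.com"];

pub fn valid_origin(origin: &str) -> bool {
    // Accept any localhost port plus the fixed allowlist.
    origin.starts_with(ALLOWED_LOCALHOST) || ALLOWED_ORIGINS.contains(&origin)
}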
22 changes: 11 additions & 11 deletions src/migration.rs
@@ -6,7 +6,6 @@ use axum::headers::Authorization;
 use axum::http::StatusCode;
 use axum::{Extension, Json, TypedHeader};
 use chrono::{DateTime, NaiveDateTime, Utc};
-use diesel::{Connection, PgConnection};
 use log::{error, info};
 use serde::{Deserialize, Deserializer};
 use serde_json::json;
@@ -66,8 +65,6 @@ pub async fn migration_impl(admin_key: String, state: &State) -> anyhow::Result<
     let mut finished = false;

-    let mut conn = PgConnection::establish(&state.pg_url).unwrap();
-
     info!("Starting migration");
     while !finished {
         info!("Fetching {limit} items from offset {offset}");
@@ -81,15 +78,18 @@ pub async fn migration_impl(admin_key: String, state: &State) -> anyhow::Result<
         let items: Vec<Item> = resp.into_json()?;

         // Insert values into DB
-        conn.transaction::<_, anyhow::Error, _>(|conn| {
-            for item in items.iter() {
-                if let Ok(value) = base64::decode(&item.value) {
-                    VssItem::put_item(conn, &item.store_id, &item.key, &value, item.version)?;
-                }
-            }
-
-            Ok(())
-        })?;
+        for item in items.iter() {
+            if let Ok(value) = base64::decode(&item.value) {
+                VssItem::put_item(
+                    &state.client,
+                    &item.store_id,
+                    &item.key,
+                    &value,
+                    item.version,
+                )
+                .await?;
+            }
+        }

         if items.len() < limit {
             finished = true;
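VssItem::put_item now takes the shared tokio-postgres client instead of a diesel connection. Its body is not in this diff; a plausible sketch, assuming it delegates to the upsert_vss_db function defined in migration_baseline.sql below:

use tokio_postgres::Client;

// Sketch: the version comparison lives in SQL (upsert_vss_db), so the Rust
// side is a single round trip. bytea maps from &[u8], BIGINT from i64.
pub async fn put_item(
    client: &Client,
    store_id: &str,
    key: &str,
    value: &[u8],
    version: i64,
) -> anyhow::Result<()> {
    client
        .execute(
            "SELECT upsert_vss_db($1, $2, $3, $4)",
            &[&store_id, &key, &value, &version],
        )
        .await?;
    Ok(())
}

Note one behavioral change visible in the hunk above: the diesel version wrapped each batch in a transaction, while the new loop issues independent writes, so a mid-batch failure can leave a batch partially migrated.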
76 changes: 76 additions & 0 deletions src/models/migration_baseline.sql
@@ -0,0 +1,76 @@
CREATE TABLE vss_db
(
store_id TEXT NOT NULL CHECK (store_id != ''),
key TEXT NOT NULL,
value bytea,
version BIGINT NOT NULL,
created_date TIMESTAMP DEFAULT '2023-07-13'::TIMESTAMP NOT NULL,
updated_date TIMESTAMP DEFAULT '2023-07-13'::TIMESTAMP NOT NULL,
PRIMARY KEY (store_id, key)
);

-- triggers to set dates automatically, generated by ChatGPT

-- Function to set created_date and updated_date during INSERT
CREATE OR REPLACE FUNCTION set_created_date()
RETURNS TRIGGER AS $$
BEGIN
NEW.created_date := CURRENT_TIMESTAMP;
NEW.updated_date := CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Function to set updated_date during UPDATE
CREATE OR REPLACE FUNCTION set_updated_date()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_date := CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger for INSERT operation on vss_db
CREATE TRIGGER tr_set_dates_after_insert
BEFORE INSERT ON vss_db
FOR EACH ROW
EXECUTE FUNCTION set_created_date();

-- Trigger for UPDATE operation on vss_db
CREATE TRIGGER tr_set_dates_after_update
BEFORE UPDATE ON vss_db
FOR EACH ROW
EXECUTE FUNCTION set_updated_date();

-- upsert function
CREATE OR REPLACE FUNCTION upsert_vss_db(
p_store_id TEXT,
p_key TEXT,
p_value bytea,
p_version BIGINT
) RETURNS VOID AS $$
BEGIN

WITH new_values (store_id, key, value, version) AS (VALUES (p_store_id, p_key, p_value, p_version))
INSERT
INTO vss_db
(store_id, key, value, version)
SELECT new_values.store_id,
new_values.key,
new_values.value,
new_values.version
FROM new_values
LEFT JOIN vss_db AS existing
ON new_values.store_id = existing.store_id
AND new_values.key = existing.key
WHERE CASE
WHEN new_values.version >= 4294967295 THEN new_values.version >= COALESCE(existing.version, -1)
ELSE new_values.version > COALESCE(existing.version, -1)
END
ON CONFLICT (store_id, key)
DO UPDATE SET value = excluded.value,
version = excluded.version;

END;
$$ LANGUAGE plpgsql;
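The CASE in the WHERE clause is the version gate: an ordinary write must carry a strictly greater version than the stored row, but once the incoming version reaches 4294967295 (u32::MAX) the comparison relaxes to >=, so a MAX-version write always replaces the row, even on replay. A worked example of the effect, assuming a connected tokio_postgres::Client:

use tokio_postgres::Client;

// Demonstrates upsert_vss_db's version semantics.
async fn demo(client: &Client) -> Result<(), tokio_postgres::Error> {
    let upsert = "SELECT upsert_vss_db($1, $2, $3, $4)";
    let (store, key) = ("store", "key1");
    let (v1, v2, vmax): (&[u8], &[u8], &[u8]) = (b"one", b"two", b"max");

    client.execute(upsert, &[&store, &key, &v2, &2i64]).await?; // row created at version 2
    client.execute(upsert, &[&store, &key, &v1, &1i64]).await?; // no-op: 1 is not > 2
    // 4294967295 (u32::MAX) switches the check to >=, so this applies
    // even if the same version is written twice:
    client
        .execute(upsert, &[&store, &key, &vmax, &(u32::MAX as i64)])
        .await?;
    Ok(())
}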
