Skip to content

Commit

Permalink
Merge pull request #1394 from akto-api-security/develop
Browse files Browse the repository at this point in the history
Develop
  • Loading branch information
avneesh-akto authored Aug 22, 2024
2 parents 3788a88 + 0a34ed8 commit 56cd17d
Show file tree
Hide file tree
Showing 4 changed files with 124 additions and 45 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,21 @@

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.akto.dao.context.Context;
import com.akto.utils.jobs.JobUtils;

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public abstract class AfterMongoConnectListener implements ServletContextListener {

private boolean ranOnce = false;
private static final Logger logger = LoggerFactory.getLogger(AfterMongoConnectListener.class);

private final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();

Expand All @@ -29,8 +37,24 @@ public void run() {
continue;
}

boolean runJobFunctions = JobUtils.getRunJobFunctions();
boolean runJobFunctionsAnyway = JobUtils.getRunJobFunctionsAnyway();

try {
runMainFunction();

int now = Context.now();
if (runJobFunctions || runJobFunctionsAnyway) {
logger.info("Starting runtime init functions at " + now);
runMainFunction();
int now2 = Context.now();
int diffNow = now2 - now;
logger.info(String.format(
"Completed runtime init functions at %d , time taken : %d", now2,
diffNow));
} else {
logger.info("Skipping runtime init functions at " + now);
}

ranOnce = true;
} catch (Exception e) {
e.printStackTrace();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import com.akto.dao.traffic_metrics.TrafficMetricsDao;
import com.akto.dao.upload.FileUploadLogsDao;
import com.akto.dao.upload.FileUploadsDao;
import com.akto.dao.usage.UsageMetricInfoDao;
import com.akto.dao.usage.UsageMetricsDao;
import com.akto.dto.*;
import com.akto.dto.billing.FeatureAccess;
Expand Down Expand Up @@ -57,9 +56,7 @@
import com.akto.dto.type.SingleTypeInfo;
import com.akto.dto.upload.FileUpload;
import com.akto.dto.type.URLMethods;
import com.akto.dto.usage.MetricTypes;
import com.akto.dto.usage.UsageMetric;
import com.akto.dto.usage.UsageMetricInfo;
import com.akto.log.CacheLoggerMaker;
import com.akto.log.LoggerMaker;
import com.akto.log.LoggerMaker.LogDb;
Expand All @@ -73,9 +70,9 @@
import com.akto.testing.ApiExecutor;
import com.akto.testing.ApiWorkflowExecutor;
import com.akto.testing.HostDNSLookup;
import com.akto.usage.UsageMetricCalculator;
import com.akto.usage.UsageMetricHandler;
import com.akto.testing.workflow_node_executor.Utils;
import com.akto.utils.jobs.JobUtils;
import com.akto.util.AccountTask;
import com.akto.util.ConnectionInfo;
import com.akto.util.EmailAccountName;
Expand All @@ -91,7 +88,6 @@
import com.akto.util.tasks.OrganizationTask;
import com.akto.utils.*;
import com.akto.util.DashboardMode;
import com.akto.utils.scripts.FixMultiSTIs;
import com.akto.utils.crons.SyncCron;
import com.akto.utils.crons.TokenGeneratorCron;
import com.akto.utils.crons.UpdateSensitiveInfoInApiInfo;
Expand Down Expand Up @@ -126,7 +122,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.print.attribute.standard.Severity;
import javax.servlet.ServletContextListener;
import java.io.*;
import java.net.URI;
Expand Down Expand Up @@ -912,7 +907,7 @@ public void run() {
public void accept(Account t) {
webhookSender();
}
}, "webhook-sener");
}, "webhook-sender");
}
}, 0, 1, TimeUnit.HOURS);
}
Expand Down Expand Up @@ -1910,10 +1905,17 @@ public void contextInitialized(javax.servlet.ServletContextEvent sce) {
e.printStackTrace();
}

boolean runJobFunctions = JobUtils.getRunJobFunctions();
boolean runJobFunctionsAnyway = JobUtils.getRunJobFunctionsAnyway();

executorService.schedule(new Runnable() {
public void run() {
DaoInit.init(new ConnectionString(mongoURI), ReadPreference.primary());

ReadPreference readPreference = ReadPreference.primary();
if (runJobFunctions) {
readPreference = ReadPreference.secondary();
}
DaoInit.init(new ConnectionString(mongoURI), readPreference);

connectedToMongo = false;
do {
Expand All @@ -1933,28 +1935,57 @@ public void run() {
public void accept(Account account) {
AccountSettingsDao.instance.getStats();
Intercom.setToken(System.getenv("INTERCOM_TOKEN"));
runInitializerFunctions();
setDashboardVersionForAccount();
}
}, "context-initializer");

if (DashboardMode.isMetered()) {
setupUsageScheduler();
setupUsageSyncScheduler();
}
trimCappedCollections();
setUpPiiAndTestSourcesScheduler();
setUpTrafficAlertScheduler();
// setUpAktoMixpanelEndpointsScheduler();
SingleTypeInfo.init();
setUpDailyScheduler();
setUpWebhookScheduler();
setUpDefaultPayloadRemover();
setUpTestEditorTemplatesScheduler();
setUpDependencyFlowScheduler();
tokenGeneratorCron.tokenGeneratorScheduler();
crons.deleteTestRunsScheduler();
updateSensitiveInfoInApiInfo.setUpSensitiveMapInApiInfoScheduler();
syncCronInfo.setUpUpdateCronScheduler();

int now = Context.now();
if (runJobFunctions || runJobFunctionsAnyway) {

logger.info("Starting init functions and scheduling jobs at " + now);

AccountTask.instance.executeTask(new Consumer<Account>() {
@Override
public void accept(Account account) {
runInitializerFunctions();
}
}, "context-initializer-secondary");

if (DashboardMode.isMetered()) {
setupUsageScheduler();
setupUsageSyncScheduler();
}
trimCappedCollections();
setUpPiiAndTestSourcesScheduler();
setUpTrafficAlertScheduler();
// setUpAktoMixpanelEndpointsScheduler();
setUpDailyScheduler();
setUpWebhookScheduler();
setUpDefaultPayloadRemover();
setUpTestEditorTemplatesScheduler();
setUpDependencyFlowScheduler();
tokenGeneratorCron.tokenGeneratorScheduler();
crons.deleteTestRunsScheduler();
updateSensitiveInfoInApiInfo.setUpSensitiveMapInApiInfoScheduler();
syncCronInfo.setUpUpdateCronScheduler();
updateApiGroupsForAccounts();
setUpUpdateCustomCollections();
setUpFillCollectionIdArrayJob();
setupAutomatedApiGroupsScheduler();
/*
* This is a temporary job.
* TODO: Remove this once traffic pipeline is cleaned.
*/
CleanInventory.cleanInventoryJobRunner();

int now2 = Context.now();
int diffNow = now2 - now;
logger.info(String.format("Completed init functions and scheduling jobs at %d , time taken : %d", now2, diffNow));
} else {
logger.info("Skipping init functions and scheduling jobs at " + now);
}
// setUpAktoMixpanelEndpointsScheduler();
//fetchGithubZip();
if(isSaas){
Expand All @@ -1965,19 +1996,9 @@ public void accept(Account account) {
loggerMaker.errorAndAddToDb("Failed to initialize Auth0 due to: " + e.getMessage(), LogDb.DASHBOARD);
}
}
updateApiGroupsForAccounts();
setUpUpdateCustomCollections();
setUpFillCollectionIdArrayJob();
setupAutomatedApiGroupsScheduler();
/*
* This is a temporary job.
* TODO: Remove this once traffic pipeline is cleaned.
*/
CleanInventory.cleanInventoryJobRunner();
}
}, 0, TimeUnit.SECONDS);


}


Expand Down Expand Up @@ -2358,14 +2379,6 @@ public void runInitializerFunctions() {
loggerMaker.errorAndAddToDb(e,"error while setting up dashboard: " + e.toString(), LogDb.DASHBOARD);
}

try {
loggerMaker.infoAndAddToDb("Updating account version for " + Context.accountId.get(), LogDb.DASHBOARD);
AccountSettingsDao.instance.updateVersion(AccountSettings.DASHBOARD_VERSION);
} catch (Exception e) {
loggerMaker.errorAndAddToDb(e,"error while updating dashboard version: " + e.toString(), LogDb.DASHBOARD);
}


if(DashboardMode.isOnPremDeployment()) {
telemetryExecutorService.scheduleAtFixedRate(() -> {
boolean dibs = callDibs(Cluster.TELEMETRY_CRON, 60, 60);
Expand All @@ -2380,6 +2393,14 @@ public void runInitializerFunctions() {
}
}

// Records the dashboard build version (AccountSettings.DASHBOARD_VERSION) in the
// settings document of the account currently bound to Context. Best-effort: any
// failure is logged via loggerMaker and swallowed so account initialization
// proceeds regardless.
// NOTE(review): relies on Context.accountId being set by the caller (appears to
// run inside an AccountTask consumer) — confirm before invoking elsewhere.
private static void setDashboardVersionForAccount(){
    try {
        loggerMaker.infoAndAddToDb("Updating account version for " + Context.accountId.get(), LogDb.DASHBOARD);
        AccountSettingsDao.instance.updateVersion(AccountSettings.DASHBOARD_VERSION);
    } catch (Exception e) {
        // Swallow deliberately: a version-stamp failure must not abort startup.
        loggerMaker.errorAndAddToDb(e,"error while updating dashboard version: " + e.toString(), LogDb.DASHBOARD);
    }
}

public static int burpPluginVersion = -1;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,9 @@
import java.util.function.Consumer;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.akto.dao.SensitiveSampleDataDao;
import com.akto.dao.SingleTypeInfoDao;
import com.akto.dao.context.Context;
Expand All @@ -24,13 +27,18 @@
public class CleanInventory {

private static final LoggerMaker loggerMaker = new LoggerMaker(CleanInventory.class, LogDb.DASHBOARD);
private static final Logger logger = LoggerFactory.getLogger(CleanInventory.class);

final static ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);

public static void cleanInventoryJobRunner() {

scheduler.scheduleAtFixedRate(new Runnable() {
public void run() {

int now = Context.now();
logger.info("Starting cleanInventoryJob for all accounts at " + now);

AccountTask.instance.executeTask(new Consumer<Account>() {
@Override
public void accept(Account t) {
Expand All @@ -41,6 +49,10 @@ public void accept(Account t) {
}
}
}, "clean-inventory-job");

int now2 = Context.now();
int diffNow = now2-now;
logger.info(String.format("Completed cleanInventoryJob for all accounts at %d , time taken : %d", now2, diffNow));
}
}, 0, 5, TimeUnit.HOURS);

Expand Down
22 changes: 22 additions & 0 deletions apps/dashboard/src/main/java/com/akto/utils/jobs/JobUtils.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
package com.akto.utils.jobs;

import com.akto.util.DashboardMode;

/**
 * Static helpers that decide whether background job functions should run in
 * this dashboard instance.
 *
 * <p>Both checks deliberately fail open (return {@code true}) if the
 * environment or deployment mode cannot be determined, so jobs are not
 * silently skipped on misconfiguration.
 */
public class JobUtils {

    // Noninstantiable utility class: all members are static.
    private JobUtils() {
        throw new AssertionError("JobUtils must not be instantiated");
    }

    /**
     * Returns whether this instance is explicitly designated to run job
     * functions, controlled by the {@code AKTO_RUN_JOB} environment variable.
     *
     * @return {@code true} if {@code AKTO_RUN_JOB} is set to {@code "true"}
     *         (case-insensitive); {@code false} when unset or any other value;
     *         {@code true} if the environment cannot be read at all.
     */
    public static boolean getRunJobFunctions() {
        try {
            // Boolean.parseBoolean never throws; the guard covers the case
            // where System.getenv() itself fails (e.g. a SecurityManager).
            return Boolean.parseBoolean(System.getenv().getOrDefault("AKTO_RUN_JOB", "false"));
        } catch (Exception e) {
            // Fail open: if we cannot read the environment, run the jobs.
            return true;
        }
    }

    /**
     * Returns whether job functions should run regardless of
     * {@link #getRunJobFunctions()}: on-prem deployments and any non-SaaS
     * deployment always run jobs in-process.
     *
     * @return {@code true} for on-prem or non-SaaS deployments, or if the
     *         deployment mode cannot be determined; {@code false} only for a
     *         SaaS deployment that is not on-prem.
     */
    public static boolean getRunJobFunctionsAnyway() {
        try {
            return DashboardMode.isOnPremDeployment() || !DashboardMode.isSaasDeployment();
        } catch (Exception e) {
            // Fail open on any mode-detection error.
            return true;
        }
    }
}

0 comments on commit 56cd17d

Please sign in to comment.