Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

allow code analysis in dashboard #1590

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/staging.yml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ jobs:
- name: Deploy polaris site to S3 bucket
run: aws s3 sync ./apps/dashboard/web/polaris_web/web/dist s3://dashboard-on-cdn/polaris_web/${{steps.docker_tag.outputs.IMAGE_TAG}}/dist --delete

- run: mvn package -Dakto-image-tag=${{ github.event.inputs.Tag }} -Dakto-build-time=$(eval "date +%s") -Dakto-release-version=${{steps.docker_tag.outputs.IMAGE_TAG}}
- run: mvn package -Dakto-image-tag=${{ github.event.inputs.Tag }} -Dakto-build-time=$(eval "date +%s") -Dakto-release-version=${{steps.docker_tag.outputs.IMAGE_TAG}} -DskipTests
- name: DockerHub login
env:
DOCKER_USERNAME: ${{secrets.DOCKER_USERNAME}}
Expand Down
180 changes: 134 additions & 46 deletions apps/dashboard/src/main/java/com/akto/action/CodeAnalysisAction.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import com.akto.dao.*;
import com.akto.dto.*;
import com.akto.dto.type.SingleTypeInfo;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.Code;
import org.bson.types.ObjectId;
Expand Down Expand Up @@ -50,6 +51,10 @@ public class CodeAnalysisAction extends UserAction {
private List<CodeAnalysisApi> codeAnalysisApisList;
private CodeAnalysisRepo.SourceCodeType sourceCodeType;
public static final int MAX_BATCH_SIZE = 100;
private String projectName;
private String repoName;
private boolean isLastBatch;
private CodeAnalysisRepo codeAnalysisRepo;

private static final LoggerMaker loggerMaker = new LoggerMaker(CodeAnalysisAction.class);
private static final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
Expand Down Expand Up @@ -98,8 +103,9 @@ public void sendMixpanelEvent() {
loggerMaker.errorAndAddToDb("Error sending CODE_ANALYSIS_SYNC mixpanel event: " + e.getMessage(), LogDb.DASHBOARD);
}
}

public String syncExtractedAPIs() {
String apiCollectionName = projectName + "/" + repoName;
loggerMaker.infoAndAddToDb("Syncing code analysis endpoints for collection: " + apiCollectionName, LogDb.DASHBOARD);

if (codeAnalysisApisList == null) {
Expand All @@ -108,6 +114,12 @@ public String syncExtractedAPIs() {
return ERROR.toUpperCase();
}

if (codeAnalysisRepo == null) {
loggerMaker.errorAndAddToDb("Code analysis repo is null", LogDb.DASHBOARD);
addActionError("Code analysis repo is null");
return ERROR.toUpperCase();
}

// Ensure batch size is not exceeded
if (codeAnalysisApisList.size() > MAX_BATCH_SIZE) {
String errorMsg = "Code analysis api's sync batch size exceeded. Max Batch size: " + MAX_BATCH_SIZE + " Batch size: " + codeAnalysisApisList.size();
Expand All @@ -122,18 +134,16 @@ public String syncExtractedAPIs() {
codeAnalysisApisMap.put(codeAnalysisApi.generateCodeAnalysisApisMapKey(), codeAnalysisApi);
}

// todo: If API collection does not exist, create it
ApiCollection apiCollection = ApiCollectionsDao.instance.findByName(apiCollectionName);
if (apiCollection == null) {
loggerMaker.errorAndAddToDb("API collection not found " + apiCollectionName, LogDb.DASHBOARD);
addActionError("API collection not found: " + apiCollectionName);
return ERROR.toUpperCase();
apiCollection = new ApiCollection(Context.now(), apiCollectionName, Context.now(), new HashSet<>(), null, 0, false, false);
ApiCollectionsDao.instance.insertOne(apiCollection);
}

/*
* In some cases it is not possible to determine the type of template url from source code
* In such cases, we can use the information from traffic endpoints to match the traffic and source code endpoints
*
*
* Eg:
* Source code endpoints:
* GET /books/STRING -> GET /books/AKTO_TEMPLATE_STR -> GET /books/INTEGER
Expand All @@ -142,7 +152,8 @@ public String syncExtractedAPIs() {
* GET /books/INTEGER -> GET /books/AKTO_TEMPLATE_STR
* POST /city/STRING/district/INTEGER -> POST /city/AKTO_TEMPLATE_STR/district/AKTO_TEMPLATE_STR
*/
List<BasicDBObject> trafficApis = Utils.fetchEndpointsInCollectionUsingHost(apiCollection.getId(), 0);

List<BasicDBObject> trafficApis = ApiCollectionsDao.fetchEndpointsInCollectionUsingHost(apiCollection.getId(), 0, -1, 60 * 24 * 60 * 60);
Map<String, String> trafficApiEndpointAktoTemplateStrToOriginalMap = new HashMap<>();
List<String> trafficApiKeys = new ArrayList<>();
for (BasicDBObject trafficApi: trafficApis) {
Expand Down Expand Up @@ -183,7 +194,7 @@ public String syncExtractedAPIs() {

trafficApiEndpointAktoTemplateStrToOriginalMap.put(trafficApiEndpointAktoTemplateStr, trafficApiEndpoint);
}

Map<String, CodeAnalysisApi> tempCodeAnalysisApisMap = new HashMap<>(codeAnalysisApisMap);
for (Map.Entry<String, CodeAnalysisApi> codeAnalysisApiEntry: codeAnalysisApisMap.entrySet()) {
String codeAnalysisApiKey = codeAnalysisApiEntry.getKey();
Expand All @@ -199,11 +210,11 @@ public String syncExtractedAPIs() {
}

if(codeAnalysisApiEndpointAktoTemplateStr.contains("AKTO_TEMPLATE_STR") && trafficApiEndpointAktoTemplateStrToOriginalMap.containsKey(codeAnalysisApiEndpointAktoTemplateStr)) {
CodeAnalysisApi newCodeAnalysisApi = new CodeAnalysisApi(
codeAnalysisApi.getMethod(),
trafficApiEndpointAktoTemplateStrToOriginalMap.get(codeAnalysisApiEndpointAktoTemplateStr),
codeAnalysisApi.getLocation(), codeAnalysisApi.getRequestBody(), codeAnalysisApi.getResponseBody());
CodeAnalysisApi newCodeAnalysisApi = new CodeAnalysisApi(
codeAnalysisApi.getMethod(),
trafficApiEndpointAktoTemplateStrToOriginalMap.get(codeAnalysisApiEndpointAktoTemplateStr),
codeAnalysisApi.getLocation(), codeAnalysisApi.getRequestBody(), codeAnalysisApi.getResponseBody());

tempCodeAnalysisApisMap.remove(codeAnalysisApiKey);
tempCodeAnalysisApisMap.put(newCodeAnalysisApi.generateCodeAnalysisApisMapKey(), newCodeAnalysisApi);
}
Expand All @@ -224,7 +235,7 @@ public String syncExtractedAPIs() {
for(Map.Entry<String, CodeAnalysisApi> codeAnalysisApiEntry: tempCodeAnalysisApisMap.entrySet()) {
CodeAnalysisApi codeAnalysisApi = codeAnalysisApiEntry.getValue();
String codeAnalysisApiEndpoint = codeAnalysisApi.getEndpoint();

String trafficApiMethod = "", trafficApiEndpoint = "";
try {
String[] trafficApiKeyParts = trafficApiKey.split(" ");
Expand All @@ -237,10 +248,10 @@ public String syncExtractedAPIs() {

if (codeAnalysisApiEndpoint.equals(trafficApiEndpoint)) {
CodeAnalysisApi newCodeAnalysisApi = new CodeAnalysisApi(
trafficApiMethod,
trafficApiEndpoint,
codeAnalysisApi.getLocation(), codeAnalysisApi.getRequestBody(), codeAnalysisApi.getResponseBody());
trafficApiMethod,
trafficApiEndpoint,
codeAnalysisApi.getLocation(), codeAnalysisApi.getRequestBody(), codeAnalysisApi.getResponseBody());

tempCodeAnalysisApisMap.put(newCodeAnalysisApi.generateCodeAnalysisApisMapKey(), newCodeAnalysisApi);
break;
}
Expand All @@ -255,14 +266,16 @@ public String syncExtractedAPIs() {
// ObjectId for new code analysis collection
codeAnalysisCollectionId = new ObjectId();

String projectDir = projectName + "/" + repoName; //todo:

CodeAnalysisCollection codeAnalysisCollection = CodeAnalysisCollectionDao.instance.updateOne(
Filters.eq("codeAnalysisCollectionName", apiCollectionName),
Updates.combine(
Updates.setOnInsert(CodeAnalysisCollection.ID, codeAnalysisCollectionId),
Updates.setOnInsert(CodeAnalysisCollection.NAME, apiCollectionName),
Updates.set(CodeAnalysisCollection.PROJECT_DIR, projectDir),
Updates.setOnInsert(CodeAnalysisCollection.API_COLLECTION_ID, apiCollection.getId())
)
Filters.eq("codeAnalysisCollectionName", apiCollectionName),
Updates.combine(
Updates.setOnInsert(CodeAnalysisCollection.ID, codeAnalysisCollectionId),
Updates.setOnInsert(CodeAnalysisCollection.NAME, apiCollectionName),
Updates.set(CodeAnalysisCollection.PROJECT_DIR, projectDir),
Updates.setOnInsert(CodeAnalysisCollection.API_COLLECTION_ID, apiCollection.getId())
)
);

// Set code analysis collection id if existing collection is updated
Expand All @@ -282,21 +295,21 @@ public String syncExtractedAPIs() {
List<WriteModel<SingleTypeInfo>> bulkUpdatesSTI = new ArrayList<>();

for(Map.Entry<String, CodeAnalysisApi> codeAnalysisApiEntry: codeAnalysisApisMap.entrySet()) {
CodeAnalysisApi codeAnalysisApi = codeAnalysisApiEntry.getValue();
CodeAnalysisApiInfo.CodeAnalysisApiInfoKey codeAnalysisApiInfoKey = new CodeAnalysisApiInfo.CodeAnalysisApiInfoKey(codeAnalysisCollectionId, codeAnalysisApi.getMethod(), codeAnalysisApi.getEndpoint());
CodeAnalysisApi codeAnalysisApi = codeAnalysisApiEntry.getValue();
CodeAnalysisApiInfo.CodeAnalysisApiInfoKey codeAnalysisApiInfoKey = new CodeAnalysisApiInfo.CodeAnalysisApiInfoKey(codeAnalysisCollectionId, codeAnalysisApi.getMethod(), codeAnalysisApi.getEndpoint());

bulkUpdates.add(
bulkUpdates.add(
new UpdateOneModel<>(
Filters.eq(CodeAnalysisApiInfo.ID, codeAnalysisApiInfoKey),
Updates.combine(
Updates.setOnInsert(CodeAnalysisApiInfo.ID, codeAnalysisApiInfoKey),
Updates.set(CodeAnalysisApiInfo.LOCATION, codeAnalysisApi.getLocation()),
Updates.setOnInsert(CodeAnalysisApiInfo.DISCOVERED_TS, now),
Updates.set(CodeAnalysisApiInfo.LAST_SEEN_TS, now)
),
new UpdateOptions().upsert(true)
Filters.eq(CodeAnalysisApiInfo.ID, codeAnalysisApiInfoKey),
Updates.combine(
Updates.setOnInsert(CodeAnalysisApiInfo.ID, codeAnalysisApiInfoKey),
Updates.set(CodeAnalysisApiInfo.LOCATION, codeAnalysisApi.getLocation()),
Updates.setOnInsert(CodeAnalysisApiInfo.DISCOVERED_TS, now),
Updates.set(CodeAnalysisApiInfo.LAST_SEEN_TS, now)
),
new UpdateOptions().upsert(true)
)
);
);

String requestBody = codeAnalysisApi.getRequestBody();
String responseBody = codeAnalysisApi.getResponseBody();
Expand Down Expand Up @@ -337,15 +350,27 @@ public String syncExtractedAPIs() {
loggerMaker.infoAndAddToDb("Updated code analysis collection: " + apiCollectionName, LogDb.DASHBOARD);
loggerMaker.infoAndAddToDb("Source code endpoints count: " + codeAnalysisApisMap.size(), LogDb.DASHBOARD);

// Send mixpanel event
int accountId = Context.accountId.get();
executorService.schedule( new Runnable() {
public void run() {
Context.accountId.set(accountId);
sendMixpanelEvent();
if (isLastBatch) {//Remove scheduled state from codeAnalysisRepo
Bson sourceCodeFilter;
if (this.codeAnalysisRepo.getSourceCodeType() == CodeAnalysisRepo.SourceCodeType.BITBUCKET) {
sourceCodeFilter = Filters.or(
Filters.eq(CodeAnalysisRepo.SOURCE_CODE_TYPE, this.codeAnalysisRepo.getSourceCodeType()),
Filters.exists(CodeAnalysisRepo.SOURCE_CODE_TYPE, false)

);
} else {
sourceCodeFilter = Filters.eq(CodeAnalysisRepo.SOURCE_CODE_TYPE, this.codeAnalysisRepo.getSourceCodeType());
}
}, 0, TimeUnit.SECONDS);


Bson filters = Filters.and(
Filters.eq(CodeAnalysisRepo.REPO_NAME, this.codeAnalysisRepo.getRepoName()),
Filters.eq(CodeAnalysisRepo.PROJECT_NAME, this.codeAnalysisRepo.getProjectName()),
sourceCodeFilter
);

CodeAnalysisRepoDao.instance.updateOneNoUpsert(filters, Updates.set(CodeAnalysisRepo.LAST_RUN, Context.now()));
loggerMaker.infoAndAddToDb("Updated last run for project:" + codeAnalysisRepo.getProjectName() + " repo:" + codeAnalysisRepo.getRepoName(), LogDb.DASHBOARD);
}

return SUCCESS.toUpperCase();
}
Expand Down Expand Up @@ -396,7 +421,6 @@ public String runCodeAnalysisRepo() {
return SUCCESS.toUpperCase();
}

CodeAnalysisRepo codeAnalysisRepo;
public String deleteCodeAnalysisRepo() {
if (codeAnalysisRepo == null) {
addActionError("Can't delete null repo");
Expand Down Expand Up @@ -430,6 +454,46 @@ public String fetchCodeAnalysisRepos() {
return SUCCESS.toUpperCase();
}

/**
 * Records that the configured code-analysis repo has just been run by setting its
 * LAST_RUN field to the current epoch time.
 *
 * Matches the repo document by repo name, project name and source code type.
 * For BITBUCKET the filter also matches documents missing SOURCE_CODE_TYPE
 * (legacy documents written before the field existed — see the exists(false) clause).
 *
 * @return SUCCESS on update, ERROR when no repo was supplied in the request
 */
public String updateRepoLastRun() {
    // Guard clause: the action param must be bound before we can build any filter.
    if (codeAnalysisRepo == null) {
        loggerMaker.errorAndAddToDb("Code analysis repo is null", LogDb.DASHBOARD);
        addActionError("Code analysis repo is null");
        return ERROR.toUpperCase();
    }

    CodeAnalysisRepo.SourceCodeType repoSourceType = codeAnalysisRepo.getSourceCodeType();
    // Older documents may lack SOURCE_CODE_TYPE entirely; those are matched only
    // when the requested type is BITBUCKET.
    Bson sourceCodeFilter = (repoSourceType == CodeAnalysisRepo.SourceCodeType.BITBUCKET)
            ? Filters.or(
                    Filters.eq(CodeAnalysisRepo.SOURCE_CODE_TYPE, repoSourceType),
                    Filters.exists(CodeAnalysisRepo.SOURCE_CODE_TYPE, false))
            : Filters.eq(CodeAnalysisRepo.SOURCE_CODE_TYPE, repoSourceType);

    Bson repoFilter = Filters.and(
            Filters.eq(CodeAnalysisRepo.REPO_NAME, codeAnalysisRepo.getRepoName()),
            Filters.eq(CodeAnalysisRepo.PROJECT_NAME, codeAnalysisRepo.getProjectName()),
            sourceCodeFilter);

    // No upsert: only stamp repos that already exist.
    CodeAnalysisRepoDao.instance.updateOneNoUpsert(repoFilter, Updates.set(CodeAnalysisRepo.LAST_RUN, Context.now()));
    loggerMaker.infoAndAddToDb("Updated last run for project:" + codeAnalysisRepo.getProjectName() + " repo:" + codeAnalysisRepo.getRepoName(), LogDb.DASHBOARD);
    return SUCCESS.toUpperCase();
}

List<CodeAnalysisRepo> reposToRun = new ArrayList<>();

/**
 * Loads all code-analysis repos whose scheduled run is pending, i.e. whose
 * SCHEDULE_TIME is strictly greater than their LAST_RUN timestamp, into
 * {@code reposToRun} (exposed to the JSON result via {@link #getReposToRun()}).
 *
 * The comparison is between two fields of the same document, so a query-side
 * {@code $expr} aggregation expression is required rather than a plain filter.
 *
 * @return SUCCESS always; the result payload is the reposToRun list
 */
public String findReposToRun() {
    // Build the $gt expression programmatically instead of concatenating a JSON
    // string and re-parsing it with Document.parse — same query, but no string
    // quoting pitfalls and no parse step.
    Bson pendingFilter = Filters.expr(
            new Document("$gt", java.util.Arrays.asList(
                    "$" + CodeAnalysisRepo.SCHEDULE_TIME,
                    "$" + CodeAnalysisRepo.LAST_RUN)));
    reposToRun = CodeAnalysisRepoDao.instance.findAll(pendingFilter);
    return SUCCESS.toUpperCase();
}


public String getProjectDir() {
return projectDir;
Expand Down Expand Up @@ -474,4 +538,28 @@ public CodeAnalysisRepo.SourceCodeType getSourceCodeType() {
/**
 * Sets the source code type (e.g. BITBUCKET) used by repo-related actions.
 *
 * @param sourceCodeType the repo's source control type
 */
public void setSourceCodeType(CodeAnalysisRepo.SourceCodeType sourceCodeType) {
    this.sourceCodeType = sourceCodeType;
}

/**
 * Returns the repos with a pending scheduled run, as populated by
 * {@code findReposToRun()}; empty until that action has executed.
 */
public List<CodeAnalysisRepo> getReposToRun() {
    return reposToRun;
}

/**
 * Sets the repo name; combined with projectName as "projectName/repoName" to
 * derive the API collection name during sync.
 *
 * @param repoName repository name from the request
 */
public void setRepoName(String repoName) {
    this.repoName = repoName;
}

/**
 * Sets the project name; combined with repoName as "projectName/repoName" to
 * derive the API collection name during sync.
 *
 * @param projectName project name from the request
 */
public void setProjectName(String projectName) {
    this.projectName = projectName;
}

/**
 * Returns whether the current sync request is the final batch of extracted
 * APIs; when true, the sync also stamps the repo's LAST_RUN timestamp.
 */
public boolean getIsLastBatch() {
    return isLastBatch;
}

/**
 * Marks whether this sync request carries the final batch of extracted APIs.
 *
 * @param isLastBatch true if no further batches will follow
 */
public void setIsLastBatch(boolean isLastBatch) {
    this.isLastBatch = isLastBatch;
}

/**
 * Returns the repo targeted by the current action (bound from the request),
 * or null when none was supplied.
 */
public CodeAnalysisRepo getCodeAnalysisRepo() {
    return codeAnalysisRepo;
}
}
22 changes: 22 additions & 0 deletions apps/dashboard/src/main/resources/struts.xml
Original file line number Diff line number Diff line change
Expand Up @@ -6315,6 +6315,28 @@
</result>
</action>

<!-- Lists code-analysis repos whose scheduled run time is newer than their last run
     (i.e. a pending analysis request); backed by CodeAnalysisAction.findReposToRun. -->
<action name="api/findReposToRun" class="com.akto.action.CodeAnalysisAction" method="findReposToRun">
<interceptor-ref name="json"/>
<interceptor-ref name="defaultStack" />
<result name="SUCCESS" type="json"/>
<result name="ERROR" type="json">
<param name="statusCode">422</param>
<param name="ignoreHierarchy">false</param>
<param name="includeProperties">^actionErrors.*</param>
</result>
</action>

<!-- Stamps a repo's lastRun timestamp to the current time; 422 with actionErrors
     when no repo is supplied; backed by CodeAnalysisAction.updateRepoLastRun. -->
<action name="api/updateRepoLastRun" class="com.akto.action.CodeAnalysisAction" method="updateRepoLastRun">
<interceptor-ref name="json"/>
<interceptor-ref name="defaultStack" />
<result name="SUCCESS" type="json"/>
<result name="ERROR" type="json">
<param name="statusCode">422</param>
<param name="ignoreHierarchy">false</param>
<param name="includeProperties">^actionErrors.*</param>
</result>
</action>

<action name="tools/convertSamleDataToBurpRequest" class="com.akto.action.ExportSampleDataAction" method="generateBurpRequest">
<interceptor-ref name="json"/>
<interceptor-ref name="defaultStack" />
Expand Down
Loading
Loading