Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

multiple account handling for source-code-analyser #1703

Open
wants to merge 10 commits into
base: master
Choose a base branch
from
35 changes: 2 additions & 33 deletions .github/workflows/staging.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,27 +25,8 @@ jobs:
- name: Convert github branch name to be compatible with docker tag name convention and generate tag name
id: docker_tag
run: echo "IMAGE_TAG=a-$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_OUTPUT
- name: Download Akto templates zip and PII files
working-directory: ./apps/dashboard/src/main/resources
run: |
wget -O tests-library-master.zip https://github.com/akto-api-security/tests-library/archive/refs/heads/master.zip
wget -O general.json https://raw.githubusercontent.com/akto-api-security/pii-types/master/general.json
wget -O fintech.json https://raw.githubusercontent.com/akto-api-security/akto/master/pii-types/fintech.json
wget -O filetypes.json https://raw.githubusercontent.com/akto-api-security/akto/master/pii-types/filetypes.json
- name: Prepare Dashboard polaris UI
working-directory: ./apps/dashboard/web/polaris_web
run: npm install && export RELEASE_VERSION=${{steps.docker_tag.outputs.IMAGE_TAG}} && npm run build
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{secrets.AWS_ACCESS_KEY_ID}}
aws-secret-access-key: ${{secrets.AWS_SECRET_ACCESS_KEY}}
aws-region: ap-south-1

- name: Deploy polaris site to S3 bucket
run: aws s3 sync ./apps/dashboard/web/polaris_web/web/dist s3://dashboard-on-cdn/polaris_web/${{steps.docker_tag.outputs.IMAGE_TAG}}/dist --delete

- run: mvn package -Dakto-image-tag=${{ github.event.inputs.Tag }} -Dakto-build-time=$(eval "date +%s") -Dakto-release-version=${{steps.docker_tag.outputs.IMAGE_TAG}}
- run: mvn package -Dakto-image-tag=${{ github.event.inputs.Tag }} -Dakto-build-time=$(eval "date +%s") -Dakto-release-version=${{steps.docker_tag.outputs.IMAGE_TAG}} -DskipTests=true
- name: DockerHub login
env:
DOCKER_USERNAME: ${{secrets.DOCKER_USERNAME}}
Expand All @@ -65,19 +46,7 @@ jobs:
echo $IMAGE_TAG >> $GITHUB_STEP_SUMMARY
docker buildx create --use
# Build a docker container and push it to DockerHub
cd apps/dashboard
docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$ECR_REPOSITORY-dashboard:$IMAGE_TAG $IMAGE_TAG_DASHBOARD . --push
cd ../testing
docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-api-testing:$IMAGE_TAG $IMAGE_TAG_TESTING . --push
cd ../testing-cli
docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-api-testing-cli:$IMAGE_TAG $IMAGE_TAG_TESTING_CLI . --push
cd ../billing
docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-billing:$IMAGE_TAG . --push
cd ../internal
docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-internal:$IMAGE_TAG . --push
cd ../api-threat-detection
docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-api-protection:$IMAGE_TAG . --push
cd ../source-code-analyser
cd apps/source-code-analyser
docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/source-code-analyser:$IMAGE_TAG . --push

- name: Set up JDK 11
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -368,12 +368,12 @@ public void setProjectName(String projectName) {
this.projectName = projectName;
}

public boolean isLastBatch() {
// Returns whether this batch is the final one in the current sync sequence.
// Bean-style accessor (getIsLastBatch) so reflection-based serializers map it to the
// "isLastBatch" property consistently with setIsLastBatch below.
public boolean getIsLastBatch() {
    return isLastBatch;
}

public void setLastBatch(boolean lastBatch) {
isLastBatch = lastBatch;
// Bean-style setter for the last-batch flag; paired with getIsLastBatch.
public void setIsLastBatch(boolean isLastBatch) {
    this.isLastBatch = isLastBatch;
}

public CodeAnalysisRepo getCodeAnalysisRepo() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@ public BasicDBObject getCodeAnalysisBody(String path) {
requestBody.put("repoName",this.getRepoToBeAnalysed().getRepoName());
requestBody.put("bitbucketHost",BITBUCKET_URL);
requestBody.put("is_bitbucket",true);
requestBody.put("is_aktogpt",false);
return requestBody;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ public String getRepoUrl() {

@Override
public BasicDBObject getCodeAnalysisBody(String path) {
if (path == null || StringUtils.isEmpty(GITHUB_ACCESS_TOKEN)) {
if (path == null) {
return null;
}

Expand All @@ -43,6 +43,7 @@ public BasicDBObject getCodeAnalysisBody(String path) {
requestBody.put("orgName",this.getRepoToBeAnalysed().getProjectName());
requestBody.put("repoName",this.getRepoToBeAnalysed().getRepoName());
requestBody.put("is_github",true);
requestBody.put("is_aktogpt",false);
return requestBody;
}

Expand Down
107 changes: 78 additions & 29 deletions apps/source-code-analyser/src/main/java/com/akto/Main.java
Original file line number Diff line number Diff line change
@@ -1,9 +1,16 @@
package com.akto;

import com.akto.dao.AccountsDao;
import com.akto.dao.context.Context;
import com.akto.data_actor.DataActor;
import com.akto.data_actor.DataActorFactory;
import com.akto.dto.CodeAnalysisRepo;
import com.akto.log.LoggerMaker;
import com.akto.util.AccountTask;
import com.mongodb.ConnectionString;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;

Expand All @@ -12,6 +19,39 @@ public class Main {
private static final LoggerMaker loggerMaker = new LoggerMaker(Main.class, LoggerMaker.LogDb.RUNTIME);
private static final DataActor dataActor = DataActorFactory.fetchInstance();
private static final int SLEEP_TIME = 10 * 1000;
private static final Logger logger = LoggerFactory.getLogger(Main.class);

/**
 * Establishes a MongoDB connection using the {@code AKTO_MONGO_CONN} environment
 * variable, retrying once per second until a probe query succeeds.
 *
 * @return {@code true} once connected; {@code false} immediately when
 *         {@code AKTO_MONGO_CONN} is unset or empty (caller then falls back to
 *         the data-actor / cyborg path).
 */
private static boolean connectToMongo() {
    String mongoURI = System.getenv("AKTO_MONGO_CONN");
    if (StringUtils.isEmpty(mongoURI)) {
        return false;
    }

    boolean connectedToMongo = false;
    boolean daoInitialized = false;
    do {
        try {
            // DaoInit.init must run exactly once, even across retry iterations.
            if (!daoInitialized) {
                DaoInit.init(new ConnectionString(mongoURI));
                daoInitialized = true;
            }
            // Cheap query used purely as a connectivity probe.
            AccountsDao.instance.getStats();
            connectedToMongo = true;
            // Fixed: previous message ("triggering merging cron for db abstractor")
            // was copy-pasted from another service and misleading in these logs.
            logger.info("connected to mongo for source-code-analyser at {}", Context.now());
        } catch (Exception e) {
            logger.error("error connecting to mongo for source-code-analyser", e);
        } finally {
            try {
                // Throttle retries (and pace the first post-connect iteration).
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // Never swallow an interrupt: restore the flag so callers can observe it.
                Thread.currentThread().interrupt();
            }
        }
    } while (!connectedToMongo);
    return connectedToMongo;
}


private static List<CodeAnalysisRepo> fetchReposToSync() {
List<CodeAnalysisRepo> repos = dataActor.findReposToRun();
Expand All @@ -21,39 +61,48 @@ private static List<CodeAnalysisRepo> fetchReposToSync() {
return null;
}

public static void main(String[] args) throws InterruptedException {
while (true) {
if (!BitbucketRepo.doesEnvVariablesExists() && !GithubRepo.doesEnvVariablesExists()) {
loggerMaker.infoAndAddToDb("No tokens found");
Thread.sleep(SLEEP_TIME);
continue;
}
List<CodeAnalysisRepo> repos = fetchReposToSync();
if (repos == null) {
loggerMaker.infoAndAddToDb("No repos to run, skipping");
Thread.sleep(SLEEP_TIME);
continue;
private static void runForRepo(List<CodeAnalysisRepo> repos) {
if (repos == null) {
loggerMaker.infoAndAddToDb("No repos to run, skipping");
return;
}
if (BitbucketRepo.doesEnvVariablesExists()) {
BitbucketRepo.fetchAllProjectKeys();
}
for (CodeAnalysisRepo repo : repos) {
SourceCodeAnalyserRepo sourceCodeAnalyserRepo;
if (repo.getSourceCodeType() == CodeAnalysisRepo.SourceCodeType.BITBUCKET) {
sourceCodeAnalyserRepo = new BitbucketRepo(repo);
} else {
sourceCodeAnalyserRepo = new GithubRepo(repo);
}
if (BitbucketRepo.doesEnvVariablesExists()) {
BitbucketRepo.fetchAllProjectKeys();
}
for (CodeAnalysisRepo repo : repos) {
SourceCodeAnalyserRepo sourceCodeAnalyserRepo;
if (repo.getSourceCodeType() == CodeAnalysisRepo.SourceCodeType.BITBUCKET) {
sourceCodeAnalyserRepo = new BitbucketRepo(repo);
} else {
sourceCodeAnalyserRepo = new GithubRepo(repo);
}
try {
sourceCodeAnalyserRepo.fetchEndpointsUsingAnalyser();
} catch (Exception e) {
loggerMaker.errorAndAddToDb("Error while fetching endpoints:" + e.getMessage());
}
try {
sourceCodeAnalyserRepo.fetchEndpointsUsingAnalyser();
} catch (Exception e) {
loggerMaker.errorAndAddToDb("Error while fetching endpoints:" + e.getMessage());
}
}
}

/**
 * Entry point: polls for code-analysis repos every {@code SLEEP_TIME} ms, forever.
 *
 * When a direct Mongo connection is available (AKTO_MONGO_CONN set), repos are
 * fetched and processed once per account via {@link AccountTask}; otherwise a
 * single fetch-and-run cycle is performed against the data actor.
 *
 * @param args unused
 * @throws InterruptedException if the polling sleep is interrupted
 */
public static void main(String[] args) throws InterruptedException {
    // When connected to mongo directly, fetch and run for all accounts.
    boolean isConnectedToMongo = connectToMongo();

    // Removed: commented-out env-token guard (dead code) that previously
    // skipped the loop when neither Bitbucket nor GitHub tokens were present.
    while (true) {
        if (isConnectedToMongo) {
            AccountTask.instance.executeTask(t -> {
                List<CodeAnalysisRepo> repos = fetchReposToSync();
                runForRepo(repos);
            }, "initialize-runtime-task");
        } else {
            List<CodeAnalysisRepo> repos = fetchReposToSync();
            runForRepo(repos);
        }
        Thread.sleep(SLEEP_TIME);
    }
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@

abstract public class SourceCodeAnalyserRepo {
private CodeAnalysisRepo repoToBeAnalysed;
private boolean isAktoGPTEnabled;
private static final LoggerMaker loggerMaker = new LoggerMaker(SourceCodeAnalyserRepo.class, LoggerMaker.LogDb.RUNTIME);
abstract public String getToken();
abstract public String getRepoUrl();
Expand Down Expand Up @@ -58,16 +59,18 @@ abstract public class SourceCodeAnalyserRepo {
public String downloadRepository () {
String finalUrl = this.getRepoUrl();
String token = this.getToken();
if (finalUrl == null || token == null) {
if (finalUrl == null) {
return null;
}
String outputFilePath = repoToBeAnalysed.getRepoName()+ ".zip"; // The local file where the repository will be saved
String outputFilePath = System.getenv("DOCKER_VOLUME") + repoToBeAnalysed.getRepoName()+ ".zip"; // The local file where the repository will be saved
File file = new File(outputFilePath);

Request.Builder builder = new Request.Builder();
builder.url(finalUrl);
builder.get();
builder.addHeader("Authorization", "Bearer " + token);
if (token != null) {
builder.addHeader("Authorization", "Bearer " + token);
}
Request request = builder.build();

try {
Expand Down Expand Up @@ -158,6 +161,13 @@ public void fetchEndpointsUsingAnalyser() {
syncRepoToDashboard(originalHttpResponse.getBody(), repoToBeAnalysed);
} catch (Exception e) {
loggerMaker.errorAndAddToDb("Error while fetching api's from code-analysis for repository:" + repoToBeAnalysed);
} finally {
if (repositoryPath != null) {
File file = new File(repositoryPath);
if(file.delete()) {
loggerMaker.infoAndAddToDb("successfully deleted the zip file", LoggerMaker.LogDb.RUNTIME);
}
}
}
}

Expand All @@ -172,4 +182,12 @@ public CodeAnalysisRepo getRepoToBeAnalysed() {
// Setter for the repository this analyser instance operates on.
public void setRepoToBeAnalysed(CodeAnalysisRepo repoToBeAnalysed) {
    this.repoToBeAnalysed = repoToBeAnalysed;
}

// Returns whether AktoGPT processing is enabled for this repo analysis run.
public boolean isAktoGPTEnabled() {
    return isAktoGPTEnabled;
}

// Setter for the AktoGPT flag. No `this.` qualifier is needed: the parameter
// (aktoGPTEnabled) does not shadow the field (isAktoGPTEnabled).
public void setAktoGPTEnabled(boolean aktoGPTEnabled) {
    isAktoGPTEnabled = aktoGPTEnabled;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,7 @@ public class ClientActor extends DataActor {

public static String buildDbAbstractorUrl() {
String dbAbsHost = CYBORG_URL;
if (checkAccount()) {
dbAbsHost = System.getenv("DATABASE_ABSTRACTOR_SERVICE_URL");
}
dbAbsHost = System.getenv("DATABASE_ABSTRACTOR_SERVICE_URL");
System.out.println("dbHost value " + dbAbsHost);
if (dbAbsHost.endsWith("/")) {
dbAbsHost = dbAbsHost.substring(0, dbAbsHost.length() - 1);
Expand Down
Loading
Loading