From 51d84907b96af1091fdc017165a4f3c94bc9c090 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Tue, 27 Aug 2024 17:24:31 +0530 Subject: [PATCH 01/11] add runtime traffic metrics insertion and fetch logic --- .../traffic_metrics/TrafficMetricsAction.java | 68 +++++++++++++++ apps/dashboard/src/main/resources/struts.xml | 22 +++++ .../main/java/com/akto/action/DbAction.java | 19 +++++ .../src/main/resources/struts.xml | 11 +++ libs/dao/src/main/java/com/akto/DaoInit.java | 6 +- .../traffic_metrics/RuntimeMetricsDao.java | 82 +++++++++++++++++++ .../dto/traffic_metrics/RuntimeMetrics.java | 52 ++++++++++++ .../java/com/akto/data_actor/ClientActor.java | 18 ++++ .../java/com/akto/data_actor/DataActor.java | 5 ++ .../java/com/akto/data_actor/DbActor.java | 5 ++ .../java/com/akto/data_actor/DbLayer.java | 32 ++++++++ .../java/com/akto/metrics/AllMetrics.java | 30 +++++++ 12 files changed, 349 insertions(+), 1 deletion(-) create mode 100644 libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java create mode 100644 libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java diff --git a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java index e85ca6b47f..95ef4bdfb4 100644 --- a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java +++ b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java @@ -1,13 +1,18 @@ package com.akto.action.traffic_metrics; import com.akto.action.UserAction; +import com.akto.dao.traffic_metrics.RuntimeMetricsDao; import com.akto.dao.traffic_metrics.TrafficMetricsDao; +import com.akto.dto.traffic_metrics.RuntimeMetrics; import com.akto.dto.traffic_metrics.TrafficMetrics; import com.mongodb.BasicDBObject; import com.mongodb.client.MongoCursor; import com.mongodb.client.model.Accumulators; import com.mongodb.client.model.Aggregates; import com.mongodb.client.model.Filters; +import com.mongodb.client.model.Projections; +import com.mongodb.client.model.Sorts; + import org.bson.Document; import org.bson.conversions.Bson; @@ -16,11 +21,16 @@ public class TrafficMetricsAction extends UserAction { private int startTimestamp; private int endTimestamp; + private String instanceId; + private List names; private String groupBy; private String host; private int vxlanID; + List runtimeMetrics; + List instanceIds; + public static final String ID = "_id."; @@ -113,6 +123,39 @@ public String execute() { return SUCCESS.toUpperCase(); } + public String fetchRuntimeInstances() { + instanceIds = new ArrayList<>(); + Bson filters = RuntimeMetricsDao.buildFilters(startTimestamp, endTimestamp); + runtimeMetrics = RuntimeMetricsDao.instance.findAll(filters, 0, 0, Sorts.descending("timestamp"), Projections.include("instanceId")); + for (RuntimeMetrics metric: runtimeMetrics) { + instanceIds.add(metric.getInstanceId()); + } + + return SUCCESS.toUpperCase(); + } + + public String fetchRuntimeMetrics() { + Bson filters = RuntimeMetricsDao.buildFilters(startTimestamp, endTimestamp, instanceId); + // runtimeMetrics = RuntimeMetricsDao.instance.findAll(filters, 0, 0, Sorts.descending("timestamp")); + runtimeMetrics = new ArrayList<>(); + + try (MongoCursor cursor = RuntimeMetricsDao.instance.getMCollection().aggregate( + Arrays.asList( + Aggregates.match(filters), + Aggregates.sort(Sorts.descending("timestamp")), + Aggregates.group(new BasicDBObject("name", "$name"), 
Accumulators.first("latestDoc", "$$ROOT")) + ), BasicDBObject.class + ).cursor()) { + while (cursor.hasNext()) { + BasicDBObject basicDBObject = cursor.next(); + BasicDBObject latestDoc = (BasicDBObject) basicDBObject.get("latestDoc"); + runtimeMetrics.add(new RuntimeMetrics(latestDoc.getString("name"), 0, instanceId, latestDoc.getDouble("val"))); + } + } + + return SUCCESS.toUpperCase(); + } + public String fetchTrafficMetricsDesciptions(){ names = Arrays.asList(TrafficMetrics.Name.values()); return SUCCESS.toUpperCase(); @@ -150,4 +193,29 @@ public void setHost(String host) { public void setVxlanID(int vxlanID) { this.vxlanID = vxlanID; } + + public List getRuntimeMetrics() { + return runtimeMetrics; + } + + public void setRuntimeMetrics(List runtimeMetrics) { + this.runtimeMetrics = runtimeMetrics; + } + + public List getInstanceIds() { + return instanceIds; + } + + public void setInstanceIds(List instanceIds) { + this.instanceIds = instanceIds; + } + + public String getInstanceId() { + return instanceId; + } + + public void setInstanceId(String instanceId) { + this.instanceId = instanceId; + } + } diff --git a/apps/dashboard/src/main/resources/struts.xml b/apps/dashboard/src/main/resources/struts.xml index 7a3e924065..631cab2cf2 100644 --- a/apps/dashboard/src/main/resources/struts.xml +++ b/apps/dashboard/src/main/resources/struts.xml @@ -2849,6 +2849,28 @@ ^actionErrors.* + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + ^actionErrors.* + + diff --git a/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java b/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java index e0eab40889..52d2bb5f11 100644 --- a/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java +++ b/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java @@ -213,6 +213,8 @@ public void setIssuesIds(BasicDBList issuesIds) { String endpointLogicalGroupId; DataControlSettings dataControlSettings; + BasicDBList metricsData; + public String fetchDataControlSettings() { try { String prevCommand = ""; @@ -1604,6 +1606,15 @@ public String fetchLatestEndpointsForTesting() { return Action.SUCCESS.toUpperCase(); } + public String insertRuntimeMetricsData() { + try { + DbLayer.insertRuntimeMetricsData(metricsData); + } catch (Exception e) { + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + public List getCustomDataTypes() { return customDataTypes; } @@ -2482,4 +2493,12 @@ public void setNewEps(List newEps) { this.newEps = newEps; } + public BasicDBList getMetricsData() { + return metricsData; + } + + public void setMetricsData(BasicDBList metricsData) { + this.metricsData = metricsData; + } + } diff --git a/apps/database-abstractor/src/main/resources/struts.xml b/apps/database-abstractor/src/main/resources/struts.xml index e8c3a6b49b..04f7049996 100644 --- a/apps/database-abstractor/src/main/resources/struts.xml +++ b/apps/database-abstractor/src/main/resources/struts.xml @@ -1113,6 +1113,17 @@ + + + + + + 422 + false + ^actionErrors.* + + + diff --git a/libs/dao/src/main/java/com/akto/DaoInit.java b/libs/dao/src/main/java/com/akto/DaoInit.java index fa21968747..72388801df 100644 --- a/libs/dao/src/main/java/com/akto/DaoInit.java +++ b/libs/dao/src/main/java/com/akto/DaoInit.java @@ -8,6 +8,7 @@ import com.akto.dao.testing.TestingRunResultDao; import com.akto.dao.testing.TestingRunResultSummariesDao; import com.akto.dao.testing_run_findings.TestingRunIssuesDao; +import com.akto.dao.traffic_metrics.RuntimeMetricsDao; 
import com.akto.dao.traffic_metrics.TrafficMetricsDao; import com.akto.dao.usage.UsageMetricsDao; import com.akto.dto.*; @@ -36,6 +37,7 @@ import com.akto.dto.third_party_access.Credential; import com.akto.dto.third_party_access.ThirdPartyAccess; import com.akto.dto.traffic.SampleData; +import com.akto.dto.traffic_metrics.RuntimeMetrics; import com.akto.dto.traffic_metrics.TrafficMetrics; import com.akto.dto.traffic_metrics.TrafficMetricsAlert; import com.akto.dto.type.SingleTypeInfo; @@ -246,6 +248,7 @@ public static CodecRegistry createCodecRegistry(){ ClassModel codeAnalysisApiInfoKeyClassModel = ClassModel.builder(CodeAnalysisApiInfo.CodeAnalysisApiInfoKey.class).enableDiscriminator(true).build(); ClassModel riskScoreTestingEndpointsClassModel = ClassModel.builder(RiskScoreTestingEndpoints.class).enableDiscriminator(true).build(); ClassModel OrganizationFlagsClassModel = ClassModel.builder(OrganizationFlags.class).enableDiscriminator(true).build(); + ClassModel RuntimeMetricsClassModel = ClassModel.builder(RuntimeMetrics.class).enableDiscriminator(true).build(); CodecRegistry pojoCodecRegistry = fromProviders(PojoCodecProvider.builder().register( configClassModel, signupInfoClassModel, apiAuthClassModel, attempResultModel, urlTemplateModel, @@ -275,7 +278,7 @@ public static CodecRegistry createCodecRegistry(){ yamlNodeDetails, multiExecTestResultClassModel, workflowTestClassModel, dependencyNodeClassModel, paramInfoClassModel, nodeClassModel, connectionClassModel, edgeClassModel, replaceDetailClassModel, modifyHostDetailClassModel, fileUploadClassModel ,fileUploadLogClassModel, codeAnalysisCollectionClassModel, codeAnalysisApiLocationClassModel, codeAnalysisApiInfoClassModel, codeAnalysisApiInfoKeyClassModel, - riskScoreTestingEndpointsClassModel, OrganizationFlagsClassModel).automatic(true).build()); + riskScoreTestingEndpointsClassModel, OrganizationFlagsClassModel, RuntimeMetricsClassModel).automatic(true).build()); final CodecRegistry customEnumCodecs = CodecRegistries.fromCodecs( new EnumCodec<>(Conditions.Operator.class), @@ -374,6 +377,7 @@ public static void createIndices() { DependencyFlowNodesDao.instance.createIndicesIfAbsent(); CodeAnalysisCollectionDao.instance.createIndicesIfAbsent(); CodeAnalysisApiInfoDao.instance.createIndicesIfAbsent(); + RuntimeMetricsDao.instance.createIndicesIfAbsent(); } } diff --git a/libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java b/libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java new file mode 100644 index 0000000000..e8bb16ae9a --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java @@ -0,0 +1,82 @@ +package com.akto.dao.traffic_metrics; + +import java.util.ArrayList; + +import org.bson.conversions.Bson; + +import com.akto.dao.AccountsContextDao; +import com.akto.dao.MCollection; +import com.akto.dao.context.Context; +import com.akto.dto.traffic_metrics.RuntimeMetrics; +import com.akto.dto.type.URLMethods; +import com.akto.util.DbMode; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.WriteModel; + +public class RuntimeMetricsDao extends AccountsContextDao { + + public static final RuntimeMetricsDao instance = new RuntimeMetricsDao(); + public static final int maxDocuments = 100_000; + public static final int sizeInBytes = 100_000_000; + + @Override + public String getCollName() { + return "runtime_metrics"; + } + + @Override + 
public Class getClassT() { + return RuntimeMetrics.class; + } + + public void createIndicesIfAbsent() { + boolean exists = false; + String dbName = Context.accountId.get()+""; + MongoDatabase db = clients[0].getDatabase(dbName); + for (String col: db.listCollectionNames()){ + if (getCollName().equalsIgnoreCase(col)){ + exists = true; + break; + } + }; + + if (!exists) { + db.createCollection(getCollName()); + } + + if (!exists) { + if (DbMode.allowCappedCollections()) { + db.createCollection(getCollName(), new CreateCollectionOptions().capped(true).maxDocuments(maxDocuments).sizeInBytes(sizeInBytes)); + } else { + db.createCollection(getCollName()); + } + } + + MCollection.createIndexIfAbsent(getDBName(), getCollName(), + new String[] { "timestamp" }, true); + MCollection.createIndexIfAbsent(getDBName(), getCollName(), + new String[] { "timestamp", "instanceId" }, true); + } + + public static void bulkInsertMetrics(ArrayList> bulkUpdates) { + RuntimeMetricsDao.instance. getMCollection().bulkWrite(bulkUpdates); + } + + public static Bson buildFilters(int startTs, int endTs) { + return Filters.and( + Filters.gte("timestamp", startTs), + Filters.lte("timestamp", endTs) + ); + } + + public static Bson buildFilters(int startTs, int endTs, String instanceId) { + return Filters.and( + Filters.gte("timestamp", startTs), + Filters.lte("timestamp", endTs), + Filters.eq("instanceId", instanceId) + ); + } + +} diff --git a/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java new file mode 100644 index 0000000000..53899ac03d --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java @@ -0,0 +1,52 @@ +package com.akto.dto.traffic_metrics; + +public class RuntimeMetrics { + + private String name; + private int timestamp; + private String instanceId; + private Double val; + + public RuntimeMetrics() { + } + + public RuntimeMetrics(String name, int timestamp, String instanceId, Double val) { + this.name = name; + this.timestamp = timestamp; + this.instanceId = instanceId; + this.val = val; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public int getTimestamp() { + return timestamp; + } + + public void setTimestamp(int timestamp) { + this.timestamp = timestamp; + } + + public String getInstanceId() { + return instanceId; + } + + public void setInstanceId(String instanceId) { + this.instanceId = instanceId; + } + + public Double getVal() { + return val; + } + + public void setVal(Double val) { + this.val = val; + } + +} diff --git a/libs/utils/src/main/java/com/akto/data_actor/ClientActor.java b/libs/utils/src/main/java/com/akto/data_actor/ClientActor.java index b1451f8043..f3eaa224cd 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/ClientActor.java +++ b/libs/utils/src/main/java/com/akto/data_actor/ClientActor.java @@ -2974,4 +2974,22 @@ public List fetchLatestEndpointsForTesting(int startTimestam return respList; } + public void insertRuntimeMetricsData(BasicDBList metricsData) { + Map> headers = buildHeaders(); + BasicDBObject obj = new BasicDBObject(); + obj.put("metricsData", metricsData); + OriginalHttpRequest request = new OriginalHttpRequest(url + "/insertRuntimeMetricsData", "", "POST", obj.toString(), headers, ""); + try { + OriginalHttpResponse response = ApiExecutor.sendRequest(request, true, null, false, null); + String responsePayload = response.getBody(); + if (response.getStatusCode() != 
200 || responsePayload == null) { + System.out.println("non 2xx response in insertRuntimeMetricsData"); + return; + } + } catch (Exception e) { + System.out.println("error in insertRuntimeMetricsData" + e); + return; + } + } + } diff --git a/libs/utils/src/main/java/com/akto/data_actor/DataActor.java b/libs/utils/src/main/java/com/akto/data_actor/DataActor.java index 434935be21..ace030954d 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DataActor.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DataActor.java @@ -23,6 +23,7 @@ import com.akto.dto.traffic.TrafficInfo; import com.akto.dto.type.SingleTypeInfo; import com.akto.dto.type.URLMethods; +import com.mongodb.BasicDBList; import com.mongodb.client.model.WriteModel; import java.util.List; @@ -223,5 +224,9 @@ public abstract class DataActor { public abstract DataControlSettings fetchDataControlSettings(String prevResult, String prevCommand); public abstract void bulkWriteDependencyNodes(List dependencyNodeList); + public abstract List fetchLatestEndpointsForTesting(int startTimestamp, int endTimestamp, int apiCollectionId); + + public abstract void insertRuntimeMetricsData(BasicDBList metricsData); + } diff --git a/libs/utils/src/main/java/com/akto/data_actor/DbActor.java b/libs/utils/src/main/java/com/akto/data_actor/DbActor.java index f9c49f1b1c..f98bea4545 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DbActor.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DbActor.java @@ -26,6 +26,7 @@ import com.akto.dto.type.SingleTypeInfo; import com.akto.dto.type.URLMethods; import com.akto.dto.type.URLMethods.Method; +import com.mongodb.BasicDBList; import com.mongodb.client.model.WriteModel; import java.util.ArrayList; @@ -457,4 +458,8 @@ public List fetchLatestEndpointsForTesting(int startTimestam return DbLayer.fetchLatestEndpointsForTesting(startTimestamp, endTimestamp, apiCollectionId); } + public void insertRuntimeMetricsData(BasicDBList metricsData) { + DbLayer.insertRuntimeMetricsData(metricsData); + } + } diff --git a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java index 3cb420a423..43f90ac595 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java @@ -16,6 +16,7 @@ import com.akto.dependency_analyser.DependencyAnalyserUtils; import com.akto.dto.*; import com.akto.dto.settings.DataControlSettings; +import com.mongodb.BasicDBList; import com.mongodb.client.model.*; import org.bson.conversions.Bson; import org.bson.types.ObjectId; @@ -36,6 +37,7 @@ import com.akto.dao.testing.WorkflowTestsDao; import com.akto.dao.testing.sources.TestSourceConfigsDao; import com.akto.dao.testing_run_findings.TestingRunIssuesDao; +import com.akto.dao.traffic_metrics.RuntimeMetricsDao; import com.akto.dao.traffic_metrics.TrafficMetricsDao; import com.akto.dto.ApiInfo.ApiInfoKey; import com.akto.dto.billing.Organization; @@ -58,6 +60,7 @@ import com.akto.dto.testing.sources.TestSourceConfig; import com.akto.dto.traffic.SampleData; import com.akto.dto.traffic.TrafficInfo; +import com.akto.dto.traffic_metrics.RuntimeMetrics; import com.akto.dto.traffic_metrics.TrafficMetrics; import com.akto.dto.type.SingleTypeInfo; import com.akto.dto.type.URLMethods; @@ -833,4 +836,33 @@ public static void bulkWriteDependencyNodes(List dependencyNodeL public static List fetchLatestEndpointsForTesting(int startTimestamp, int endTimestamp, int apiCollectionId) { return 
SingleTypeInfoDao.fetchLatestEndpointsForTesting(startTimestamp, endTimestamp, apiCollectionId); } + + public static void insertRuntimeMetricsData(BasicDBList metricsData) { + + ArrayList> bulkUpdates = new ArrayList<>(); + RuntimeMetrics runtimeMetrics; + for (Object metrics: metricsData) { + try { + Map obj = (Map) metrics; + String name = (String) obj.get("metric_id"); + String instanceId = (String) obj.get("instance_id"); + Long tsVal = (Long) obj.get("timestamp"); + int ts = tsVal.intValue(); + Double valDouble = (Double) obj.get("val"); + int val = valDouble.intValue(); + if (name == null || name.length() == 0) { + continue; + } + runtimeMetrics = new RuntimeMetrics(name, ts, instanceId, val); + bulkUpdates.add(new InsertOneModel<>(runtimeMetrics)); + } catch (Exception e) { + loggerMaker.errorAndAddToDb("error writing bulk update " + e.getMessage()); + } + } + + if (bulkUpdates.size() > 0) { + loggerMaker.infoAndAddToDb("insertRuntimeMetricsData bulk write size " + metricsData.size()); + RuntimeMetricsDao.bulkInsertMetrics(bulkUpdates); + } + } } diff --git a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java index e7e79c88ee..8c9625e06c 100644 --- a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java +++ b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java @@ -1,6 +1,7 @@ package com.akto.metrics; import com.akto.dao.context.Context; +import com.akto.data_actor.DataActor; import com.akto.data_actor.DataActorFactory; import com.akto.dto.billing.Organization; import com.akto.log.LoggerMaker; @@ -19,6 +20,7 @@ public class AllMetrics { + public static final DataActor dataActor = DataActorFactory.fetchInstance(); public void init(){ int accountId = Context.accountId.get(); @@ -83,11 +85,13 @@ public void init(){ metricsData.put("org_id", m.orgId); metricsData.put("instance_id", instance_id); metricsData.put("account_id", m.accountId); + metricsData.put("timestamp", Context.now()); list.add(metricsData); } if(!list.isEmpty()) { sendDataToAkto(list); + dataActor.insertRuntimeMetricsData(list); } } catch (Exception e){ loggerMaker.errorAndAddToDb("Error while sending metrics to akto: " + e.getMessage(), LoggerMaker.LogDb.RUNTIME); @@ -431,4 +435,30 @@ public static void sendDataToAkto(BasicDBList list){ loggerMaker.infoAndAddToDb("Traffic_metrics not sent", LoggerMaker.LogDb.RUNTIME); } } + + public static void sendData(BasicDBList list){ + + MediaType mediaType = MediaType.parse("application/json"); + RequestBody body = RequestBody.create(new BasicDBObject("data", list).toJson(), mediaType); + Request request = new Request.Builder() + .url(URL) + .method("POST", body) + .addHeader("Content-Type", "application/json") + .build(); + Response response = null; + try { + response = client.newCall(request).execute(); + } catch (IOException e) { + loggerMaker.errorAndAddToDb("Error while executing request " + request.url() + ": " + e.getMessage(), LoggerMaker.LogDb.RUNTIME); + } finally { + if (response != null) { + response.close(); + } + } + if (response!= null && response.isSuccessful()) { + loggerMaker.infoAndAddToDb("Updated traffic_metrics", LoggerMaker.LogDb.RUNTIME); + } else { + loggerMaker.infoAndAddToDb("Traffic_metrics not sent", LoggerMaker.LogDb.RUNTIME); + } + } } From 434ea2081b7a33cc3b52650c5f5a2be99b90daf8 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Wed, 28 Aug 2024 12:40:45 +0530 Subject: [PATCH 02/11] fix --- libs/utils/src/main/java/com/akto/data_actor/DbLayer.java | 3 +-- 1 file changed, 1 
insertion(+), 2 deletions(-) diff --git a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java index 43f90ac595..8a3c09a930 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java @@ -848,8 +848,7 @@ public static void insertRuntimeMetricsData(BasicDBList metricsData) { String instanceId = (String) obj.get("instance_id"); Long tsVal = (Long) obj.get("timestamp"); int ts = tsVal.intValue(); - Double valDouble = (Double) obj.get("val"); - int val = valDouble.intValue(); + Double val = (Double) obj.get("val"); if (name == null || name.length() == 0) { continue; } From 6807ca8ef89ecbd33175f751892cb2dcd6f3bff7 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Thu, 29 Aug 2024 16:00:39 +0530 Subject: [PATCH 03/11] write instanceid to file and add creds in kafka --- .../java/com/akto/hybrid_runtime/Main.java | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index 39605bdba7..e87a92534f 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -1,5 +1,10 @@ package com.akto.hybrid_runtime; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.FileWriter; +import java.io.IOException; import java.time.Duration; import java.util.*; import java.util.concurrent.Executors; @@ -23,6 +28,8 @@ import com.akto.data_actor.DataActorFactory; import com.akto.util.DashboardMode; import com.google.gson.Gson; +import com.mongodb.BasicDBObject; + import org.apache.kafka.clients.consumer.*; import org.apache.kafka.common.Metric; import org.apache.kafka.common.MetricName; @@ -109,6 +116,37 @@ private static void buildKafka() { } } + private static void insertCredsRecordInKafka(String brokerUrl) { + File f = new File("creds.txt"); + String instanceId = UUID.randomUUID().toString(); + if (f.exists()) { + try (FileReader reader = new FileReader(f); + BufferedReader bufferedReader = new BufferedReader(reader)) { + String line; + while ((line = bufferedReader.readLine()) != null) { + instanceId = line; + } + } catch (IOException e) { + loggerMaker.errorAndAddToDb("Error reading instanceId from file: " + e.getMessage()); + } + } else { + try (FileWriter writer = new FileWriter(f)) { + writer.write(instanceId); + } catch (IOException e) { + loggerMaker.errorAndAddToDb("Error writing instanceId to file: " + e.getMessage()); + } + } + + int batchSize = Integer.parseInt(System.getenv("AKTO_KAFKA_PRODUCER_BATCH_SIZE")); + int kafkaLingerMS = Integer.parseInt(System.getenv("AKTO_KAFKA_PRODUCER_LINGER_MS")); + kafkaProducer = new Kafka(brokerUrl, kafkaLingerMS, batchSize); + BasicDBObject creds = new BasicDBObject(); + creds.put("id", instanceId); + creds.put("token", System.getenv("DATABASE_ABSTRACTOR_SERVICE_TOKEN")); + creds.put("url", System.getenv("DATABASE_ABSTRACTOR_SERVICE_URL")); + kafkaProducer.send(creds.toJson(), "credentials"); + } + public static final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(2); public static class AccountInfo { @@ -171,6 +209,7 @@ public static void main(String[] args) { fetchAllSTI = false; } int maxPollRecordsConfig = Integer.parseInt(System.getenv("AKTO_KAFKA_MAX_POLL_RECORDS_CONFIG")); + insertCredsRecordInKafka(kafkaBrokerUrl); 
AccountSettings aSettings = dataActor.fetchAccountSettings(); if (aSettings == null) { From 15ca6ff947b1b7cf9a5b3a302fae0dddaca41a94 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Sat, 31 Aug 2024 17:01:45 +0530 Subject: [PATCH 04/11] modify prod yml --- .github/workflows/prod.yml | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/.github/workflows/prod.yml b/.github/workflows/prod.yml index 064df6898d..1cf8a890b7 100644 --- a/.github/workflows/prod.yml +++ b/.github/workflows/prod.yml @@ -58,19 +58,13 @@ jobs: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} ECR_REPOSITORY: akto-api-security REGISTRY_ALIAS: p7q3h0z2 - IMAGE_TAG: latest - IMAGE_TAG1: testruntime - IMAGE_TAG2: local - IMAGE_TAG3: 1.41.18_local + IMAGE_TAG: traffictest run: | docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG1 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG2 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG3 . --push + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-runtime:$IMAGE_TAG . --push echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-runtime:$IMAGE_TAG" - cd ../mini-testing - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$REGISTRY_ALIAS/akto-api-security-mini-testing:$IMAGE_TAG3 . --push - echo "::set-output name=image::$ECR_REGISTRY/akto-api-security-mini-testing:$IMAGE_TAG3" - name: DockerHub login env: @@ -83,15 +77,10 @@ jobs: env: ECR_REGISTRY: aktosecurity ECR_REPOSITORY: akto-api-security - IMAGE_TAG: latest - IMAGE_TAG1: testruntime - IMAGE_TAG2: local - IMAGE_TAG3: 1.41.18_local + IMAGE_TAG: traffictest run: | echo $IMAGE_TAG >> $GITHUB_STEP_SUMMARY docker buildx create --use # Build a docker container and push it to DockerHub cd apps/mini-runtime - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG1 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG2 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG3 . --push - cd ../mini-testing - docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-testing:$IMAGE_TAG3 . --push \ No newline at end of file + docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/mini-runtime:$IMAGE_TAG . 
--push \ No newline at end of file From 8dea0d6d6db98548fd15d1fc57c24c477b5222b5 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Tue, 10 Sep 2024 11:39:33 +0530 Subject: [PATCH 05/11] use instanceid from file --- .../java/com/akto/hybrid_runtime/Main.java | 19 ++++++++++++------- .../java/com/akto/metrics/AllMetrics.java | 18 ++++++++++++++---- 2 files changed, 26 insertions(+), 11 deletions(-) diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index e87a92534f..21c8e5f3ff 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -116,7 +116,7 @@ private static void buildKafka() { } } - private static void insertCredsRecordInKafka(String brokerUrl) { + private static String insertCredsRecordInKafka(String brokerUrl) { File f = new File("creds.txt"); String instanceId = UUID.randomUUID().toString(); if (f.exists()) { @@ -144,7 +144,12 @@ private static void insertCredsRecordInKafka(String brokerUrl) { creds.put("id", instanceId); creds.put("token", System.getenv("DATABASE_ABSTRACTOR_SERVICE_TOKEN")); creds.put("url", System.getenv("DATABASE_ABSTRACTOR_SERVICE_URL")); - kafkaProducer.send(creds.toJson(), "credentials"); + try { + kafkaProducer.send(creds.toJson(), "credentials"); + } catch (Exception e) { + loggerMaker.errorAndAddToDb("Error inserting creds record in kafka: " + e.getMessage()); + } + return instanceId; } public static final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(2); @@ -191,7 +196,7 @@ public static void main(String[] args) { //String mongoURI = System.getenv("AKTO_MONGO_CONN");; String configName = System.getenv("AKTO_CONFIG_NAME"); String topicName = getTopicName(); - String kafkaBrokerUrl = "kafka1:19092"; //System.getenv("AKTO_KAFKA_BROKER_URL"); + String kafkaBrokerUrl = "localhost:29092"; //System.getenv("AKTO_KAFKA_BROKER_URL"); String isKubernetes = System.getenv("IS_KUBERNETES"); if (isKubernetes != null && isKubernetes.equalsIgnoreCase("true")) { loggerMaker.infoAndAddToDb("is_kubernetes: true", LogDb.RUNTIME); @@ -209,7 +214,7 @@ public static void main(String[] args) { fetchAllSTI = false; } int maxPollRecordsConfig = Integer.parseInt(System.getenv("AKTO_KAFKA_MAX_POLL_RECORDS_CONFIG")); - insertCredsRecordInKafka(kafkaBrokerUrl); + String instanceId = insertCredsRecordInKafka(kafkaBrokerUrl); AccountSettings aSettings = dataActor.fetchAccountSettings(); if (aSettings == null) { @@ -227,7 +232,7 @@ public static void main(String[] args) { dataActor.modifyHybridSaasSetting(RuntimeMode.isHybridDeployment()); - initializeRuntime(); + initializeRuntime(instanceId); String centralKafkaTopicName = AccountSettings.DEFAULT_CENTRAL_KAFKA_TOPIC_NAME; @@ -560,12 +565,12 @@ public static void changeTargetCollection(Map> apiC } } - public static void initializeRuntime(){ + public static void initializeRuntime(String instanceId){ Account account = dataActor.fetchActiveAccount(); Context.accountId.set(account.getId()); - AllMetrics.instance.init(); + AllMetrics.instance.init(instanceId); loggerMaker.infoAndAddToDb("All metrics initialized", LogDb.RUNTIME); Setup setup = dataActor.fetchSetup(); diff --git a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java index 8c9625e06c..0a7b48d54e 100644 --- a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java +++ 
b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java @@ -13,7 +13,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.UUID; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; @@ -21,8 +20,11 @@ public class AllMetrics { public static final DataActor dataActor = DataActorFactory.fetchInstance(); - public void init(){ + private String instanceId; + + public void init(String instanceId){ int accountId = Context.accountId.get(); + this.setInstanceId(instanceId); Organization organization = DataActorFactory.fetchInstance().fetchOrganization(accountId); String orgId = organization.getId(); @@ -83,7 +85,7 @@ public void init(){ metricsData.put("metric_id", m.metricId); metricsData.put("val", metric); metricsData.put("org_id", m.orgId); - metricsData.put("instance_id", instance_id); + metricsData.put("instance_id", this.getInstanceId()); metricsData.put("account_id", m.accountId); metricsData.put("timestamp", Context.now()); list.add(metricsData); @@ -114,7 +116,6 @@ private AllMetrics(){} private final static LoggerMaker loggerMaker = new LoggerMaker(AllMetrics.class); - private static final String instance_id = UUID.randomUUID().toString(); private Metric runtimeKafkaRecordCount; private Metric runtimeKafkaRecordSize; private Metric runtimeProcessLatency = null; @@ -461,4 +462,13 @@ public static void sendData(BasicDBList list){ loggerMaker.infoAndAddToDb("Traffic_metrics not sent", LoggerMaker.LogDb.RUNTIME); } } + + public String getInstanceId() { + return instanceId; + } + + public void setInstanceId(String instanceId) { + this.instanceId = instanceId; + } + } From afeeb33077647181282ce3344f34f7d6c565f288 Mon Sep 17 00:00:00 2001 From: ayushaga14 Date: Tue, 10 Sep 2024 11:48:05 +0530 Subject: [PATCH 06/11] revert kafka broker url val --- .../src/main/java/com/akto/hybrid_runtime/Main.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index 21c8e5f3ff..c71daa5d3d 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -196,7 +196,7 @@ public static void main(String[] args) { //String mongoURI = System.getenv("AKTO_MONGO_CONN");; String configName = System.getenv("AKTO_CONFIG_NAME"); String topicName = getTopicName(); - String kafkaBrokerUrl = "localhost:29092"; //System.getenv("AKTO_KAFKA_BROKER_URL"); + String kafkaBrokerUrl = "kafka1:19092"; //System.getenv("AKTO_KAFKA_BROKER_URL"); String isKubernetes = System.getenv("IS_KUBERNETES"); if (isKubernetes != null && isKubernetes.equalsIgnoreCase("true")) { loggerMaker.infoAndAddToDb("is_kubernetes: true", LogDb.RUNTIME); From fd49845bb3bf326c50976c884e8932f24f3cbeb8 Mon Sep 17 00:00:00 2001 From: TangoBeeAkto Date: Tue, 10 Sep 2024 15:21:53 +0530 Subject: [PATCH 07/11] feat: inserting runtime version in runtime metrics dao --- .../traffic_metrics/TrafficMetricsAction.java | 2 +- .../java/com/akto/hybrid_runtime/Main.java | 18 ++++++++++-------- .../akto/hybrid_runtime/RuntimeVersion.java | 3 ++- .../dto/traffic_metrics/RuntimeMetrics.java | 12 +++++++++++- .../main/java/com/akto/data_actor/DbLayer.java | 3 ++- .../main/java/com/akto/metrics/AllMetrics.java | 12 +++++++++++- 6 files changed, 37 insertions(+), 13 deletions(-) diff --git 
a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java index 95ef4bdfb4..75d41cda15 100644 --- a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java +++ b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java @@ -149,7 +149,7 @@ public String fetchRuntimeMetrics() { while (cursor.hasNext()) { BasicDBObject basicDBObject = cursor.next(); BasicDBObject latestDoc = (BasicDBObject) basicDBObject.get("latestDoc"); - runtimeMetrics.add(new RuntimeMetrics(latestDoc.getString("name"), 0, instanceId, latestDoc.getDouble("val"))); + runtimeMetrics.add(new RuntimeMetrics(latestDoc.getString("name"), 0, instanceId, latestDoc.getString("version"), latestDoc.getDouble("val"))); } } diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index c71daa5d3d..aa183f90b8 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -570,7 +570,16 @@ public static void initializeRuntime(String instanceId){ Account account = dataActor.fetchActiveAccount(); Context.accountId.set(account.getId()); - AllMetrics.instance.init(instanceId); + + String version = ""; + RuntimeVersion runtimeVersion = new RuntimeVersion(); + try { + version = runtimeVersion.updateVersion(AccountSettings.API_RUNTIME_VERSION, dataActor); + } catch (Exception e) { + loggerMaker.errorAndAddToDb("error while updating dashboard version: " + e.getMessage(), LogDb.RUNTIME); + } + + AllMetrics.instance.init(instanceId, version); loggerMaker.infoAndAddToDb("All metrics initialized", LogDb.RUNTIME); Setup setup = dataActor.fetchSetup(); @@ -581,13 +590,6 @@ public static void initializeRuntime(String instanceId){ } isOnprem = dashboardMode.equalsIgnoreCase(DashboardMode.ON_PREM.name()); - - RuntimeVersion runtimeVersion = new RuntimeVersion(); - try { - runtimeVersion.updateVersion(AccountSettings.API_RUNTIME_VERSION, dataActor); - } catch (Exception e) { - loggerMaker.errorAndAddToDb("error while updating dashboard version: " + e.getMessage(), LogDb.RUNTIME); - } initFromRuntime(account.getId()); diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java index 2c37e49551..53a4037374 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java @@ -9,7 +9,7 @@ public class RuntimeVersion { - public void updateVersion(String fieldName, DataActor dataActor) throws Exception { + public String updateVersion(String fieldName, DataActor dataActor) throws Exception { try (InputStream in = getClass().getResourceAsStream("/version.txt")) { if (in != null) { BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in)); @@ -18,6 +18,7 @@ public void updateVersion(String fieldName, DataActor dataActor) throws Exceptio String version = imageTag + " - " + buildTime; dataActor.updateRuntimeVersion(fieldName, version); + return version; } else { throw new Exception("Input stream null"); } diff --git a/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java index 
53899ac03d..295c89f020 100644 --- a/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java +++ b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java @@ -5,15 +5,17 @@ public class RuntimeMetrics { private String name; private int timestamp; private String instanceId; + private String version; private Double val; public RuntimeMetrics() { } - public RuntimeMetrics(String name, int timestamp, String instanceId, Double val) { + public RuntimeMetrics(String name, int timestamp, String instanceId, String version, Double val) { this.name = name; this.timestamp = timestamp; this.instanceId = instanceId; + this.version = version; this.val = val; } @@ -41,6 +43,14 @@ public void setInstanceId(String instanceId) { this.instanceId = instanceId; } + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + public Double getVal() { return val; } diff --git a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java index 8a3c09a930..c10821ebb7 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java @@ -846,13 +846,14 @@ public static void insertRuntimeMetricsData(BasicDBList metricsData) { Map obj = (Map) metrics; String name = (String) obj.get("metric_id"); String instanceId = (String) obj.get("instance_id"); + String version = (String) obj.get("version"); Long tsVal = (Long) obj.get("timestamp"); int ts = tsVal.intValue(); Double val = (Double) obj.get("val"); if (name == null || name.length() == 0) { continue; } - runtimeMetrics = new RuntimeMetrics(name, ts, instanceId, val); + runtimeMetrics = new RuntimeMetrics(name, ts, instanceId, version, val); bulkUpdates.add(new InsertOneModel<>(runtimeMetrics)); } catch (Exception e) { loggerMaker.errorAndAddToDb("error writing bulk update " + e.getMessage()); diff --git a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java index 0a7b48d54e..6b7aaa8d08 100644 --- a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java +++ b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java @@ -21,10 +21,12 @@ public class AllMetrics { public static final DataActor dataActor = DataActorFactory.fetchInstance(); private String instanceId; + private String version; - public void init(String instanceId){ + public void init(String instanceId, String version){ int accountId = Context.accountId.get(); this.setInstanceId(instanceId); + this.setVersion(version); Organization organization = DataActorFactory.fetchInstance().fetchOrganization(accountId); String orgId = organization.getId(); @@ -86,6 +88,7 @@ public void init(String instanceId){ metricsData.put("val", metric); metricsData.put("org_id", m.orgId); metricsData.put("instance_id", this.getInstanceId()); + metricsData.put("version", this.getVersion()); metricsData.put("account_id", m.accountId); metricsData.put("timestamp", Context.now()); list.add(metricsData); @@ -471,4 +474,11 @@ public void setInstanceId(String instanceId) { this.instanceId = instanceId; } + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } } From d151d2dd295f148bc3fc41062927f742f801b526 Mon Sep 17 00:00:00 2001 From: TangoBeeAkto Date: Tue, 10 Sep 2024 17:10:34 +0530 Subject: [PATCH 08/11] Revert "feat: inserting runtime version in runtime metrics dao" This 
reverts commit fd49845bb3bf326c50976c884e8932f24f3cbeb8. --- .../traffic_metrics/TrafficMetricsAction.java | 2 +- .../java/com/akto/hybrid_runtime/Main.java | 18 ++++++++---------- .../akto/hybrid_runtime/RuntimeVersion.java | 3 +-- .../dto/traffic_metrics/RuntimeMetrics.java | 12 +----------- .../main/java/com/akto/data_actor/DbLayer.java | 3 +-- .../main/java/com/akto/metrics/AllMetrics.java | 12 +----------- 6 files changed, 13 insertions(+), 37 deletions(-) diff --git a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java index 75d41cda15..95ef4bdfb4 100644 --- a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java +++ b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java @@ -149,7 +149,7 @@ public String fetchRuntimeMetrics() { while (cursor.hasNext()) { BasicDBObject basicDBObject = cursor.next(); BasicDBObject latestDoc = (BasicDBObject) basicDBObject.get("latestDoc"); - runtimeMetrics.add(new RuntimeMetrics(latestDoc.getString("name"), 0, instanceId, latestDoc.getString("version"), latestDoc.getDouble("val"))); + runtimeMetrics.add(new RuntimeMetrics(latestDoc.getString("name"), 0, instanceId, latestDoc.getDouble("val"))); } } diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index aa183f90b8..c71daa5d3d 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -570,16 +570,7 @@ public static void initializeRuntime(String instanceId){ Account account = dataActor.fetchActiveAccount(); Context.accountId.set(account.getId()); - - String version = ""; - RuntimeVersion runtimeVersion = new RuntimeVersion(); - try { - version = runtimeVersion.updateVersion(AccountSettings.API_RUNTIME_VERSION, dataActor); - } catch (Exception e) { - loggerMaker.errorAndAddToDb("error while updating dashboard version: " + e.getMessage(), LogDb.RUNTIME); - } - - AllMetrics.instance.init(instanceId, version); + AllMetrics.instance.init(instanceId); loggerMaker.infoAndAddToDb("All metrics initialized", LogDb.RUNTIME); Setup setup = dataActor.fetchSetup(); @@ -590,6 +581,13 @@ public static void initializeRuntime(String instanceId){ } isOnprem = dashboardMode.equalsIgnoreCase(DashboardMode.ON_PREM.name()); + + RuntimeVersion runtimeVersion = new RuntimeVersion(); + try { + runtimeVersion.updateVersion(AccountSettings.API_RUNTIME_VERSION, dataActor); + } catch (Exception e) { + loggerMaker.errorAndAddToDb("error while updating dashboard version: " + e.getMessage(), LogDb.RUNTIME); + } initFromRuntime(account.getId()); diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java index 53a4037374..2c37e49551 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java @@ -9,7 +9,7 @@ public class RuntimeVersion { - public String updateVersion(String fieldName, DataActor dataActor) throws Exception { + public void updateVersion(String fieldName, DataActor dataActor) throws Exception { try (InputStream in = getClass().getResourceAsStream("/version.txt")) { if (in != null) { BufferedReader bufferedReader = new 
BufferedReader(new InputStreamReader(in)); @@ -18,7 +18,6 @@ public String updateVersion(String fieldName, DataActor dataActor) throws Except String version = imageTag + " - " + buildTime; dataActor.updateRuntimeVersion(fieldName, version); - return version; } else { throw new Exception("Input stream null"); } diff --git a/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java index 295c89f020..53899ac03d 100644 --- a/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java +++ b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java @@ -5,17 +5,15 @@ public class RuntimeMetrics { private String name; private int timestamp; private String instanceId; - private String version; private Double val; public RuntimeMetrics() { } - public RuntimeMetrics(String name, int timestamp, String instanceId, String version, Double val) { + public RuntimeMetrics(String name, int timestamp, String instanceId, Double val) { this.name = name; this.timestamp = timestamp; this.instanceId = instanceId; - this.version = version; this.val = val; } @@ -43,14 +41,6 @@ public void setInstanceId(String instanceId) { this.instanceId = instanceId; } - public String getVersion() { - return version; - } - - public void setVersion(String version) { - this.version = version; - } - public Double getVal() { return val; } diff --git a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java index c10821ebb7..8a3c09a930 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java @@ -846,14 +846,13 @@ public static void insertRuntimeMetricsData(BasicDBList metricsData) { Map obj = (Map) metrics; String name = (String) obj.get("metric_id"); String instanceId = (String) obj.get("instance_id"); - String version = (String) obj.get("version"); Long tsVal = (Long) obj.get("timestamp"); int ts = tsVal.intValue(); Double val = (Double) obj.get("val"); if (name == null || name.length() == 0) { continue; } - runtimeMetrics = new RuntimeMetrics(name, ts, instanceId, version, val); + runtimeMetrics = new RuntimeMetrics(name, ts, instanceId, val); bulkUpdates.add(new InsertOneModel<>(runtimeMetrics)); } catch (Exception e) { loggerMaker.errorAndAddToDb("error writing bulk update " + e.getMessage()); diff --git a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java index 6b7aaa8d08..0a7b48d54e 100644 --- a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java +++ b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java @@ -21,12 +21,10 @@ public class AllMetrics { public static final DataActor dataActor = DataActorFactory.fetchInstance(); private String instanceId; - private String version; - public void init(String instanceId, String version){ + public void init(String instanceId){ int accountId = Context.accountId.get(); this.setInstanceId(instanceId); - this.setVersion(version); Organization organization = DataActorFactory.fetchInstance().fetchOrganization(accountId); String orgId = organization.getId(); @@ -88,7 +86,6 @@ public void init(String instanceId, String version){ metricsData.put("val", metric); metricsData.put("org_id", m.orgId); metricsData.put("instance_id", this.getInstanceId()); - metricsData.put("version", this.getVersion()); metricsData.put("account_id", m.accountId); metricsData.put("timestamp", 
Context.now()); list.add(metricsData); @@ -474,11 +471,4 @@ public void setInstanceId(String instanceId) { this.instanceId = instanceId; } - public String getVersion() { - return version; - } - - public void setVersion(String version) { - this.version = version; - } } From f1bc676b32b669d6997c7622da5619ac131a337c Mon Sep 17 00:00:00 2001 From: TangoBeeAkto Date: Tue, 10 Sep 2024 17:20:00 +0530 Subject: [PATCH 09/11] feat: inserting runtime version in runtime metrics --- .../traffic_metrics/TrafficMetricsAction.java | 2 +- .../main/java/com/akto/hybrid_runtime/Main.java | 17 +++++++++-------- .../com/akto/hybrid_runtime/RuntimeVersion.java | 3 ++- .../dto/traffic_metrics/RuntimeMetrics.java | 12 +++++++++++- .../main/java/com/akto/data_actor/DbLayer.java | 3 ++- .../main/java/com/akto/metrics/AllMetrics.java | 12 +++++++++++- 6 files changed, 36 insertions(+), 13 deletions(-) diff --git a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java index 95ef4bdfb4..75d41cda15 100644 --- a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java +++ b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java @@ -149,7 +149,7 @@ public String fetchRuntimeMetrics() { while (cursor.hasNext()) { BasicDBObject basicDBObject = cursor.next(); BasicDBObject latestDoc = (BasicDBObject) basicDBObject.get("latestDoc"); - runtimeMetrics.add(new RuntimeMetrics(latestDoc.getString("name"), 0, instanceId, latestDoc.getDouble("val"))); + runtimeMetrics.add(new RuntimeMetrics(latestDoc.getString("name"), 0, instanceId, latestDoc.getString("version"), latestDoc.getDouble("val"))); } } diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java index c71daa5d3d..27197c07b4 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java +++ b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/Main.java @@ -570,7 +570,15 @@ public static void initializeRuntime(String instanceId){ Account account = dataActor.fetchActiveAccount(); Context.accountId.set(account.getId()); - AllMetrics.instance.init(instanceId); + String version = ""; + RuntimeVersion runtimeVersion = new RuntimeVersion(); + try { + version = runtimeVersion.updateVersion(AccountSettings.API_RUNTIME_VERSION, dataActor); + } catch (Exception e) { + loggerMaker.errorAndAddToDb("error while updating dashboard version: " + e.getMessage(), LogDb.RUNTIME); + } + + AllMetrics.instance.init(instanceId, version); loggerMaker.infoAndAddToDb("All metrics initialized", LogDb.RUNTIME); Setup setup = dataActor.fetchSetup(); @@ -581,13 +589,6 @@ public static void initializeRuntime(String instanceId){ } isOnprem = dashboardMode.equalsIgnoreCase(DashboardMode.ON_PREM.name()); - - RuntimeVersion runtimeVersion = new RuntimeVersion(); - try { - runtimeVersion.updateVersion(AccountSettings.API_RUNTIME_VERSION, dataActor); - } catch (Exception e) { - loggerMaker.errorAndAddToDb("error while updating dashboard version: " + e.getMessage(), LogDb.RUNTIME); - } initFromRuntime(account.getId()); diff --git a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java index 2c37e49551..53a4037374 100644 --- a/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java +++ 
b/apps/mini-runtime/src/main/java/com/akto/hybrid_runtime/RuntimeVersion.java @@ -9,7 +9,7 @@ public class RuntimeVersion { - public void updateVersion(String fieldName, DataActor dataActor) throws Exception { + public String updateVersion(String fieldName, DataActor dataActor) throws Exception { try (InputStream in = getClass().getResourceAsStream("/version.txt")) { if (in != null) { BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in)); @@ -18,6 +18,7 @@ public void updateVersion(String fieldName, DataActor dataActor) throws Exceptio String version = imageTag + " - " + buildTime; dataActor.updateRuntimeVersion(fieldName, version); + return version; } else { throw new Exception("Input stream null"); } diff --git a/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java index 53899ac03d..295c89f020 100644 --- a/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java +++ b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java @@ -5,15 +5,17 @@ public class RuntimeMetrics { private String name; private int timestamp; private String instanceId; + private String version; private Double val; public RuntimeMetrics() { } - public RuntimeMetrics(String name, int timestamp, String instanceId, Double val) { + public RuntimeMetrics(String name, int timestamp, String instanceId, String version, Double val) { this.name = name; this.timestamp = timestamp; this.instanceId = instanceId; + this.version = version; this.val = val; } @@ -41,6 +43,14 @@ public void setInstanceId(String instanceId) { this.instanceId = instanceId; } + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + public Double getVal() { return val; } diff --git a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java index 8a3c09a930..c10821ebb7 100644 --- a/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java +++ b/libs/utils/src/main/java/com/akto/data_actor/DbLayer.java @@ -846,13 +846,14 @@ public static void insertRuntimeMetricsData(BasicDBList metricsData) { Map obj = (Map) metrics; String name = (String) obj.get("metric_id"); String instanceId = (String) obj.get("instance_id"); + String version = (String) obj.get("version"); Long tsVal = (Long) obj.get("timestamp"); int ts = tsVal.intValue(); Double val = (Double) obj.get("val"); if (name == null || name.length() == 0) { continue; } - runtimeMetrics = new RuntimeMetrics(name, ts, instanceId, val); + runtimeMetrics = new RuntimeMetrics(name, ts, instanceId, version, val); bulkUpdates.add(new InsertOneModel<>(runtimeMetrics)); } catch (Exception e) { loggerMaker.errorAndAddToDb("error writing bulk update " + e.getMessage()); diff --git a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java index 0a7b48d54e..b0a789e4cc 100644 --- a/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java +++ b/libs/utils/src/main/java/com/akto/metrics/AllMetrics.java @@ -21,10 +21,12 @@ public class AllMetrics { public static final DataActor dataActor = DataActorFactory.fetchInstance(); private String instanceId; + private String version; - public void init(String instanceId){ + public void init(String instanceId, String version) { int accountId = Context.accountId.get(); this.setInstanceId(instanceId); + this.setVersion(version); Organization 
organization = DataActorFactory.fetchInstance().fetchOrganization(accountId); String orgId = organization.getId(); @@ -86,6 +88,7 @@ public void init(String instanceId){ metricsData.put("val", metric); metricsData.put("org_id", m.orgId); metricsData.put("instance_id", this.getInstanceId()); + metricsData.put("version", this.getVersion()); metricsData.put("account_id", m.accountId); metricsData.put("timestamp", Context.now()); list.add(metricsData); @@ -471,4 +474,11 @@ public void setInstanceId(String instanceId) { this.instanceId = instanceId; } + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } } From e19ddebd9940c95e4d3a684139163337e94b6226 Mon Sep 17 00:00:00 2001 From: TangoBeeAkto Date: Sun, 15 Sep 2024 12:56:54 +0530 Subject: [PATCH 10/11] feat: created runtime metrics ui and showing data --- .../traffic_metrics/TrafficMetricsAction.java | 11 +- apps/dashboard/src/main/resources/struts.xml | 11 + .../apps/dashboard/components/GraphMetric.jsx | 24 +- .../src/apps/dashboard/pages/settings/api.js | 20 ++ .../pages/settings/metrics/Metrics.jsx | 252 +++++++++++++++++- .../apps/dashboard/pages/settings/module.js | 14 + 6 files changed, 309 insertions(+), 23 deletions(-) diff --git a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java index 75d41cda15..9e15572e45 100644 --- a/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java +++ b/apps/dashboard/src/main/java/com/akto/action/traffic_metrics/TrafficMetricsAction.java @@ -126,7 +126,7 @@ public String execute() { public String fetchRuntimeInstances() { instanceIds = new ArrayList<>(); Bson filters = RuntimeMetricsDao.buildFilters(startTimestamp, endTimestamp); - runtimeMetrics = RuntimeMetricsDao.instance.findAll(filters, 0, 0, Sorts.descending("timestamp"), Projections.include("instanceId")); + runtimeMetrics = RuntimeMetricsDao.instance.findAll(filters, 0, 0, Sorts.descending("timestamp")); for (RuntimeMetrics metric: runtimeMetrics) { instanceIds.add(metric.getInstanceId()); } @@ -156,6 +156,15 @@ public String fetchRuntimeMetrics() { return SUCCESS.toUpperCase(); } + public String fetchAllRuntimeMetrics() { + Bson filters = RuntimeMetricsDao.buildFilters(startTimestamp, endTimestamp, instanceId); + runtimeMetrics = new ArrayList<>(); + + runtimeMetrics.addAll(RuntimeMetricsDao.instance.findAll(filters)); + + return SUCCESS.toUpperCase(); + } + public String fetchTrafficMetricsDesciptions(){ names = Arrays.asList(TrafficMetrics.Name.values()); return SUCCESS.toUpperCase(); diff --git a/apps/dashboard/src/main/resources/struts.xml b/apps/dashboard/src/main/resources/struts.xml index 631cab2cf2..9a8e948023 100644 --- a/apps/dashboard/src/main/resources/struts.xml +++ b/apps/dashboard/src/main/resources/struts.xml @@ -2871,6 +2871,17 @@ ^actionErrors.* + + + + + + + 422 + false + ^actionErrors.* + + diff --git a/apps/dashboard/web/polaris_web/web/src/apps/dashboard/components/GraphMetric.jsx b/apps/dashboard/web/polaris_web/web/src/apps/dashboard/components/GraphMetric.jsx index e3deb0ce6f..e6b18c05d3 100644 --- a/apps/dashboard/web/polaris_web/web/src/apps/dashboard/components/GraphMetric.jsx +++ b/apps/dashboard/web/polaris_web/web/src/apps/dashboard/components/GraphMetric.jsx @@ -30,31 +30,15 @@ function GraphMetric(props) { }); const series = [ - ...dataForChart, - inputMetrics.length > 0 && 
inputMetrics.map((x, i) => { - return { - data: x.data, - color: '#FF4DCA', - name: x.name, - marker: { - enabled: false, - symbol: 'circle', - }, - fillColor: { - linearGradient: { x1: 0, y1: 0, x2: 0, y2: 1 }, - stops: [ - [0, '#000000'], - [1, '#000000'], - ], - }, - yAxis: i + 1, - }; - }), + ...dataForChart ]; const chartOptions = { chart: { type, + zooming: { + type: 'x' + }, height: `${height}px`, backgroundColor, }, diff --git a/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/api.js b/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/api.js index c31c3a7601..fbf8b74140 100644 --- a/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/api.js +++ b/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/api.js @@ -136,6 +136,26 @@ const settingRequests = { data: {} }) }, + fetchRuntimeInstances(startTimestamp, endTimestamp) { + return request({ + url: '/api/fetchRuntimeInstances', + method: 'post', + data: { + startTimestamp, + endTimestamp + } + }) + }, + fetchRuntimeMetrics(startTimestamp, endTimestamp, instanceId) { + return request({ + url: '/api/fetchAllRuntimeMetrics', + method: 'post', + data: { + startTimestamp, endTimestamp, + instanceId + } + }) + }, fetchTrafficMetrics(groupBy, startTimestamp, endTimestamp, names, host) { return request({ url: '/api/fetchTrafficMetrics', diff --git a/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/metrics/Metrics.jsx b/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/metrics/Metrics.jsx index a133ecc70b..61976c78f6 100644 --- a/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/metrics/Metrics.jsx +++ b/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/metrics/Metrics.jsx @@ -1,4 +1,4 @@ -import { EmptyState, LegacyCard, Page } from '@shopify/polaris' +import { Divider, EmptyState, LegacyCard, Page } from '@shopify/polaris' import React, { useEffect, useReducer, useState } from 'react' import DateRangeFilter from '../../../components/layouts/DateRangeFilter' import Dropdown from '../../../components/layouts/Dropdown' @@ -10,6 +10,8 @@ import settingFunctions from '../module' import GraphMetric from '../../../components/GraphMetric' import values from '@/util/values' import PersistStore from '../../../../main/PersistStore' +import GithubSimpleTable from '../../../components/tables/GithubSimpleTable' +import FlyLayout from '../../../components/layouts/FlyLayout' function Metrics() { @@ -20,6 +22,12 @@ function Metrics() { const [hostsActive, setHostsActive] = useState(false) const [currentHost, setCurrentHost] = useState(null) + const [runtimeMetricsData, setRuntimeMetricsData] = useState([]) + const [showRuntimeGraph, setShowRuntimeGraph] = useState(false) + const [graphs, setGraphs] = useState([]) + const [runtimeFilterVal, setRuntimeFilterVal] = useState('last7days') + const [loading, setLoading] = useState(false) + const [currDateRange, dispatchCurrDateRange] = useReducer(produce((draft, action) => func.dateRangeReducer(draft, action)), values.ranges[2]); const getTimeEpoch = (key) => { return Math.floor(Date.parse(currDateRange.period[key]) / 1000) @@ -34,6 +42,16 @@ function Metrics() { { label: "Group by Target group", value: "VXLANID" }, ] + const runtimeMetricsNameMap = { + "rt_kafka_record_count": "Kafka Record Count", + "rt_kafka_record_size": "Kafka Record Size", + "rt_kafka_latency": "Kafka Latency", + "rt_kafka_records_lag_max": "Kafka Records Lag Max", + 
"rt_kafka_records_consumed_rate": "Kafka Records Consumed Rate", + "rt_kafka_fetch_avg_latency": "Kafka Fetch Average Latency", + "rt_kafka_bytes_consumed_rate": "Kafka Bytes Consumed Rate" + } + const [menuItems,setMenuItems] = useState(initialItems) const [groupBy, setGroupBy] = useState("ALL") @@ -41,6 +59,64 @@ function Metrics() { let arr = await settingFunctions.fetchMetricData() setMetricList(arr) } + + const runtimeFilterOptionsValueMap = { + "15minutes": Math.floor((Date.now() - (15 * 60 * 1000)) / 1000), + "30minutes": Math.floor((Date.now() - (30 * 60 * 1000)) / 1000), + "1hour": Math.floor(Date.now() / 1000) - 3600, + "6hours": Math.floor(Date.now() / 1000) - 21600, + "1day": Math.floor((Date.now() - (24 * 60 * 60 * 1000)) / 1000), + "3days": Math.floor((Date.now() - (3 * 24 * 60 * 60 * 1000)) / 1000), + "last7days": Math.floor((Date.now() - (7 * 24 * 60 * 60 * 1000)) / 1000) + } + + const getRuntimeMetrics = async () => { + const currentEpoch = Math.floor(Date.now() / 1000) + let runtimeRes = await settingFunctions.fetchRuntimeInstances(runtimeFilterOptionsValueMap[runtimeFilterVal], currentEpoch) + + const uniqueInstanceIds = new Set(runtimeRes.instanceIds) + const runtimeMetrics = runtimeRes.runtimeMetrics + + const namesArray = Object.keys(runtimeMetricsNameMap) + + const groupedData = Array.from(uniqueInstanceIds).map(instanceId => { + const instanceData = runtimeMetrics.filter(item => {return item.instanceId === instanceId && namesArray.includes(item.name.toLowerCase())}) + + if (!instanceData.length) return null + + const startTime = Math.min(...instanceData.map(item => item.timestamp)) + const heartbeat = Math.max(...instanceData.map(item => item.timestamp)) + + const result = { + id: instanceId, + startTime: func.prettifyEpoch(startTime), + heartbeat: func.prettifyEpoch(heartbeat), + version: instanceData[0].version, + } + + const latestValuesByName = {}; + + instanceData.forEach(item => { + const name = item.name.toLowerCase(); + + if (!latestValuesByName[name] || latestValuesByName[name].timestamp < item.timestamp) { + latestValuesByName[name] = { + val: item.val, + timestamp: item.timestamp + }; + } + }); + + Object.keys(latestValuesByName).forEach(name => { + result[name] = latestValuesByName[name].val; + }); + + return result + }).filter(item => item !== null) + + setRuntimeMetricsData(groupedData) + } + const names = ['INCOMING_PACKETS_MIRRORING','OUTGOING_PACKETS_MIRRORING','OUTGOING_REQUESTS_MIRRORING','TOTAL_REQUESTS_RUNTIME','FILTERED_REQUESTS_RUNTIME'] const nameMap = new Map(metricsList.map(obj => [obj._name, { description: obj.description, descriptionName: obj.descriptionName }])); @@ -69,6 +145,10 @@ function Metrics() { setHosts(func.getListOfHosts(apiCollections)) },[]) + useEffect(() => { + getRuntimeMetrics() + }, [runtimeFilterVal]) + useEffect(()=>{ getGraphData(startTime,endTime) },[currDateRange,groupBy]) @@ -135,8 +215,168 @@ function Metrics() { )) ) + const runtimeFilterOptions = [ + { label: '15 Minutes ago', value: '15minutes' }, + { label: '30 Minutes ago', value: '30minutes' }, + { label: '1 hour ago', value: '1hour' }, + { label: '6 hours ago', value: '6hours' }, + { label: '1 Day ago', value: '1day' }, + { label: '3 Days ago', value: '3days' }, + { label: 'Last 7 days', value: 'last7days' } + ] + + const fillMissingTimestamps = (data) => { + const sortedData = data.slice().sort((a, b) => a[0] - b[0]) + const smallestTime = sortedData[0][0] + const largestTime = sortedData[sortedData.length - 1][0] + + const result = [] + const 
timestampMap = new Map() + + for (let timestamp = smallestTime; timestamp <= largestTime; timestamp += 60000) { + timestampMap.set(timestamp, 0) + } + + sortedData.forEach(([timestamp, value]) => { + timestampMap.set(timestamp, value) + }) + + timestampMap.forEach((value, timestamp) => { + result.push([timestamp, value]) + }) + + result.sort((a, b) => a[0] - b[0]) + + return result + } + + const handleOnRuntimeRowClick = async (data) => { + setLoading(true) + const currentEpoch = Math.floor(Date.now() / 1000) + const instanceId = data.id + const runtimeMetricsRes = await settingFunctions.fetchRuntimeMetrics(runtimeFilterOptionsValueMap[runtimeFilterVal], currentEpoch, instanceId) + + const valuesByName = getRuntimeValuesByName(runtimeMetricsRes) + + setShowRuntimeGraph(true) + const componentsArray = [] + + Object.entries(valuesByName).forEach(([name, values]) => { + const readableName = runtimeMetricsNameMap[name.toLowerCase()] + const valuesWithMissingTimestamp = fillMissingTimestamps(values) + + const component = runtimeGraphContainer(valuesWithMissingTimestamp, readableName) + componentsArray.push() + componentsArray.push(component) + }) + + setGraphs(componentsArray) + + setTimeout(() => { + setLoading(false) + }, 100); + } + + const getRuntimeValuesByName = (data) => { + const valueByName = {}; + + data.forEach(item => { + if (!valueByName[item.name]) { + valueByName[item.name] = []; + } + valueByName[item.name].push([(item.timestamp*1000), item.val]) + }); + + for (const name in valueByName) { + if (Object.hasOwnProperty.call(valueByName, name)) { + valueByName[name].sort((a, b) => a.val - b.val); + } + } + + return valueByName; + } + + const headers = [ + { title: "Instance ID", text: "Instance ID", value: "id", showFilter: false }, + { title: "Heartbeat", text: "Heartbeat", value: "heartbeat", showFilter: false }, + { title: "Start Time", text: "Start Time", value: "startTime", showFilter: false }, + { title: "Runtime Version", text: "Runtime Version", value: "version", showFilter: false }, + { title: "Kafka Record Count", text: "Kafka Record Count", value: "rt_kafka_record_count", showFilter: false }, + { title: "Kafka Record Size", text: "Kafka Record Size", value: "rt_kafka_record_size", showFilter: false }, + { title: "Kafka Latency", text: "Kafka Latency", value: "rt_kafka_latency", showFilter: false }, + { title: "Kafka Records Lag Max", text: "Kafka Records Lag Max", value: "rt_kafka_records_lag_max", showFilter: false }, + { title: "Kafka Records Consumed Rate", text: "Kafka Records Consumed Rate", value: "rt_kafka_records_consumed_rate", showFilter: false }, + { title: "Kafka Fetch Average Latency", text: "Kafka Records Consumed Rate", value: "rt_kafka_fetch_avg_latency", showFilter: false }, + { title: "Kafka Bytes Consumed Rate", text: "Kafka Bytes Consumed Rate", value: "rt_kafka_bytes_consumed_rate", showFilter: false }, + ] + + const promotedBulkActions = (selectedResources) => { + const actions = [ + { + content: `Copy Instance ID${func.addPlurality(selectedResources.length)}`, + onAction: () => navigator.clipboard.writeText(selectedResources.toString()) + } + ] + + return actions; + } + + const runtimeTableContainer = ( + + ) + + const processChartData = (data) => { + return [ + { + data: data, + color: "#AEE9D1", + name: "Runtime Values" + }, + ] + } + + const runtimeGraphContainer = (data, title) => ( + + + + ) + return ( - + + + + + setRuntimeFilterVal(val)} /> + + + + { runtimeTableContainer } + + @@ -148,6 +388,14 @@ function Metrics() { {graphContainer} + + ) 
 }
diff --git a/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/module.js b/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/module.js
index 3f1843758e..39ad567a4e 100644
--- a/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/module.js
+++ b/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/module.js
@@ -220,6 +220,20 @@ const settingFunctions = {
         })
         return arr
     },
+    fetchRuntimeInstances: async function(startTimestamp, endTimestamp) {
+        let res = []
+        await settingRequests.fetchRuntimeInstances(startTimestamp, endTimestamp).then((resp) => {
+            res = resp
+        })
+        return res
+    },
+    fetchRuntimeMetrics: async function(startTimestamp, endTimestamp, instanceId) {
+        let res = []
+        await settingRequests.fetchRuntimeMetrics(startTimestamp, endTimestamp, instanceId).then((resp) => {
+            res = resp.runtimeMetrics
+        })
+        return res
+    },
     fetchGraphData: async function(groupBy, startTimestamp, endTimestamp, names, host){
         let trafficData = {}
         await settingRequests.fetchTrafficMetrics(groupBy, startTimestamp, endTimestamp, names, host).then((resp)=>{

From 7ae2a5bd241e4dc678a950674e74056366ee080d Mon Sep 17 00:00:00 2001
From: TangoBeeAkto
Date: Sun, 15 Sep 2024 13:08:49 +0530
Subject: [PATCH 11/11] fix: change default time filter of runtime metrics

---
 .../web/src/apps/dashboard/pages/settings/metrics/Metrics.jsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/metrics/Metrics.jsx b/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/metrics/Metrics.jsx
index 61976c78f6..6d09e63ba3 100644
--- a/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/metrics/Metrics.jsx
+++ b/apps/dashboard/web/polaris_web/web/src/apps/dashboard/pages/settings/metrics/Metrics.jsx
@@ -25,7 +25,7 @@ function Metrics() {
     const [runtimeMetricsData, setRuntimeMetricsData] = useState([])
     const [showRuntimeGraph, setShowRuntimeGraph] = useState(false)
     const [graphs, setGraphs] = useState([])
-    const [runtimeFilterVal, setRuntimeFilterVal] = useState('last7days')
+    const [runtimeFilterVal, setRuntimeFilterVal] = useState('1day')
     const [loading, setLoading] = useState(false)

     const [currDateRange, dispatchCurrDateRange] = useReducer(produce((draft, action) => func.dateRangeReducer(draft, action)), values.ranges[2]);
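
Note on exercising the new runtime-metrics endpoints end to end: the dashboard flow added in these patches goes settingFunctions (module.js) -> settingRequests (api.js) -> /api/fetchRuntimeInstances and /api/fetchAllRuntimeMetrics, with all timestamps in epoch seconds. A minimal usage sketch, assuming the helpers above are available as settingFunctions and that at least one runtime instance reported samples in the chosen window:

    // Look back one day, mirroring the '1day' entry in runtimeFilterOptionsValueMap.
    const endTs = Math.floor(Date.now() / 1000)
    const startTs = endTs - 24 * 60 * 60

    // Raw action response; Metrics.jsx reads instanceIds and runtimeMetrics from it.
    const { instanceIds } = await settingFunctions.fetchRuntimeInstances(startTs, endTs)

    // Array of samples for one instance: { name, timestamp, instanceId, version, val }.
    const samples = await settingFunctions.fetchRuntimeMetrics(startTs, endTs, instanceIds[0])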
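
The charting side of Metrics.jsx groups the fetched samples per metric name, converts their epoch-second timestamps to milliseconds, and pads missing one-minute buckets with zeros before handing [timestamp, value] pairs to GraphMetric/Highcharts. A compact sketch of that transformation (helper names here are illustrative, not the exact functions above; pairs are ordered by their timestamp element):

    // Group samples by metric name as [epochMillis, value] pairs.
    const toSeriesByName = (samples) => {
        const byName = {}
        samples.forEach(({ name, timestamp, val }) => {
            (byName[name] = byName[name] || []).push([timestamp * 1000, val])
        })
        Object.values(byName).forEach(points => points.sort((a, b) => a[0] - b[0]))
        return byName
    }

    // Fill one-minute gaps with zeros so the area chart does not interpolate
    // across scrape intervals where no sample arrived.
    const padMinuteGaps = (points) => {
        if (points.length === 0) return points
        const filled = new Map()
        const first = points[0][0]
        const last = points[points.length - 1][0]
        for (let t = first; t <= last; t += 60000) filled.set(t, 0)
        points.forEach(([t, v]) => filled.set(t, v))
        return Array.from(filled.entries()).sort((a, b) => a[0] - b[0])
    }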