diff --git a/apps/api-analyser/src/main/java/com/akto/analyser/Main.java b/apps/api-analyser/src/main/java/com/akto/analyser/Main.java
index 898b3cefcd..127d8686ae 100644
--- a/apps/api-analyser/src/main/java/com/akto/analyser/Main.java
+++ b/apps/api-analyser/src/main/java/com/akto/analyser/Main.java
@@ -6,6 +6,7 @@
 import com.akto.dao.context.Context;
 import com.akto.dto.AccountSettings;
 import com.akto.dto.HttpResponseParams;
+import com.akto.log.LoggerMaker;
 import com.akto.parsers.HttpCallParser;
 import com.mongodb.ConnectionString;
 import com.mongodb.client.model.Updates;
@@ -23,7 +24,7 @@ public class Main {
 
     private Consumer<String, String> consumer;
 
-    private static final Logger logger = LoggerFactory.getLogger(Main.class);
+    private static final LoggerMaker loggerMaker = new LoggerMaker(Main.class);
 
     public static void main(String[] args) {
         String mongoURI = System.getenv("AKTO_MONGO_CONN");;
@@ -70,7 +71,7 @@ public void run() {
                 ConsumerRecords<String, String> records = main.consumer.poll(Duration.ofMillis(10000));
                 main.consumer.commitSync();
                 for (ConsumerRecord<String, String> r: records) {
-                    if ( (i<1000 && i%100 == 0) || (i>10_000 && i%10_000==0)) logger.info(i+"");
+                    if ( (i<1000 && i%100 == 0) || (i>10_000 && i%10_000==0)) loggerMaker.infoAndAddToDb("Count: " + i, LoggerMaker.LogDb.ANALYSER);
                     i ++;
 
                     try {
@@ -84,7 +85,7 @@ public void run() {
                         resourceAnalyser.analyse(httpResponseParams);
                     } catch (Exception e) {
                         // todo: check cause
-                        logger.error("Error parsing http response params : " + e.getMessage() + " " + e.getCause());
+                        loggerMaker.errorAndAddToDb("Error parsing http response params : " + e.getMessage() + " " + e.getCause(), LoggerMaker.LogDb.ANALYSER);
                     }
                 }
             }
diff --git a/apps/api-analyser/src/main/java/com/akto/analyser/ResourceAnalyser.java b/apps/api-analyser/src/main/java/com/akto/analyser/ResourceAnalyser.java
index d6771f23cf..f20f7b1482 100644
--- a/apps/api-analyser/src/main/java/com/akto/analyser/ResourceAnalyser.java
+++ b/apps/api-analyser/src/main/java/com/akto/analyser/ResourceAnalyser.java
@@ -4,6 +4,7 @@
 import com.akto.dao.context.Context;
 import com.akto.dto.*;
 import com.akto.dto.type.*;
+import com.akto.log.LoggerMaker;
 import com.akto.parsers.HttpCallParser;
 import com.akto.runtime.APICatalogSync;
 import com.akto.runtime.URLAggregator;
@@ -13,6 +14,7 @@
 import com.google.common.hash.BloomFilter;
 import com.google.common.hash.Funnels;
 import com.mongodb.BasicDBObject;
+import com.mongodb.bulk.BulkWriteResult;
 import com.mongodb.client.model.*;
 import org.bson.conversions.Bson;
 
@@ -25,6 +27,8 @@ public class ResourceAnalyser {
 
     int last_sync = 0;
 
+    private static final LoggerMaker loggerMaker = new LoggerMaker(ResourceAnalyser.class);
+
     public ResourceAnalyser(int duplicateCheckerBfSize, double duplicateCheckerBfFpp, int valuesBfSize, double valuesBfFpp) {
         duplicateCheckerBF = BloomFilter.create(
                 Funnels.stringFunnel(Charsets.UTF_8), duplicateCheckerBfSize, duplicateCheckerBfFpp
@@ -81,6 +85,8 @@ public URLStatic matchWithUrlStatic(int apiCollectionId, String url, String meth
         return null;
     }
 
+    private final Set<String> hostsSeen = new HashSet<>();
+
     public void analyse(HttpResponseParams responseParams) {
         if (responseParams.statusCode < 200 || responseParams.statusCode >= 300) return;
 
@@ -94,9 +100,15 @@ public void analyse(HttpResponseParams responseParams) {
         // user id
         Map<String, List<String>> headers = requestParams.getHeaders();
-        if (headers == null) return;
+        if (headers == null) {
+            loggerMaker.infoAndAddToDb("No headers", LoggerMaker.LogDb.ANALYSER);
+            return;
+        }
         List<String> ipList = headers.get(X_FORWARDED_FOR);
-        if (ipList == null || ipList.isEmpty()) return;
+        if (ipList == null || ipList.isEmpty()) {
+            loggerMaker.infoAndAddToDb("IP not found: " + headers.keySet(), LoggerMaker.LogDb.ANALYSER);
+            return;
+        }
         String userId = ipList.get(0);
 
         // get actual api collection id
@@ -104,7 +116,12 @@ public void analyse(HttpResponseParams responseParams) {
         String hostName = HttpCallParser.getHeaderValue(requestParams.getHeaders(), "host");
         apiCollectionId = findTrueApiCollectionId(apiCollectionId, hostName, responseParams.getSource());
 
-        if (apiCollectionId == null) return;
+        if (hostName != null) hostsSeen.add(hostName);
+
+        if (apiCollectionId == null) {
+            loggerMaker.infoAndAddToDb("API collection not found: " + apiCollectionId + " " + hostName + " " + responseParams.getSource(), LoggerMaker.LogDb.ANALYSER);
+            return;
+        }
 
         String method = requestParams.getMethod();
 
@@ -217,6 +234,8 @@ public void analysePayload(Object paramObject, String param, String combinedUrl,
 
     public void buildCatalog() {
         List<ApiInfo.ApiInfoKey> apis = SingleTypeInfoDao.instance.fetchEndpointsInCollection(-1);
+        loggerMaker.infoAndAddToDb("APIs fetched from db: " + apis.size(), LoggerMaker.LogDb.ANALYSER);
+
         for (ApiInfo.ApiInfoKey apiInfoKey: apis) {
 
             int apiCollectionId = apiInfoKey.getApiCollectionId();
@@ -245,20 +264,29 @@ public void buildCatalog() {
 
     public void syncWithDb() {
+        loggerMaker.infoAndAddToDb("Hosts seen till now: " + hostsSeen, LoggerMaker.LogDb.ANALYSER);
+
         buildCatalog();
         populateHostNameToIdMap();
 
         List<WriteModel<SingleTypeInfo>> dbUpdates = getDbUpdatesForSingleTypeInfo();
-        System.out.println("total count: " + dbUpdates.size());
+        loggerMaker.infoAndAddToDb("total db updates count: " + dbUpdates.size(), LoggerMaker.LogDb.ANALYSER);
         countMap = new HashMap<>();
         last_sync = Context.now();
 
         if (dbUpdates.size() > 0) {
-            SingleTypeInfoDao.instance.getMCollection().bulkWrite(dbUpdates);
+            BulkWriteResult bulkWriteResult = SingleTypeInfoDao.instance.getMCollection().bulkWrite(dbUpdates);
+            loggerMaker.infoAndAddToDb("Modified count: " + bulkWriteResult.getModifiedCount(), LoggerMaker.LogDb.ANALYSER);
+            loggerMaker.infoAndAddToDb("Inserted count: " + bulkWriteResult.getInsertedCount(), LoggerMaker.LogDb.ANALYSER);
+            loggerMaker.infoAndAddToDb("Matched count: " + bulkWriteResult.getMatchedCount(), LoggerMaker.LogDb.ANALYSER);
+            loggerMaker.infoAndAddToDb("Deleted count: " + bulkWriteResult.getDeletedCount(), LoggerMaker.LogDb.ANALYSER);
+            loggerMaker.infoAndAddToDb("Db updates done", LoggerMaker.LogDb.ANALYSER);
         }
     }
 
     public List<WriteModel<SingleTypeInfo>> getDbUpdatesForSingleTypeInfo() {
         List<WriteModel<SingleTypeInfo>> bulkUpdates = new ArrayList<>();
+        loggerMaker.infoAndAddToDb("countMap keySet size: " + countMap.size(), LoggerMaker.LogDb.ANALYSER);
+
         for (SingleTypeInfo singleTypeInfo: countMap.values()) {
             if (singleTypeInfo.getUniqueCount() == 0 && singleTypeInfo.getPublicCount() == 0) continue;
             Bson filter = SingleTypeInfoDao.createFiltersWithoutSubType(singleTypeInfo);
@@ -269,6 +297,11 @@ public List<WriteModel<SingleTypeInfo>> getDbUpdatesForSingleTypeInfo() {
             bulkUpdates.add(new UpdateManyModel<>(filter, update, new UpdateOptions().upsert(false)));
         }
 
+        int i = bulkUpdates.size();
+        int total = countMap.values().size();
+
+        loggerMaker.infoAndAddToDb("bulkUpdates: " + i + " total countMap size: " + total, LoggerMaker.LogDb.ANALYSER);
+
         return bulkUpdates;
     }
 
@@ -328,6 +361,7 @@ public void populateHostNameToIdMap() {
             String key = apiCollection.getHostName() + "$" + apiCollection.getVxlanId();
             hostNameToIdMap.put(key, apiCollection.getId());
         }
+        loggerMaker.infoAndAddToDb("hostNameToIdMap: " + hostNameToIdMap, LoggerMaker.LogDb.ANALYSER);
     }
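The syncWithDb() hunk above stops discarding the driver's return value: the BulkWriteResult from bulkWrite() is what feeds the four count log lines. Below is a minimal, self-contained sketch of that pattern against a plain MongoCollection<Document>, not the project's code; the URI, database, collection, and field names are placeholders.

import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.*;
import org.bson.Document;

import java.util.ArrayList;
import java.util.List;

public class BulkWriteCountsSketch {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> coll = client.getDatabase("demo").getCollection("single_type_info");

            // Same shape as getDbUpdatesForSingleTypeInfo(): update-only models, upsert disabled.
            List<WriteModel<Document>> updates = new ArrayList<>();
            updates.add(new UpdateManyModel<>(Filters.eq("url", "/api/books"),
                    Updates.inc("uniqueCount", 5), new UpdateOptions().upsert(false)));
            updates.add(new UpdateManyModel<>(Filters.eq("url", "/api/cars"),
                    Updates.inc("publicCount", 2), new UpdateOptions().upsert(false)));

            BulkWriteResult result = coll.bulkWrite(updates);

            // These are the four counters syncWithDb() now writes to the ANALYSER log db.
            System.out.println("matched:  " + result.getMatchedCount());
            System.out.println("modified: " + result.getModifiedCount());
            System.out.println("inserted: " + result.getInsertedCount()); // stays 0 here: no inserts, upsert(false)
            System.out.println("deleted:  " + result.getDeletedCount());
        }
    }
}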
diff --git a/apps/dashboard/src/main/java/com/akto/action/ParamStateAction.java b/apps/dashboard/src/main/java/com/akto/action/ParamStateAction.java
index 8b3f36ba4f..509c15be94 100644
--- a/apps/dashboard/src/main/java/com/akto/action/ParamStateAction.java
+++ b/apps/dashboard/src/main/java/com/akto/action/ParamStateAction.java
@@ -2,8 +2,10 @@
 
 import com.akto.dao.SingleTypeInfoDao;
 import com.akto.dto.type.SingleTypeInfo;
+import com.akto.log.LoggerMaker;
 import com.mongodb.client.MongoCursor;
 import com.mongodb.client.model.Aggregates;
+import com.mongodb.client.model.EstimatedDocumentCountOptions;
 import com.mongodb.client.model.Filters;
 import com.mongodb.client.model.Projections;
 
@@ -15,6 +17,7 @@
 
 public class ParamStateAction extends UserAction {
 
+    private static final LoggerMaker loggerMaker = new LoggerMaker(ParamStateAction.class);
     @Override
     public String execute() {
         return SUCCESS.toUpperCase();
@@ -56,6 +59,17 @@ public String fetchParamsStatus() {
             privateSingleTypeInfo.add(singleTypeInfo);
         }
 
+        loggerMaker.infoAndAddToDb("Found " + privateSingleTypeInfo.size() + " private STIs", LoggerMaker.LogDb.DASHBOARD);
+
+        if (privateSingleTypeInfo.isEmpty()) {
+            Bson filter = Filters.or(
+                    Filters.exists(SingleTypeInfo._UNIQUE_COUNT),
+                    Filters.gt(SingleTypeInfo._UNIQUE_COUNT, 0)
+            );
+            SingleTypeInfo singleTypeInfo = SingleTypeInfoDao.instance.findOne(filter);
+            loggerMaker.infoAndAddToDb("Did find STI with unique count url=" + singleTypeInfo.getUrl() + " count=" + singleTypeInfo.uniqueCount, LoggerMaker.LogDb.DASHBOARD);
+        }
+
         return SUCCESS.toUpperCase();
     }
diff --git a/apps/dashboard/src/main/java/com/akto/listener/InitializerListener.java b/apps/dashboard/src/main/java/com/akto/listener/InitializerListener.java
index 07f7588e0e..84be2b13b6 100644
--- a/apps/dashboard/src/main/java/com/akto/listener/InitializerListener.java
+++ b/apps/dashboard/src/main/java/com/akto/listener/InitializerListener.java
@@ -767,6 +767,7 @@ public void runInitializerFunctions() {
         RuntimeLogsDao.instance.createIndicesIfAbsent();
         LogsDao.instance.createIndicesIfAbsent();
         DashboardLogsDao.instance.createIndicesIfAbsent();
+        AnalyserLogsDao.instance.createIndicesIfAbsent();
         BackwardCompatibility backwardCompatibility = BackwardCompatibilityDao.instance.findOne(new BasicDBObject());
         if (backwardCompatibility == null) {
             backwardCompatibility = new BackwardCompatibility();
diff --git a/apps/dashboard/web/src/apps/dashboard/views/settings/components/Health.vue b/apps/dashboard/web/src/apps/dashboard/views/settings/components/Health.vue
index f137215875..03af2fa146 100644
--- a/apps/dashboard/web/src/apps/dashboard/views/settings/components/Health.vue
+++ b/apps/dashboard/web/src/apps/dashboard/views/settings/components/Health.vue
@@ -13,7 +13,8 @@
                 :items="[
                     {text: 'TESTING', value: 'TESTING'},
                     {text: 'RUNTIME', value: 'RUNTIME'},
-                    {text: 'DASHBOARD', value: 'DASHBOARD'}
+                    {text: 'DASHBOARD', value: 'DASHBOARD'},
+                    {text: 'ANALYSER', value: 'ANALYSER'}
                 ]"
                 v-model="logGroupName"
                 label="Select log group"
diff --git a/libs/dao/src/main/java/com/akto/dao/AnalyserLogsDao.java b/libs/dao/src/main/java/com/akto/dao/AnalyserLogsDao.java
new file mode 100644
index 0000000000..36bf3bf4cb
--- /dev/null
+++ b/libs/dao/src/main/java/com/akto/dao/AnalyserLogsDao.java
@@ -0,0 +1,54 @@
+package com.akto.dao;
+
+import com.akto.dao.context.Context;
+import com.akto.dto.Log;
+import com.mongodb.client.MongoCursor;
+import com.mongodb.client.MongoDatabase;
+import com.mongodb.client.model.CreateCollectionOptions;
+import com.mongodb.client.model.Indexes;
+import org.bson.Document;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class AnalyserLogsDao extends AccountsContextDao<Log> {
+
+    public static final AnalyserLogsDao instance = new AnalyserLogsDao();
+    public void createIndicesIfAbsent() {
+        boolean exists = false;
+        String dbName = Context.accountId.get()+"";
+        MongoDatabase db = clients[0].getDatabase(dbName);
+        for (String col: db.listCollectionNames()){
+            if (getCollName().equalsIgnoreCase(col)){
+                exists = true;
+                break;
+            }
+        };
+
+        if (!exists) {
+            db.createCollection(getCollName(), new CreateCollectionOptions().capped(true).maxDocuments(100_000).sizeInBytes(100_000_000));
+        }
+
+        MongoCursor<Document> cursor = db.getCollection(getCollName()).listIndexes().cursor();
+        List<Document> indices = new ArrayList<>();
+
+        while (cursor.hasNext()) {
+            indices.add(cursor.next());
+        }
+
+        if (indices.size() == 1) {
+            instance.getMCollection().createIndex(Indexes.descending(Log.TIMESTAMP));
+        }
+    }
+
+    @Override
+    public String getCollName() {
+        return "logs_analyser";
+    }
+
+    @Override
+    public Class<Log> getClassT() {
+        return Log.class;
+    }
+
+}
diff --git a/libs/utils/src/main/java/com/akto/log/LoggerMaker.java b/libs/utils/src/main/java/com/akto/log/LoggerMaker.java
index 531297897d..49df68d13c 100644
--- a/libs/utils/src/main/java/com/akto/log/LoggerMaker.java
+++ b/libs/utils/src/main/java/com/akto/log/LoggerMaker.java
@@ -1,5 +1,6 @@
 package com.akto.log;
 
+import com.akto.dao.AnalyserLogsDao;
 import com.akto.dao.DashboardLogsDao;
 import com.akto.dao.LogsDao;
 import com.akto.dao.RuntimeLogsDao;
@@ -24,7 +25,7 @@ public class LoggerMaker {
     private static final int oneMinute = 60;
 
     public enum LogDb {
-        TESTING,RUNTIME,DASHBOARD
+        TESTING,RUNTIME,DASHBOARD, ANALYSER
     }
 
     public LoggerMaker(Class c) {
@@ -76,6 +77,9 @@ private void insert(String info, String key, LogDb db) {
                 break;
             case DASHBOARD:
                 DashboardLogsDao.instance.insertOne(log);
+                break;
+            case ANALYSER:
+                AnalyserLogsDao.instance.insertOne(log);
         }
         logCount++;
     }
@@ -91,7 +94,7 @@ public List<Log> fetchLogRecords(int logFetchStartTime, int logFetchEndTime, Log
         Bson filters = Filters.and(
             Filters.gte(Log.TIMESTAMP, logFetchStartTime),
-            Filters.lte(Log.TIMESTAMP, logFetchEndTime)
+            Filters.lt(Log.TIMESTAMP, logFetchEndTime)
         );
 
         switch(db){
             case TESTING:
@@ -102,6 +105,9 @@ public List<Log> fetchLogRecords(int logFetchStartTime, int logFetchEndTime, Log
                 break;
             case DASHBOARD:
                 logs = DashboardLogsDao.instance.findAll(filters, Projections.include("log", Log.TIMESTAMP));
+                break;
+            case ANALYSER:
+                logs = AnalyserLogsDao.instance.findAll(filters, Projections.include("log", Log.TIMESTAMP));
         }
         return logs;
     }
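The AnalyserLogsDao added above gives the analyser the same capped-collection log store the other services use: logs_analyser holds at most 100,000 documents or 100 MB, and a descending timestamp index backs the [start, end) range query in fetchLogRecords(). The following is a rough standalone sketch of that setup using the plain MongoDB Java driver; the connection URI, the dummy account-id database name, and the assumption that Log.TIMESTAMP resolves to the field key "timestamp" are all placeholders, not the project's actual values.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.client.model.Indexes;
import org.bson.Document;

public class CappedLogCollectionSketch {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            // AnalyserLogsDao derives the database name from the account id; "1000000" is a dummy value here.
            MongoDatabase db = client.getDatabase("1000000");

            boolean exists = false;
            for (String col : db.listCollectionNames()) {
                if ("logs_analyser".equalsIgnoreCase(col)) { exists = true; break; }
            }

            // Capped collection: fixed size, oldest documents are evicted automatically once either cap is hit.
            if (!exists) {
                db.createCollection("logs_analyser",
                        new CreateCollectionOptions().capped(true).maxDocuments(100_000).sizeInBytes(100_000_000));
            }

            // A fresh collection only carries the default _id index; add the timestamp index the range query relies on.
            MongoCollection<Document> coll = db.getCollection("logs_analyser");
            coll.createIndex(Indexes.descending("timestamp"));

            for (Document idx : coll.listIndexes()) {
                System.out.println("index on logs_analyser: " + idx.get("name"));
            }
        }
    }
}

Because the collection is capped, old analyser log entries age out on their own, so no TTL index or cleanup job is needed on top of this.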