Skip to content

Commit

Permalink
Merge pull request #631 from akto-api-security/feature/add_analyser_logs
Browse files Browse the repository at this point in the history
added logs to analyser
  • Loading branch information
ankush-jain-akto authored Feb 21, 2023
2 parents 97b162a + 3c5a4eb commit a8e0523
Show file tree
Hide file tree
Showing 7 changed files with 122 additions and 11 deletions.
7 changes: 4 additions & 3 deletions apps/api-analyser/src/main/java/com/akto/analyser/Main.java
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import com.akto.dao.context.Context;
import com.akto.dto.AccountSettings;
import com.akto.dto.HttpResponseParams;
import com.akto.log.LoggerMaker;
import com.akto.parsers.HttpCallParser;
import com.mongodb.ConnectionString;
import com.mongodb.client.model.Updates;
Expand All @@ -23,7 +24,7 @@

public class Main {
private Consumer<String, String> consumer;
private static final Logger logger = LoggerFactory.getLogger(Main.class);
private static final LoggerMaker loggerMaker = new LoggerMaker(Main.class);

public static void main(String[] args) {
String mongoURI = System.getenv("AKTO_MONGO_CONN");;
Expand Down Expand Up @@ -70,7 +71,7 @@ public void run() {
ConsumerRecords<String, String> records = main.consumer.poll(Duration.ofMillis(10000));
main.consumer.commitSync();
for (ConsumerRecord<String,String> r: records) {
if ( (i<1000 && i%100 == 0) || (i>10_000 && i%10_000==0)) logger.info(i+"");
if ( (i<1000 && i%100 == 0) || (i>10_000 && i%10_000==0)) loggerMaker.infoAndAddToDb("Count: " + i, LoggerMaker.LogDb.ANALYSER);
i ++;

try {
Expand All @@ -84,7 +85,7 @@ public void run() {
resourceAnalyser.analyse(httpResponseParams);
} catch (Exception e) {
// todo: check cause
logger.error("Error parsing http response params : " + e.getMessage() + " " + e.getCause());
loggerMaker.errorAndAddToDb("Error parsing http response params : " + e.getMessage() + " " + e.getCause(), LoggerMaker.LogDb.ANALYSER);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import com.akto.dao.context.Context;
import com.akto.dto.*;
import com.akto.dto.type.*;
import com.akto.log.LoggerMaker;
import com.akto.parsers.HttpCallParser;
import com.akto.runtime.APICatalogSync;
import com.akto.runtime.URLAggregator;
Expand All @@ -13,6 +14,7 @@
import com.google.common.hash.BloomFilter;
import com.google.common.hash.Funnels;
import com.mongodb.BasicDBObject;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.model.*;
import org.bson.conversions.Bson;

Expand All @@ -25,6 +27,8 @@ public class ResourceAnalyser {

int last_sync = 0;

private static final LoggerMaker loggerMaker = new LoggerMaker(ResourceAnalyser.class);

public ResourceAnalyser(int duplicateCheckerBfSize, double duplicateCheckerBfFpp, int valuesBfSize, double valuesBfFpp) {
duplicateCheckerBF = BloomFilter.create(
Funnels.stringFunnel(Charsets.UTF_8), duplicateCheckerBfSize, duplicateCheckerBfFpp
Expand Down Expand Up @@ -81,6 +85,8 @@ public URLStatic matchWithUrlStatic(int apiCollectionId, String url, String meth
return null;
}

private final Set<String> hostsSeen = new HashSet<>();


public void analyse(HttpResponseParams responseParams) {
if (responseParams.statusCode < 200 || responseParams.statusCode >= 300) return;
Expand All @@ -94,17 +100,28 @@ public void analyse(HttpResponseParams responseParams) {

// user id
Map<String,List<String>> headers = requestParams.getHeaders();
if (headers == null) return;
if (headers == null) {
loggerMaker.infoAndAddToDb("No headers", LoggerMaker.LogDb.ANALYSER);
return;
}
List<String> ipList = headers.get(X_FORWARDED_FOR);
if (ipList == null || ipList.isEmpty()) return;
if (ipList == null || ipList.isEmpty()) {
loggerMaker.infoAndAddToDb("IP not found: " + headers.keySet(), LoggerMaker.LogDb.ANALYSER);
return;
}
String userId = ipList.get(0);

// get actual api collection id
Integer apiCollectionId = requestParams.getApiCollectionId();
String hostName = HttpCallParser.getHeaderValue(requestParams.getHeaders(), "host");
apiCollectionId = findTrueApiCollectionId(apiCollectionId, hostName, responseParams.getSource());

if (apiCollectionId == null) return;
if (hostName != null) hostsSeen.add(hostName);

if (apiCollectionId == null) {
loggerMaker.infoAndAddToDb("API collection not found: " + apiCollectionId + " " + hostName + " " + responseParams.getSource(), LoggerMaker.LogDb.ANALYSER);
return;
}

String method = requestParams.getMethod();

Expand Down Expand Up @@ -217,6 +234,8 @@ public void analysePayload(Object paramObject, String param, String combinedUrl,

public void buildCatalog() {
List<ApiInfo.ApiInfoKey> apis = SingleTypeInfoDao.instance.fetchEndpointsInCollection(-1);
loggerMaker.infoAndAddToDb("APIs fetched from db: " + apis.size(), LoggerMaker.LogDb.ANALYSER);

for (ApiInfo.ApiInfoKey apiInfoKey: apis) {

int apiCollectionId = apiInfoKey.getApiCollectionId();
Expand Down Expand Up @@ -245,20 +264,29 @@ public void buildCatalog() {


/**
 * Flushes the in-memory parameter counts to MongoDB.
 *
 * Rebuilds the catalog and host-name map first, converts {@code countMap}
 * into bulk update models, resets the map, and performs a single bulk write.
 * Progress and bulk-write statistics are mirrored to the analyser log
 * collection via {@link LoggerMaker}.
 *
 * NOTE(review): the pasted diff contained both the old and new variants of
 * two statements (a duplicate stdout log and a duplicate bulkWrite call);
 * only the updated statements are kept here so the batch is written once.
 */
public void syncWithDb() {
    loggerMaker.infoAndAddToDb("Hosts seen till now: " + hostsSeen, LoggerMaker.LogDb.ANALYSER);

    buildCatalog();
    populateHostNameToIdMap();

    List<WriteModel<SingleTypeInfo>> dbUpdates = getDbUpdatesForSingleTypeInfo();
    loggerMaker.infoAndAddToDb("total db updates count: " + dbUpdates.size(), LoggerMaker.LogDb.ANALYSER);

    // Reset the accumulator and sync marker before writing, matching the
    // original statement order.
    countMap = new HashMap<>();
    last_sync = Context.now();

    if (dbUpdates.size() > 0) {
        // Single bulk write; the result counters are logged for observability.
        BulkWriteResult bulkWriteResult = SingleTypeInfoDao.instance.getMCollection().bulkWrite(dbUpdates);
        loggerMaker.infoAndAddToDb("Modified count: " + bulkWriteResult.getModifiedCount(), LoggerMaker.LogDb.ANALYSER);
        loggerMaker.infoAndAddToDb("Inserted count: " + bulkWriteResult.getInsertedCount(), LoggerMaker.LogDb.ANALYSER);
        loggerMaker.infoAndAddToDb("Matched count: " + bulkWriteResult.getMatchedCount(), LoggerMaker.LogDb.ANALYSER);
        loggerMaker.infoAndAddToDb("Deleted count: " + bulkWriteResult.getDeletedCount(), LoggerMaker.LogDb.ANALYSER);
        loggerMaker.infoAndAddToDb("Db updates done", LoggerMaker.LogDb.ANALYSER);
    }
}

public List<WriteModel<SingleTypeInfo>> getDbUpdatesForSingleTypeInfo() {
List<WriteModel<SingleTypeInfo>> bulkUpdates = new ArrayList<>();
loggerMaker.infoAndAddToDb("countMap keySet size: " + countMap.size(), LoggerMaker.LogDb.ANALYSER);

for (SingleTypeInfo singleTypeInfo: countMap.values()) {
if (singleTypeInfo.getUniqueCount() == 0 && singleTypeInfo.getPublicCount() == 0) continue;
Bson filter = SingleTypeInfoDao.createFiltersWithoutSubType(singleTypeInfo);
Expand All @@ -269,6 +297,11 @@ public List<WriteModel<SingleTypeInfo>> getDbUpdatesForSingleTypeInfo() {
bulkUpdates.add(new UpdateManyModel<>(filter, update, new UpdateOptions().upsert(false)));
}

int i = bulkUpdates.size();
int total = countMap.values().size();

loggerMaker.infoAndAddToDb("bulkUpdates: " + i + " total countMap size: " + total, LoggerMaker.LogDb.ANALYSER);

return bulkUpdates;
}

Expand Down Expand Up @@ -328,6 +361,7 @@ public void populateHostNameToIdMap() {
String key = apiCollection.getHostName() + "$" + apiCollection.getVxlanId();
hostNameToIdMap.put(key, apiCollection.getId());
}
loggerMaker.infoAndAddToDb("hostNameToIdMap: " + hostNameToIdMap, LoggerMaker.LogDb.ANALYSER);
}


Expand Down
14 changes: 14 additions & 0 deletions apps/dashboard/src/main/java/com/akto/action/ParamStateAction.java
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@

import com.akto.dao.SingleTypeInfoDao;
import com.akto.dto.type.SingleTypeInfo;
import com.akto.log.LoggerMaker;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.EstimatedDocumentCountOptions;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.Projections;

Expand All @@ -15,6 +17,7 @@

public class ParamStateAction extends UserAction {

private static final LoggerMaker loggerMaker = new LoggerMaker(ParamStateAction.class);
@Override
public String execute() {
return SUCCESS.toUpperCase();
Expand Down Expand Up @@ -56,6 +59,17 @@ public String fetchParamsStatus() {
privateSingleTypeInfo.add(singleTypeInfo);
}

loggerMaker.infoAndAddToDb("Found " + privateSingleTypeInfo.size() + " private STIs", LoggerMaker.LogDb.DASHBOARD);

if (privateSingleTypeInfo.isEmpty()) {
Bson filter = Filters.or(
Filters.exists(SingleTypeInfo._UNIQUE_COUNT),
Filters.gt(SingleTypeInfo._UNIQUE_COUNT, 0)
);
SingleTypeInfo singleTypeInfo = SingleTypeInfoDao.instance.findOne(filter);
loggerMaker.infoAndAddToDb("Did find STI with unique count url=" + singleTypeInfo.getUrl() + "count="+ singleTypeInfo.uniqueCount, LoggerMaker.LogDb.DASHBOARD);
}

return SUCCESS.toUpperCase();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -767,6 +767,7 @@ public void runInitializerFunctions() {
RuntimeLogsDao.instance.createIndicesIfAbsent();
LogsDao.instance.createIndicesIfAbsent();
DashboardLogsDao.instance.createIndicesIfAbsent();
AnalyserLogsDao.instance.createIndicesIfAbsent();
BackwardCompatibility backwardCompatibility = BackwardCompatibilityDao.instance.findOne(new BasicDBObject());
if (backwardCompatibility == null) {
backwardCompatibility = new BackwardCompatibility();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@
:items="[
{text: 'TESTING', value: 'TESTING'},
{text: 'RUNTIME', value: 'RUNTIME'},
{text: 'DASHBOARD', value: 'DASHBOARD'}
{text: 'DASHBOARD', value: 'DASHBOARD'},
{text: 'ANALYSER', value: 'ANALYSER'}
]"
v-model="logGroupName"
label="Select log group"
Expand Down
54 changes: 54 additions & 0 deletions libs/dao/src/main/java/com/akto/dao/AnalyserLogsDao.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
package com.akto.dao;

import com.akto.dao.context.Context;
import com.akto.dto.Log;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.client.model.Indexes;
import org.bson.Document;

import java.util.ArrayList;
import java.util.List;

/**
 * DAO for analyser log entries, stored per-account in the capped
 * "logs_analyser" collection.
 */
public class AnalyserLogsDao extends AccountsContextDao<Log> {

    public static final AnalyserLogsDao instance = new AnalyserLogsDao();

    /**
     * Ensures the capped log collection exists for the current account and
     * that it carries a descending index on the log timestamp.
     *
     * NOTE(review): assumes {@code Context.accountId} is set on this thread —
     * confirm callers establish the account context first.
     */
    public void createIndicesIfAbsent() {
        boolean exists = false;
        String dbName = Context.accountId.get() + "";
        MongoDatabase db = clients[0].getDatabase(dbName);
        for (String col : db.listCollectionNames()) {
            if (getCollName().equalsIgnoreCase(col)) {
                exists = true;
                break;
            }
        } // removed stray ';' that followed this loop in the original

        if (!exists) {
            // Capped collection bounds analyser logs at 100k docs / ~100MB so
            // they cannot grow without limit.
            db.createCollection(getCollName(),
                    new CreateCollectionOptions().capped(true).maxDocuments(100_000).sizeInBytes(100_000_000));
        }

        MongoCursor<Document> cursor = db.getCollection(getCollName()).listIndexes().cursor();
        List<Document> indices = new ArrayList<>();

        while (cursor.hasNext()) {
            indices.add(cursor.next());
        }

        // Exactly one index means only the default _id index exists, so the
        // timestamp index has not been created yet.
        if (indices.size() == 1) {
            // call through 'this' rather than the static singleton — identical
            // behavior, clearer inside an instance method
            getMCollection().createIndex(Indexes.descending(Log.TIMESTAMP));
        }
    }

    @Override
    public String getCollName() {
        return "logs_analyser";
    }

    @Override
    public Class<Log> getClassT() {
        return Log.class;
    }

}
10 changes: 8 additions & 2 deletions libs/utils/src/main/java/com/akto/log/LoggerMaker.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package com.akto.log;

import com.akto.dao.AnalyserLogsDao;
import com.akto.dao.DashboardLogsDao;
import com.akto.dao.LogsDao;
import com.akto.dao.RuntimeLogsDao;
Expand All @@ -24,7 +25,7 @@ public class LoggerMaker {
private static final int oneMinute = 60;

public enum LogDb {
TESTING,RUNTIME,DASHBOARD
TESTING,RUNTIME,DASHBOARD, ANALYSER
}

public LoggerMaker(Class<?> c) {
Expand Down Expand Up @@ -76,6 +77,8 @@ private void insert(String info, String key, LogDb db) {
break;
case DASHBOARD:
DashboardLogsDao.instance.insertOne(log);
case ANALYSER:
AnalyserLogsDao.instance.insertOne(log);
}
logCount++;
}
Expand All @@ -91,7 +94,7 @@ public List<Log> fetchLogRecords(int logFetchStartTime, int logFetchEndTime, Log

Bson filters = Filters.and(
Filters.gte(Log.TIMESTAMP, logFetchStartTime),
Filters.lte(Log.TIMESTAMP, logFetchEndTime)
Filters.lt(Log.TIMESTAMP, logFetchEndTime)
);
switch(db){
case TESTING:
Expand All @@ -102,6 +105,9 @@ public List<Log> fetchLogRecords(int logFetchStartTime, int logFetchEndTime, Log
break;
case DASHBOARD:
logs = DashboardLogsDao.instance.findAll(filters, Projections.include("log", Log.TIMESTAMP));
break;
case ANALYSER:
logs = AnalyserLogsDao.instance.findAll(filters, Projections.include("log", Log.TIMESTAMP));
}
return logs;
}
Expand Down

0 comments on commit a8e0523

Please sign in to comment.