Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/modify sample data retrospective #1893

Open
wants to merge 9 commits into
base: master
Choose a base branch
from
173 changes: 171 additions & 2 deletions apps/dashboard/src/main/java/com/akto/utils/Utils.java
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,20 @@
import com.akto.dao.SensitiveSampleDataDao;
import com.akto.dao.SingleTypeInfoDao;
import com.akto.dto.AccountSettings;
import com.akto.dto.ApiInfo;
import com.akto.dto.HttpResponseParams;
import com.akto.dto.OriginalHttpRequest;
import com.akto.dto.OriginalHttpResponse;
import com.akto.dto.SensitiveSampleData;
import com.akto.dto.ApiInfo.ApiInfoKey;
import com.akto.dto.dependency_flow.DependencyFlow;
import com.akto.dto.third_party_access.Credential;
import com.akto.dto.third_party_access.PostmanCredential;
import com.akto.dto.third_party_access.ThirdPartyAccess;
import com.akto.dto.traffic.Key;
import com.akto.dto.traffic.SampleData;
import com.akto.dto.type.SingleTypeInfo;
import com.akto.dto.type.URLMethods;
import com.akto.dto.upload.FileUploadError;
import com.akto.listener.KafkaListener;
import com.akto.listener.RuntimeListener;
Expand All @@ -29,14 +34,24 @@
import com.akto.parsers.HttpCallParser;
import com.akto.runtime.APICatalogSync;
import com.akto.testing.ApiExecutor;
import com.akto.util.Constants;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.google.gson.Gson;
import com.mongodb.BasicDBObject;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.Updates;
import com.mongodb.client.model.WriteModel;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.bson.conversions.Bson;

import java.net.MalformedURLException;
import java.net.URISyntaxException;
Expand All @@ -52,6 +67,7 @@ public class Utils {

private static final LoggerMaker loggerMaker = new LoggerMaker(Utils.class);
private final static ObjectMapper mapper = new ObjectMapper();
private static String id = "_id.";

public static Map<String, String> getAuthMap(JsonNode auth, Map<String, String> variableMap) {
Map<String,String> result = new HashMap<>();
Expand Down Expand Up @@ -600,8 +616,6 @@ public static float getRiskScoreValueFromSeverityScore(float severityScore){

public static void deleteApis(List<Key> toBeDeleted) {

String id = "_id.";

AccountsContextDaoWithRbac.deleteApisPerDao(toBeDeleted, SingleTypeInfoDao.instance, "");
AccountsContextDaoWithRbac.deleteApisPerDao(toBeDeleted, ApiInfoDao.instance, id);
AccountsContextDaoWithRbac.deleteApisPerDao(toBeDeleted, SampleDataDao.instance, id);
Expand All @@ -612,6 +626,161 @@ public static void deleteApis(List<Key> toBeDeleted) {

}

/**
 * Creates a detached clone of a sample-data {@link Key} so the original
 * identity can be kept (e.g. queued for deletion) while the clone is mutated.
 *
 * @param source key to duplicate; all identifying fields are copied
 * @return a new {@link Key} carrying the same field values as {@code source}
 */
private static Key copy(Key source) {
    Key clone = new Key();
    clone.setUrl(source.getUrl());
    clone.setMethod(source.getMethod());
    clone.setApiCollectionId(source.getApiCollectionId());
    clone.setResponseCode(source.getResponseCode());
    clone.setBucketStartEpoch(source.getBucketStartEpoch());
    clone.setBucketEndEpoch(source.getBucketEndEpoch());
    return clone;
}

/**
 * Replaces {@code oldCollId} with {@code newCollId} in a collection-id list,
 * mutating and returning the same list when one is supplied.
 *
 * Fixes over the previous version: a {@code null} input no longer throws NPE
 * (a fresh list is returned instead), and {@code newCollId} is not appended a
 * second time when the list already contains it.
 *
 * @param oldList   current collection ids (may be null; mutated in place otherwise)
 * @param newCollId collection id the API is being moved into
 * @param oldCollId collection id the API is being moved out of
 * @return the updated list containing {@code newCollId} and not {@code oldCollId}
 */
private static List<Integer> getModifiedCollectionIds(List<Integer> oldList, int newCollId, int oldCollId){
    if (oldList == null) {
        oldList = new ArrayList<>();
    }
    oldList.removeIf(num -> num == oldCollId);
    // avoid duplicate entries when the target collection is already listed
    if (!oldList.contains(newCollId)) {
        oldList.add(newCollId);
    }
    return oldList;
}

/**
 * Moves APIs between collections: for each (old Key -> new apiCollectionId)
 * entry in {@code sampleDataToBeMovedMap}, re-inserts the sample data, api
 * info and sensitive sample data under the new collection id, deletes the old
 * documents, and updates/deletes the matching single type infos.
 *
 * Bug fixes over the previous version:
 *  - api-info updates were appended to {@code bulkUpdatesForSampleData}
 *    instead of {@code bulkUpdatesForApiInfo}, so the api-info bulkWrite
 *    never executed and api infos were never moved;
 *  - sensitive-sample-data updates had the same wrong-list bug;
 *  - {@code mapApiInfoKeyToKey.get(...)} results are now null-checked, so an
 *    api-info/sensitive row without a matching sample-data row no longer NPEs;
 *  - log messages now report the actual moved counts.
 *
 * @param sampleDataToBeMovedMap map of the original sample-data Key to the
 *                               target apiCollectionId it should move into
 * @param alreadySeenApis        "url?#?method" strings of APIs whose STIs
 *                               should be re-pointed (others are deleted);
 *                               entries are consumed (removed) as they are used
 */
public static void moveApisFromSampleData(Map<Key,Integer> sampleDataToBeMovedMap, Set<String> alreadySeenApis){

    Map<ApiInfoKey, Key> mapApiInfoKeyToKey = new HashMap<>();
    List<Key> toBeDeleted = new ArrayList<>();

    // ---- 1. sample data: upsert under new collection id, then delete old docs ----
    Bson sampleDataFilter = AccountsContextDaoWithRbac.generateCommonFilter(sampleDataToBeMovedMap, SampleDataDao.instance, id);
    List<SampleData> sampleDataList = SampleDataDao.instance.findAll(sampleDataFilter);
    Iterator<SampleData> sampleDataIterator = sampleDataList.iterator();
    UpdateOptions upsertOptions = new UpdateOptions().upsert(true);

    ArrayList<WriteModel<SampleData>> bulkUpdatesForSampleData = new ArrayList<>();

    while(sampleDataIterator.hasNext()){
        SampleData sampleData = sampleDataIterator.next();
        Key key = sampleData.getId();
        Key oldKey = copy(key);
        int newCollId = sampleDataToBeMovedMap.getOrDefault(key, key.getApiCollectionId());
        ApiInfoKey apiInfoKey = new ApiInfoKey(key.getApiCollectionId(), key.getUrl(), key.getMethod());
        // remember the pre-move key so the api-info and sensitive-data passes
        // below can map their ids back to the original collection
        mapApiInfoKeyToKey.put(apiInfoKey, oldKey);
        if(key.getApiCollectionId() != newCollId){
            key.setApiCollectionId(newCollId);
            sampleData.setId(key);
            sampleData.setCollectionIds(getModifiedCollectionIds(sampleData.getCollectionIds(), newCollId, oldKey.getApiCollectionId()));
            Bson update = SampleDataDao.instance.getUpdateFromSampleData(sampleData);
            bulkUpdatesForSampleData.add(
                new UpdateOneModel<>(Filters.eq(Constants.ID, sampleData.getId()), update, upsertOptions)
            );
            toBeDeleted.add(oldKey);
        }else{
            // target collection equals current one: nothing to move
            sampleDataIterator.remove();
        }
    }

    if(!bulkUpdatesForSampleData.isEmpty()){
        SampleDataDao.instance.getMCollection().bulkWrite(bulkUpdatesForSampleData);
        loggerMaker.infoAndAddToDb("Inserted " + bulkUpdatesForSampleData.size() + " new sample data into database.");
        AccountsContextDaoWithRbac.deleteApisPerDao(toBeDeleted, SampleDataDao.instance, id);
    }
    toBeDeleted.clear();

    // ---- 2. api info ----
    ArrayList<WriteModel<ApiInfo>> bulkUpdatesForApiInfo = new ArrayList<>();
    Bson apiInfoFilter = AccountsContextDaoWithRbac.generateCommonFilter(sampleDataToBeMovedMap, ApiInfoDao.instance, id);
    List<ApiInfo> apiInfos = ApiInfoDao.instance.findAll(apiInfoFilter);
    Iterator<ApiInfo> apiInfoIterator = apiInfos.iterator();
    while(apiInfoIterator.hasNext()){
        ApiInfo apiInfo = apiInfoIterator.next();
        ApiInfoKey apiInfoKey = apiInfo.getId();
        Key mappedKey = mapApiInfoKeyToKey.get(apiInfoKey);
        if(mappedKey == null){
            // no matching sample data was processed for this api info; skip it
            apiInfoIterator.remove();
            continue;
        }
        int newCollId = sampleDataToBeMovedMap.getOrDefault(mappedKey, mappedKey.getApiCollectionId());
        if(mappedKey.getApiCollectionId() != newCollId){
            apiInfoKey.setApiCollectionId(newCollId);
            apiInfo.setId(apiInfoKey);
            apiInfo.setCollectionIds(getModifiedCollectionIds(apiInfo.getCollectionIds(), newCollId, mappedKey.getApiCollectionId()));
            toBeDeleted.add(mappedKey);
            Bson update = ApiInfoDao.instance.getUpdateFromApiInfo(apiInfo);
            // BUGFIX: previously added to bulkUpdatesForSampleData, leaving this
            // list empty and silently skipping the api-info move
            bulkUpdatesForApiInfo.add(
                new UpdateOneModel<>(Filters.eq(Constants.ID, apiInfoKey), update, upsertOptions)
            );
        }else{
            apiInfoIterator.remove();
        }
    }

    if(!bulkUpdatesForApiInfo.isEmpty()){
        ApiInfoDao.instance.getMCollection().bulkWrite(bulkUpdatesForApiInfo);
        loggerMaker.infoAndAddToDb("Inserted " + bulkUpdatesForApiInfo.size() + " new api infos into database.");
        AccountsContextDaoWithRbac.deleteApisPerDao(toBeDeleted, ApiInfoDao.instance, id);
    }
    toBeDeleted.clear();

    // ---- 3. sensitive sample data ----
    ArrayList<WriteModel<SensitiveSampleData>> bulkUpdatesForSensitiveSampleData = new ArrayList<>();
    Bson sensitiveDataFilter = AccountsContextDaoWithRbac.generateCommonFilter(sampleDataToBeMovedMap, SensitiveSampleDataDao.instance, id);
    List<SensitiveSampleData> sensitiveSampleDataList = SensitiveSampleDataDao.instance.findAll(sensitiveDataFilter);
    Iterator<SensitiveSampleData> sensitiveSampleDataIterator = sensitiveSampleDataList.iterator();
    while(sensitiveSampleDataIterator.hasNext()){
        SensitiveSampleData sampleData = sensitiveSampleDataIterator.next();
        SingleTypeInfo.ParamId key = sampleData.getId();
        ApiInfoKey apiInfoKey = new ApiInfoKey(key.getApiCollectionId(), key.getUrl(), URLMethods.Method.valueOf(key.getMethod()));
        Key mappedKey = mapApiInfoKeyToKey.get(apiInfoKey);
        if(mappedKey == null){
            // no matching sample data for this sensitive param; skip it
            sensitiveSampleDataIterator.remove();
            continue;
        }
        int newCollId = sampleDataToBeMovedMap.getOrDefault(mappedKey, mappedKey.getApiCollectionId());

        if(key.getApiCollectionId() != newCollId){
            key.setApiCollectionId(newCollId);
            sampleData.setId(key);
            sampleData.setCollectionIds(getModifiedCollectionIds(sampleData.getCollectionIds(), newCollId, mappedKey.getApiCollectionId()));
            Bson update = SensitiveSampleDataDao.instance.getUpdateFromSampleData(sampleData);
            // BUGFIX: previously added to bulkUpdatesForSampleData, leaving this
            // list empty and silently skipping the sensitive-data move
            bulkUpdatesForSensitiveSampleData.add(
                new UpdateOneModel<>(Filters.eq(Constants.ID, sampleData.getId()), update, upsertOptions)
            );
            toBeDeleted.add(mappedKey);
        }else{
            sensitiveSampleDataIterator.remove();
        }
    }
    if(!bulkUpdatesForSensitiveSampleData.isEmpty()){
        SensitiveSampleDataDao.instance.getMCollection().bulkWrite(bulkUpdatesForSensitiveSampleData);
        loggerMaker.infoAndAddToDb("Inserted " + bulkUpdatesForSensitiveSampleData.size() + " new sensitive sample data into database.");
        AccountsContextDaoWithRbac.deleteApisPerDao(toBeDeleted, SensitiveSampleDataDao.instance, id);
    }
    toBeDeleted.clear();

    // ---- 4. single type infos: re-point STIs of APIs seen in this batch,
    //         delete STIs of everything else queued for a move ----
    ArrayList<WriteModel<SingleTypeInfo>> bulkUpdatesForSti = new ArrayList<>();
    ArrayList<WriteModel<SingleTypeInfo>> deleteListForStis = new ArrayList<>();
    for(Key key: sampleDataToBeMovedMap.keySet()){
        // same "url?#?method" encoding the caller uses to populate alreadySeenApis
        String currentApiInfoKey = key.getUrl() + "?#?" + key.getMethod();
        Bson filterQ = Filters.and(
            Filters.eq(SingleTypeInfo._API_COLLECTION_ID, key.getApiCollectionId()),
            Filters.eq(SingleTypeInfo._URL, key.getUrl()),
            Filters.eq(SingleTypeInfo._METHOD, key.getMethod())
        );
        if(alreadySeenApis.contains(currentApiInfoKey)){
            bulkUpdatesForSti.add(new UpdateManyModel<>(filterQ,
                Updates.combine(
                    Updates.set(SingleTypeInfo._API_COLLECTION_ID, sampleDataToBeMovedMap.get(key)),
                    Updates.addToSet(SingleTypeInfo._COLLECTION_IDS, sampleDataToBeMovedMap.get(key))
                )));
            // consume the entry so duplicate keys in later batches fall through
            // to the delete branch
            alreadySeenApis.remove(currentApiInfoKey);
        }else{
            deleteListForStis.add(new DeleteManyModel<>(filterQ));
        }
    }

    if(!bulkUpdatesForSti.isEmpty()){
        loggerMaker.infoAndAddToDb("Updated " + bulkUpdatesForSti.size() + " stis into database.");
        SingleTypeInfoDao.instance.getMCollection().bulkWrite(bulkUpdatesForSti, new BulkWriteOptions().ordered(false));
    }

    if(!deleteListForStis.isEmpty()){
        loggerMaker.infoAndAddToDb("Deleting " + deleteListForStis.size() + " stis from database.");
        SingleTypeInfoDao.instance.getMCollection().bulkWrite(deleteListForStis, new BulkWriteOptions().ordered(false));
    }

}

public static List<String> getUniqueValuesOfList(List<String> input){
if(input == null || input.isEmpty()){
return new ArrayList<>();
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,5 @@
package com.akto.utils.jobs;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
Expand All @@ -19,12 +16,12 @@
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import com.akto.dto.CodeAnalysisRepo;
import com.mongodb.client.model.Updates;
import org.bson.conversions.Bson;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.akto.dao.ApiCollectionsDao;
import com.akto.dao.ApiInfoDao;
import com.akto.dao.SampleDataDao;
import com.akto.dao.SensitiveSampleDataDao;
Expand Down Expand Up @@ -52,9 +49,11 @@
import com.akto.util.Pair;
import com.mongodb.BasicDBObject;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.Sorts;

import static com.akto.utils.Utils.deleteApis;
import static com.akto.utils.Utils.moveApisFromSampleData;
import static com.akto.runtime.utils.Utils.createRegexPatternFromList;

public class CleanInventory {
Expand Down Expand Up @@ -139,10 +138,22 @@ public static void cleanFilteredSampleDataFromAdvancedFilters(List<ApiCollection
int limit = 100;
Bson sort = Sorts.ascending("_id.apiCollectionId", "_id.url", "_id.method");
Map<Integer,Integer> collectionWiseDeletionCountMap = new HashMap<>();
Map<Integer,Integer> moveHostCollectionWiseCountMap = new HashMap<>();

FindOneAndUpdateOptions updateOptions = new FindOneAndUpdateOptions();
updateOptions.upsert(true);

Bson updatesForCollection = Updates.combine(
Updates.setOnInsert("startTs", Context.now()),
Updates.setOnInsert("urls", new HashSet<>())
);

Set<String> alreadySeenApis = new HashSet<>();

Map<String,FilterConfig> filterMap = FilterYamlTemplateDao.instance.fetchFilterConfig(false, yamlTemplates, true);
Pattern pattern = createRegexPatternFromList(redundantUrlList);
do {
Map<Key, Integer> sampleDataToBeMovedIntoCollection = new HashMap<>();
sampleDataList = SampleDataDao.instance.findAll(filters, skip, limit, sort);
skip += limit;
List<Key> toBeDeleted = new ArrayList<>();
Expand All @@ -169,6 +180,7 @@ public static void cleanFilteredSampleDataFromAdvancedFilters(List<ApiCollection
boolean isRedundant = false;
boolean isNetsparkerPresent = false;
boolean movingApi = false;

for (String sample : samples) {
HttpResponseParams httpResponseParams = HttpCallParser.parseKafkaMessage(sample);
isNetsparkerPresent |= sample.toLowerCase().contains("netsparker");
Expand All @@ -186,14 +198,29 @@ public static void cleanFilteredSampleDataFromAdvancedFilters(List<ApiCollection
// filter passed and modified
movingApi = true;
remainingSamples.add(sample);

// check modification of hostname only
List<String> currentHostArr = param.getRequestParams().getHeaders().get("host");
String hostStringFromParam = "";
if(currentHostArr != null && !currentHostArr.isEmpty()){
hostStringFromParam = currentHostArr.get(0);
}
boolean hostNameModified = JobUtils.hasHostModified(hostStringFromParam, apiCollection);
if(hostNameModified){
int collectionId = hostStringFromParam.hashCode();
// creating new ApiCollection from this id
Bson currentUpdate = Updates.combine(updatesForCollection, Updates.setOnInsert("_id", collectionId));
ApiCollectionsDao.instance.getMCollection().findOneAndUpdate(Filters.eq(ApiCollection.HOST_NAME, hostStringFromParam), currentUpdate, updateOptions);
sampleDataToBeMovedIntoCollection.put(sampleData.getId(), collectionId);
}
break;
}else if(filterType.equals(FILTER_TYPE.ALLOWED)){
// filter passed and not modified
remainingSamples.add(sample);
}else if(filterMap.size() == 1){
// filter failed and id was default_delete
String key = filterMap.entrySet().iterator().next().getKey();
if(key.equals("DEFAULT_BLOCK_FILTER")){
if(!key.equals("DEFAULT_ALLOW_FILTER")){
remainingSamples.add(sample);
}
}
Expand All @@ -205,6 +232,10 @@ public static void cleanFilteredSampleDataFromAdvancedFilters(List<ApiCollection
if(movingApi){
// any 1 of the sample is modifiable, we print this block
toMove.add(sampleData.getId());
int initialCount = moveHostCollectionWiseCountMap.getOrDefault(sampleData.getId().getApiCollectionId(), 0);
moveHostCollectionWiseCountMap.put(sampleData.getId().getApiCollectionId(),initialCount + 1);
String apiInfoId = sampleData.getId().getUrl() + "?#?" + sampleData.getId().getMethod();
alreadySeenApis.add(apiInfoId);
if(saveLogsToDB){
loggerMaker.infoAndAddToDb("Filter passed, modify sample data of API: " + sampleData.getId(), LogDb.DASHBOARD);
}else{
Expand Down Expand Up @@ -259,7 +290,24 @@ public static void cleanFilteredSampleDataFromAdvancedFilters(List<ApiCollection
deleteApis(toBeDeleted);
}

// String shouldMove = System.getenv("MOVE_REDUNDANT_APIS");
boolean shouldMove = false;

String commaSeparatedAllowedAccounts = System.getenv("ALLOWED_ACCOUNTS_WITH_MOVE_ACCESS");
if(commaSeparatedAllowedAccounts != null){
String[] accountStringList = commaSeparatedAllowedAccounts.split(",");
Set<String> accounts = new HashSet<>(Arrays.asList(accountStringList));
String currentAccount = String.valueOf(Context.accountId.get());
shouldMove = accounts.contains(currentAccount);
}else if(System.getenv("MOVE_REDUNDANT_APIS") != null && System.getenv("MOVE_REDUNDANT_APIS").equals("true")){
shouldMove =true;
}
if(shouldMove){
if(!sampleDataToBeMovedIntoCollection.isEmpty()){
moveApisFromSampleData(sampleDataToBeMovedIntoCollection, alreadySeenApis);
sampleDataToBeMovedIntoCollection.clear();
}

}

} while (!sampleDataList.isEmpty());

Expand All @@ -273,6 +321,16 @@ public static void cleanFilteredSampleDataFromAdvancedFilters(List<ApiCollection
}
}

for(Map.Entry<Integer,Integer> iterator: moveHostCollectionWiseCountMap.entrySet()){
int collId = iterator.getKey();
int deletionCount = iterator.getValue();
String name = apiCollectionMap.get(collId).getDisplayName();

if(saveLogsToDB){
loggerMaker.infoAndAddToDb("Total apis moved from collection: " + name + " are: " + deletionCount, LogDb.DASHBOARD);
}
}

// writer.flush();
// writer.close();
}
Expand Down
Loading
Loading