Merge pull request #1014 from akto-api-security/feature/dp_in_testing
Feature/dp in testing
avneesh-akto authored Apr 16, 2024
2 parents a3f89c6 + 6b76963 commit 6c9af33
Showing 24 changed files with 440 additions and 104 deletions.
@@ -152,7 +152,10 @@ public void analyse(String message, int finalApiCollectionId) {
SingleTypeInfo.SuperType superType = urlTemplate.getTypes()[i];
if (superType == null) continue;
int idx = ogUrl.startsWith("http") ? i:i+1;
String s = ogUrlSplit[idx]; // because ogUrl=/api/books/123 while template url=api/books/INTEGER
Object s = ogUrlSplit[idx]; // because ogUrl=/api/books/123 while template url=api/books/INTEGER
if (superType.equals(SingleTypeInfo.SuperType.INTEGER)) {
s = Integer.parseInt(ogUrlSplit[idx]);
}
Set<Object> val = new HashSet<>();
val.add(s);
processRequestParam(i+"", val, combinedUrl, true, false);
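A standalone sketch (not part of the commit) of the coercion this hunk introduces: when the URL template marks a position as INTEGER, the raw path segment is parsed into an Integer before being recorded, so ogUrl=/api/books/123 yields 123 rather than "123". The SuperType enum below is a simplified stand-in for SingleTypeInfo.SuperType.

```java
// Standalone illustration of the coercion above; SuperType is a stand-in
// for com.akto.dto.type.SingleTypeInfo.SuperType.
public class SegmentCoercionSketch {

    enum SuperType { INTEGER, GENERIC }

    // Template position INTEGER means "123" is recorded as the Integer 123, not the String "123".
    static Object coerceSegment(String rawSegment, SuperType superType) {
        if (superType == SuperType.INTEGER) {
            return Integer.parseInt(rawSegment);
        }
        return rawSegment;
    }

    public static void main(String[] args) {
        // ogUrl=/api/books/123 matched against template api/books/INTEGER
        System.out.println(coerceSegment("123", SuperType.INTEGER));   // 123 (Integer)
        System.out.println(coerceSegment("books", SuperType.GENERIC)); // books (String)
    }
}
```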
@@ -94,7 +94,7 @@ public void testAnalyse1() {
TreeHelper treeHelper = new TreeHelper();
treeHelper.buildTree("1000", "/api/m7", "POST");
Map<Integer, Node> result = treeHelper.result;
assertEquals(2, result.size()); // this is because /api/m6 gets best value from /api/m1
assertEquals(3, result.size()); // this is because /api/m6 gets best value from /api/m1

Map<String, Connection> connections = result.get(Objects.hash("1000", "/api/m7", "POST")).getConnections();
assertEquals(1, connections.size());
@@ -161,7 +161,7 @@ public void testAnalyse3() {
TreeHelper treeHelper = new TreeHelper();
treeHelper.buildTree("1000", "/api/m7", "POST");
Map<Integer, Node> result = treeHelper.result;
assertEquals(6, result.size()); // this is because /api/m6 has 2 parameters getting data
assertEquals(7, result.size()); // this is because /api/m6 has 2 parameters getting data

Map<String, Connection> connections = result.get(Objects.hash("1000", "/api/m7", "POST")).getConnections();
assertEquals(1, connections.size());
@@ -197,7 +197,7 @@ public void testAnalyse4() {
TreeHelper treeHelper = new TreeHelper();
treeHelper.buildTree("1000", "api/cars/INTEGER", "POST");
Map<Integer, Node> result = treeHelper.result;
assertEquals(1, result.size()); // this is because /api/m6 has 2 parameters getting data
assertEquals(2, result.size()); // this is because /api/m6 has 2 parameters getting data

Map<String, Connection> connections = result.get(Objects.hash("1000", "api/cars/INTEGER", "POST")).getConnections();
assertEquals(1, connections.size());
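For orientation (not part of the commit), a minimal sketch of the lookup pattern these assertions rely on: TreeHelper keys its result map by Objects.hash(apiCollectionId, url, method), so a node is fetched by hashing the same triple. The String value below stands in for the real Node type.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

// Nodes are keyed by Objects.hash(apiCollectionId, url, method).
public class NodeLookupSketch {
    public static void main(String[] args) {
        Map<Integer, String> result = new HashMap<>();
        result.put(Objects.hash("1000", "/api/m7", "POST"), "node for POST /api/m7");

        // Same triple, same hash, same node.
        String node = result.get(Objects.hash("1000", "/api/m7", "POST"));
        System.out.println(node); // node for POST /api/m7
    }
}
```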
@@ -14,7 +14,7 @@
import com.akto.dto.type.URLMethods.Method;
import com.akto.log.LoggerMaker;
import com.akto.runtime.RelationshipSync;
import com.akto.utils.Build;
import com.akto.test_editor.execution.Build;
import com.akto.utils.Utils;
import com.mongodb.BasicDBObject;
import com.mongodb.ConnectionString;
22 changes: 11 additions & 11 deletions apps/dashboard/src/main/java/com/akto/action/HarAction.java
@@ -80,7 +80,7 @@ public String execute() throws IOException {
return ERROR.toUpperCase();
}
} else {
Collection<String> actionErrors = apiCollectionsAction.getActionErrors();
Collection<String> actionErrors = apiCollectionsAction.getActionErrors();
if (actionErrors != null && actionErrors.size() > 0) {
for (String actionError: actionErrors) {
addActionError(actionError);
@@ -178,7 +178,7 @@ public void setTcpContent(byte[] tcpContent) {
Awesome awesome = null;

public String uploadTcp() {

File tmpDir = FileUtils.getTempDirectory();
String filename = UUID.randomUUID().toString() + ".pcap";
File tcpDump = new File(tmpDir, filename);
@@ -188,23 +188,23 @@ public String uploadTcp() {
Awesome.GoString.ByValue str = new Awesome.GoString.ByValue();
str.p = tcpDump.getAbsolutePath();
str.n = str.p.length();

Awesome.GoString.ByValue str2 = new Awesome.GoString.ByValue();
str2.p = System.getenv("AKTO_KAFKA_BROKER_URL");
str2.n = str2.p.length();

awesome.readTcpDumpFile(str, str2 , apiCollectionId);
return Action.SUCCESS.toUpperCase();

return Action.SUCCESS.toUpperCase();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return Action.ERROR.toUpperCase();
return Action.ERROR.toUpperCase();
}

}

interface Awesome extends Library {
interface Awesome extends Library {
public static class GoString extends Structure {
/** C type : const char* */
public String p;
@@ -224,8 +224,8 @@ public GoString(String p, long n) {
public static class ByReference extends GoString implements Structure.ByReference {}
public static class ByValue extends GoString implements Structure.ByValue {}
}

public void readTcpDumpFile(GoString.ByValue filepath, GoString.ByValue kafkaURL, long apiCollectionId);

}
}
}
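As context for the Awesome interface above (not part of the commit), a minimal JNA binding sketch: a Go shared library exporting readTcpDumpFile is typically loaded with Native.load, and GoString mirrors Go's string header (pointer plus length). The library name "awesome" and the pcap path below are assumptions for illustration only.

```java
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.Structure;

// Sketch of how an interface like Awesome is usually bound to a Go shared library.
// The library name "awesome" is an assumption; the commit does not show the binding.
public class JnaBindingSketch {

    public interface Awesome extends Library {
        @Structure.FieldOrder({"p", "n"})
        class GoString extends Structure {
            public String p; // const char*
            public long n;   // length of p
            public static class ByValue extends GoString implements Structure.ByValue {}
        }

        void readTcpDumpFile(GoString.ByValue filepath, GoString.ByValue kafkaURL, long apiCollectionId);
    }

    public static void main(String[] args) {
        // Binding requires libawesome.so / awesome.dll on the JNA library path:
        // Awesome awesome = Native.load("awesome", Awesome.class);

        Awesome.GoString.ByValue path = new Awesome.GoString.ByValue();
        path.p = "/tmp/capture.pcap"; // hypothetical pcap location
        path.n = path.p.length();
        System.out.println(path.p + " (" + path.n + " chars)");
    }
}
```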
2 changes: 1 addition & 1 deletion apps/dashboard/src/main/java/com/akto/utils/Utils.java
@@ -470,7 +470,7 @@ public static void pushDataToKafka(int apiCollectionId, String topic, List<Strin
info = new AccountHTTPCallParserAktoPolicyInfo();
HttpCallParser callParser = new HttpCallParser("userIdentifier", 1, 1, 1, false);
info.setHttpCallParser(callParser);
// info.setResourceAnalyser(new ResourceAnalyser(300_000, 0.01, 100_000, 0.01));
// info.setResourceAnalyser(new ResourceAnalyser(300_000, 0.01, 100_000, 0.01));
RuntimeListener.accountHTTPParserMap.put(accountId, info);
}

@@ -97,7 +97,9 @@ function formatRawNodeData(nodes, currentApiCollectionId, currentEndpoint, curre
let connections = node["connections"]
let edgesMap = new Map()
Object.values(connections).forEach(connection => {
let edge = connection["edges"][0] // todo: null check
let edge = connection["edges"][0]

if (!edge) return

let source = calculateNodeId(edge["apiCollectionId"], edge["url"], edge["method"]);
let edgeId = source + "-" + id;
@@ -1,30 +1,26 @@
package com.akto.utils;
package com.akto.test_editor.execution;

import com.akto.DaoInit;
import com.akto.dao.DependencyFlowNodesDao;
import com.akto.dao.ModifyHostDetailsDao;
import com.akto.dao.ReplaceDetailsDao;
import com.akto.dao.SampleDataDao;
import com.akto.dao.context.Context;
import com.akto.dto.*;
import com.akto.dto.ApiInfo;
import com.akto.dto.OriginalHttpRequest;
import com.akto.dto.OriginalHttpResponse;
import com.akto.dto.RawApi;
import com.akto.dto.dependency_flow.*;
import com.akto.dto.testing.TestingRunConfig;
import com.akto.dto.traffic.Key;
import com.akto.dto.traffic.SampleData;
import com.akto.dto.type.URLMethods;
import com.akto.log.LoggerMaker;
import com.akto.parsers.HttpCallParser;
import com.akto.runtime.policies.AuthPolicy;
import com.akto.test_editor.execution.Operations;
import com.akto.testing.ApiExecutor;
import com.akto.util.Constants;
import com.akto.util.HttpRequestResponseUtils;
import com.akto.util.JSONUtils;

import com.akto.util.modifier.SetValueModifier;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.ConnectionString;
import com.mongodb.client.model.Filters;
import joptsimple.internal.Strings;
import org.bson.conversions.Bson;
@@ -37,12 +33,11 @@

public class Build {

private Map<Integer, ReverseNode> parentToChildMap = new HashMap<>();
private Map<Integer, ReverseNode> parentToChildMap = new HashMap<>();

private static final LoggerMaker loggerMaker = new LoggerMaker(Build.class);

private void buildParentToChildMap(List<Node> nodes) {
parentToChildMap = new HashMap<>();
public static void buildParentToChildMap(Collection<Node> nodes, Map<Integer, ReverseNode> parentToChildMap) {
for (Node node: nodes) {
if (node.getConnections() == null) continue;
for (Connection connection: node.getConnections().values()) {
@@ -69,7 +64,7 @@ private void buildParentToChildMap(List<Node> nodes) {

}

private Map<Integer, List<SampleData>> buildLevelsToSampleDataMap(List<Node> nodes) {
public static Map<Integer, List<SampleData>> buildLevelsToSampleDataMap(List<Node> nodes) {

// divide them into levels
Map<Integer,List<SampleData>> levelsToSampleDataMap = new HashMap<>();
@@ -158,11 +153,11 @@ public void setIsSuccess(boolean success) {
this.success = success;
}


}

Set<ApiInfo.ApiInfoKey> apisReplayedSet = new HashSet<>();
public List<RunResult> runPerLevel(List<SampleData> sdList, Map<String, ModifyHostDetail> modifyHostDetailMap, Map<Integer, ReplaceDetail> replaceDetailsMap) {
public static List<RunResult> runPerLevel(List<SampleData> sdList, Map<String, ModifyHostDetail> modifyHostDetailMap, Map<Integer, ReplaceDetail> replaceDetailsMap, Map<Integer, ReverseNode> parentToChildMap, Set<ApiInfo.ApiInfoKey> apisReplayedSet) {
List<RunResult> runResults = new ArrayList<>();
for (SampleData sampleData: sdList) {
Key id = sampleData.getId();
@@ -249,7 +244,7 @@ public List<RunResult> run(List<Integer> apiCollectionsIds, List<ModifyHostDetai


List<Node> nodes = DependencyFlowNodesDao.instance.findNodesForCollectionIds(apiCollectionsIds,false,0, 10_000);
buildParentToChildMap(nodes);
buildParentToChildMap(nodes, parentToChildMap);
Map<Integer, List<SampleData>> levelsToSampleDataMap = buildLevelsToSampleDataMap(nodes);

List<RunResult> runResults = new ArrayList<>();
@@ -261,7 +256,7 @@ public List<RunResult> run(List<Integer> apiCollectionsIds, List<ModifyHostDetai

loggerMaker.infoAndAddToDb("Running level: " + level, LoggerMaker.LogDb.DASHBOARD);
try {
List<RunResult> runResultsPerLevel = runPerLevel(sdList, modifyHostDetailMap, replaceDetailsMap);
List<RunResult> runResultsPerLevel = runPerLevel(sdList, modifyHostDetailMap, replaceDetailsMap, parentToChildMap, apisReplayedSet);
runResults.addAll(runResultsPerLevel);
loggerMaker.infoAndAddToDb("Finished running level " + level, LoggerMaker.LogDb.DASHBOARD);
} catch (Exception e) {
@@ -282,7 +277,7 @@ public List<RunResult> run(List<Integer> apiCollectionsIds, List<ModifyHostDetai
if (apisReplayedSet.contains(new ApiInfo.ApiInfoKey(key.getApiCollectionId(), key.getUrl(), key.getMethod()))) continue;
filtered.add(sampleData);
}
List<RunResult> runResultsAll = runPerLevel(filtered, modifyHostDetailMap, replaceDetailsMap);
List<RunResult> runResultsAll = runPerLevel(filtered, modifyHostDetailMap, replaceDetailsMap, parentToChildMap, apisReplayedSet);
runResults.addAll(runResultsAll);
skip += limit;
if (all.size() < limit) break;
@@ -340,7 +335,7 @@ public static void modifyRequest(OriginalHttpRequest request, ReplaceDetail repl

}

public List<SampleData> fillSdList(List<SampleData> sdList) {
public static List<SampleData> fillSdList(List<SampleData> sdList) {
if (sdList == null || sdList.isEmpty()) return new ArrayList<>();

List<Bson> filters = new ArrayList<>();
@@ -359,30 +354,12 @@ public List<SampleData> fillSdList(List<SampleData> sdList) {

static ObjectMapper mapper = new ObjectMapper();
static JsonFactory factory = mapper.getFactory();
public void fillReplaceDetailsMap(ReverseNode reverseNode, OriginalHttpResponse response, Map<Integer, ReplaceDetail> replaceDetailsMap) {
public static void fillReplaceDetailsMap(ReverseNode reverseNode, OriginalHttpResponse response, Map<Integer, ReplaceDetail> replaceDetailsMap) {
if (reverseNode == null) return;

Map<Integer, ReplaceDetail> deltaReplaceDetailsMap = new HashMap<>();

String respPayload = response.getBody();
Map<String, Set<Object>> valuesMap = extractValuesFromPayload(respPayload);

Map<String, List<String>> responseHeaders = response.getHeaders();
for (String headerKey: responseHeaders.keySet()) {
List<String> values = responseHeaders.get(headerKey);
if (values == null) continue;

if (headerKey.equalsIgnoreCase("set-cookie")) {
Map<String, String> cookieMap = AuthPolicy.parseCookie(values);
for (String cookieKey : cookieMap.keySet()) {
String cookieVal = cookieMap.get(cookieKey);
valuesMap.put(cookieKey, new HashSet<>(Collections.singletonList(cookieVal)));
}
} else {
valuesMap.put(headerKey, new HashSet<>(values));
}

}
Map<String, Set<Object>> valuesMap = getValuesMap(response);

Map<String,ReverseConnection> connections = reverseNode.getReverseConnections();
for (ReverseConnection reverseConnection: connections.values()) {
@@ -421,4 +398,27 @@ public void fillReplaceDetailsMap(ReverseNode reverseNode, OriginalHttpResponse

}

public static Map<String, Set<Object>> getValuesMap(OriginalHttpResponse response) {
String respPayload = response.getBody();
Map<String, Set<Object>> valuesMap = extractValuesFromPayload(respPayload);

Map<String, List<String>> responseHeaders = response.getHeaders();
for (String headerKey: responseHeaders.keySet()) {
List<String> values = responseHeaders.get(headerKey);
if (values == null) continue;

if (headerKey.equalsIgnoreCase("set-cookie")) {
Map<String, String> cookieMap = AuthPolicy.parseCookie(values);
for (String cookieKey : cookieMap.keySet()) {
String cookieVal = cookieMap.get(cookieKey);
valuesMap.put(cookieKey, new HashSet<>(Collections.singletonList(cookieVal)));
}
} else {
valuesMap.put(headerKey, new HashSet<>(values));
}

}

return valuesMap;
}
}
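A standalone sketch (not part of the commit) of the header flattening that the new getValuesMap helper performs: ordinary response headers become name -> values entries, while set-cookie is split into one entry per cookie. parseCookie below is a simplified stand-in for AuthPolicy.parseCookie.

```java
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Ordinary headers become name -> values; set-cookie is split into one entry per cookie.
public class ValuesMapSketch {

    // Simplified stand-in for AuthPolicy.parseCookie.
    static Map<String, String> parseCookie(List<String> setCookieValues) {
        Map<String, String> cookies = new HashMap<>();
        for (String value : setCookieValues) {
            String firstPair = value.split(";", 2)[0];   // "session=abc; Path=/" -> "session=abc"
            String[] kv = firstPair.split("=", 2);
            if (kv.length == 2) cookies.put(kv[0].trim(), kv[1].trim());
        }
        return cookies;
    }

    public static void main(String[] args) {
        Map<String, List<String>> responseHeaders = new HashMap<>();
        responseHeaders.put("x-request-id", Collections.singletonList("42"));
        responseHeaders.put("set-cookie", Collections.singletonList("session=abc123; Path=/; HttpOnly"));

        Map<String, Set<Object>> valuesMap = new HashMap<>();
        for (Map.Entry<String, List<String>> header : responseHeaders.entrySet()) {
            if (header.getKey().equalsIgnoreCase("set-cookie")) {
                for (Map.Entry<String, String> cookie : parseCookie(header.getValue()).entrySet()) {
                    valuesMap.put(cookie.getKey(), new HashSet<>(Collections.singletonList(cookie.getValue())));
                }
            } else {
                valuesMap.put(header.getKey(), new HashSet<>(header.getValue()));
            }
        }
        System.out.println(valuesMap); // e.g. {session=[abc123], x-request-id=[42]} (map order not guaranteed)
    }
}
```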
@@ -74,7 +74,9 @@ public class Executor {
public final String _HOST = "host";

public YamlTestResult execute(ExecutorNode node, RawApi rawApi, Map<String, Object> varMap, String logId,
AuthMechanism authMechanism, FilterNode validatorNode, ApiInfo.ApiInfoKey apiInfoKey, TestingRunConfig testingRunConfig, List<CustomAuthType> customAuthTypes, boolean debug, List<TestingRunResult.TestLog> testLogs) {
AuthMechanism authMechanism, FilterNode validatorNode, ApiInfo.ApiInfoKey apiInfoKey, TestingRunConfig testingRunConfig,
List<CustomAuthType> customAuthTypes, boolean debug, List<TestingRunResult.TestLog> testLogs,
Memory memory) {
List<GenericTestResult> result = new ArrayList<>();

ExecutionListBuilder executionListBuilder = new ExecutionListBuilder();
@@ -143,9 +145,14 @@ public YamlTestResult execute(ExecutorNode node, RawApi rawApi, Map<String, Obje
boolean requestSent = false;

String executionType = node.getChildNodes().get(0).getValues().toString();
if (executionType.equals("multiple")) {
workflowTest = buildWorkflowGraph(reqNodes, sampleRawApi, authMechanism, customAuthTypes, apiInfoKey, varMap, validatorNode);
result.add(triggerMultiExecution(workflowTest, reqNodes, rawApi, authMechanism, customAuthTypes, apiInfoKey, varMap, validatorNode, debug, testLogs));
if (executionType.equals("multiple") || executionType.equals("graph")) {
if (executionType.equals("graph")) {
List<ApiInfo.ApiInfoKey> apiInfoKeys = new ArrayList<>();
apiInfoKeys.add(apiInfoKey);
memory = new Memory(apiInfoKeys, new HashMap<>());
}
workflowTest = buildWorkflowGraph(reqNodes, rawApi, authMechanism, customAuthTypes, apiInfoKey, varMap, validatorNode);
result.add(triggerMultiExecution(workflowTest, reqNodes, rawApi, authMechanism, customAuthTypes, apiInfoKey, varMap, validatorNode, debug, testLogs, memory));
yamlTestResult = new YamlTestResult(result, workflowTest);

return yamlTestResult;
@@ -257,7 +264,7 @@ public WorkflowTest buildWorkflowGraph(ExecutorNode reqNodes, RawApi rawApi, Aut
}

public MultiExecTestResult triggerMultiExecution(WorkflowTest workflowTest, ExecutorNode reqNodes, RawApi rawApi, AuthMechanism authMechanism,
List<CustomAuthType> customAuthTypes, ApiInfo.ApiInfoKey apiInfoKey, Map<String, Object> varMap, FilterNode validatorNode, boolean debug, List<TestingRunResult.TestLog> testLogs) {
List<CustomAuthType> customAuthTypes, ApiInfo.ApiInfoKey apiInfoKey, Map<String, Object> varMap, FilterNode validatorNode, boolean debug, List<TestingRunResult.TestLog> testLogs, Memory memory) {

ApiWorkflowExecutor apiWorkflowExecutor = new ApiWorkflowExecutor();
Graph graph = new Graph();
@@ -266,7 +273,7 @@ public MultiExecTestResult triggerMultiExecution(WorkflowTest workflowTest, Exec
List<String> executionOrder = new ArrayList<>();
WorkflowTestResult workflowTestResult = new WorkflowTestResult(id, workflowTest.getId(), new HashMap<>(), null, null);
GraphExecutorRequest graphExecutorRequest = new GraphExecutorRequest(graph, graph.getNode("x1"), workflowTest, null, null, varMap, "conditional", workflowTestResult, new HashMap<>(), executionOrder);
GraphExecutorResult graphExecutorResult = apiWorkflowExecutor.init(graphExecutorRequest, debug, testLogs);
GraphExecutorResult graphExecutorResult = apiWorkflowExecutor.init(graphExecutorRequest, debug, testLogs, memory);
return new MultiExecTestResult(graphExecutorResult.getWorkflowTestResult().getNodeResultMap(), graphExecutorResult.getVulnerable(), Confidence.HIGH, graphExecutorRequest.getExecutionOrder());
}

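To illustrate the new "graph" branch above (not part of the commit): when executionType is "graph", a fresh Memory seeded with the API under test is created and threaded through triggerMultiExecution into the workflow executor, while "multiple" keeps whatever Memory was passed in. Memory and ApiInfoKey below are simplified stand-ins that only mirror the constructor shape used in the diff.

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Simplified stand-ins; only the constructor shape (seed APIs plus an empty map) matches the diff.
public class GraphExecutionSketch {

    static class ApiInfoKey {
        final int apiCollectionId; final String url; final String method;
        ApiInfoKey(int apiCollectionId, String url, String method) {
            this.apiCollectionId = apiCollectionId; this.url = url; this.method = method;
        }
    }

    static class Memory {
        final List<ApiInfoKey> apiInfoKeys;
        final Map<Integer, Object> replaceDetailsMap;
        Memory(List<ApiInfoKey> apiInfoKeys, Map<Integer, Object> replaceDetailsMap) {
            this.apiInfoKeys = apiInfoKeys;
            this.replaceDetailsMap = replaceDetailsMap;
        }
    }

    // "graph" gets a fresh Memory seeded with the API under test; other types keep the existing one.
    static Memory memoryFor(String executionType, ApiInfoKey apiInfoKey, Memory existing) {
        if (!"graph".equals(executionType)) return existing;
        List<ApiInfoKey> apiInfoKeys = new ArrayList<>();
        apiInfoKeys.add(apiInfoKey);
        return new Memory(apiInfoKeys, new HashMap<>());
    }

    public static void main(String[] args) {
        ApiInfoKey underTest = new ApiInfoKey(1000, "/api/books", "POST");
        Memory memory = memoryFor("graph", underTest, null);
        System.out.println(memory.apiInfoKeys.size()); // 1
    }
}
```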