diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml
index ff95a5d2bb..e3e2291010 100644
--- a/.github/workflows/staging.yml
+++ b/.github/workflows/staging.yml
@@ -51,9 +51,6 @@ jobs:

           aws-secret-access-key: ${{secrets.AWS_SECRET_ACCESS_KEY}}
           aws-region: ap-south-1
-      - name: Deploy polaris site to S3 bucket
-        run: aws s3 sync ./apps/dashboard/web/polaris_web/web/dist s3://dashboard-on-cdn/polaris_web/${{steps.docker_tag.outputs.IMAGE_TAG}}/dist --delete
-
       - run: mvn package -Dakto-image-tag=${{ github.event.inputs.Tag }} -Dakto-build-time=$(eval "date +%s") -Dakto-release-version=${{steps.docker_tag.outputs.IMAGE_TAG}}
       - name: DockerHub login
         env:
@@ -74,22 +71,10 @@ jobs:
           echo $IMAGE_TAG >> $GITHUB_STEP_SUMMARY
           docker buildx create --use
           # Build a docker container and push it to DockerHub
-          cd apps/dashboard
-          docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/$ECR_REPOSITORY-dashboard:$IMAGE_TAG $IMAGE_TAG_DASHBOARD . --push
-          cd ../testing
-          docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-api-testing:$IMAGE_TAG $IMAGE_TAG_TESTING . --push
-          cd ../testing-cli
-          docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-api-testing-cli:$IMAGE_TAG $IMAGE_TAG_TESTING_CLI . --push
-          cd ../billing
-          docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-billing:$IMAGE_TAG . --push
-          cd ../internal
-          docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-internal:$IMAGE_TAG . --push
-          cd ../threat-detection
+          cd apps/threat-detection
           docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-threat-detection:$IMAGE_TAG . --push
           cd ../threat-detection-backend
           docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-threat-detection-backend:$IMAGE_TAG . --push
-          cd ../source-code-analyser
-          docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/source-code-analyser:$IMAGE_TAG . --push

       - name: Set up JDK 11
         uses: actions/setup-java@v1
diff --git a/apps/threat-detection/src/main/java/com/akto/threat/detection/Main.java b/apps/threat-detection/src/main/java/com/akto/threat/detection/Main.java
index 1d1b061d83..f290ed4442 100644
--- a/apps/threat-detection/src/main/java/com/akto/threat/detection/Main.java
+++ b/apps/threat-detection/src/main/java/com/akto/threat/detection/Main.java
@@ -56,9 +56,9 @@ public static void main(String[] args) throws Exception {
     new FlushSampleDataTask(
             sessionFactory, internalKafka, KafkaTopic.ThreatDetection.MALICIOUS_EVENTS)
         .run();
-    new SendMaliciousEventsToBackend(
-            sessionFactory, internalKafka, KafkaTopic.ThreatDetection.ALERTS)
-        .run();
+    // new SendMaliciousEventsToBackend(
+    //         sessionFactory, internalKafka, KafkaTopic.ThreatDetection.ALERTS)
+    //     .run();

     new CleanupTask(sessionFactory).run();
   }
diff --git a/apps/threat-detection/src/main/java/com/akto/threat/detection/tasks/MaliciousTrafficDetectorTask.java b/apps/threat-detection/src/main/java/com/akto/threat/detection/tasks/MaliciousTrafficDetectorTask.java
index bc652e6714..2df92f0ba8 100644
--- a/apps/threat-detection/src/main/java/com/akto/threat/detection/tasks/MaliciousTrafficDetectorTask.java
+++ b/apps/threat-detection/src/main/java/com/akto/threat/detection/tasks/MaliciousTrafficDetectorTask.java
@@ -159,7 +159,7 @@ private boolean validateFilterForRequest(
   }

   private void processRecord(ConsumerRecord<String, String> record) {
-    System.out.println("Kafka record: " + record.value());
+    System.out.println("Kafka record: found ");
     HttpResponseParams responseParam = HttpCallParser.parseKafkaMessage(record.value());
     Context.accountId.set(Integer.parseInt(responseParam.getAccountId()));
     Map<String, FilterConfig> filters = this.getFilters();
@@ -211,17 +211,17 @@ private void processRecord(ConsumerRecord<String, String> record) {
                 .setFilterId(apiFilter.getId())
                 .build();

-        try {
-          maliciousMessages.add(
-              MessageEnvelope.generateEnvelope(
-                  responseParam.getAccountId(), actor, maliciousReq));
-        } catch (InvalidProtocolBufferException e) {
-          return;
-        }
+        // try {
+        //   maliciousMessages.add(
+        //       MessageEnvelope.generateEnvelope(
+        //           responseParam.getAccountId(), actor, maliciousReq));
+        // } catch (InvalidProtocolBufferException e) {
+        //   return;
+        // }

         if (!isAggFilter) {
-          generateAndPushMaliciousEventRequest(
-              apiFilter, actor, responseParam, maliciousReq, EventType.EVENT_TYPE_SINGLE);
+          // generateAndPushMaliciousEventRequest(
+          //     apiFilter, actor, responseParam, maliciousReq, EventType.EVENT_TYPE_SINGLE);
           return;
         }

@@ -232,13 +232,13 @@ private void processRecord(ConsumerRecord<String, String> record) {

                 if (result.shouldNotify()) {
                   System.out.print("Notifying for aggregation rule: " + rule);
-                  generateAndPushMaliciousEventRequest(
-                      apiFilter,
-                      actor,
-                      responseParam,
-                      maliciousReq,
-                      EventType.EVENT_TYPE_AGGREGATED);
-                }
+                  // generateAndPushMaliciousEventRequest(
+                  //     apiFilter,
+                  //     actor,
+                  //     responseParam,
+                  //     maliciousReq,
+                  //     EventType.EVENT_TYPE_AGGREGATED);
+                }
               }
             });
       }
@@ -246,19 +246,19 @@ private void processRecord(ConsumerRecord<String, String> record) {

     // Should we push all the messages in one go
     // or call kafka.send for each HttpRequestParams
-    try {
-      maliciousMessages.forEach(
-          sample -> {
-            sample
-                .marshal()
-                .ifPresent(
-                    data -> {
-                      internalKafka.send(data, KafkaTopic.ThreatDetection.MALICIOUS_EVENTS);
-                    });
-          });
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
+    // try {
+    //   maliciousMessages.forEach(
+    //       sample -> {
+    //         sample
+    //             .marshal()
+    //             .ifPresent(
+    //                 data -> {
+    //                   internalKafka.send(data, KafkaTopic.ThreatDetection.MALICIOUS_EVENTS);
+    //                 });
+    //       });
+    // } catch (Exception e) {
+    //   e.printStackTrace();
+    // }
   }

   private void generateAndPushMaliciousEventRequest(