diff --git a/.batect/agent_deco.sh b/.batect/agent_deco.sh
new file mode 100755
index 000000000..bffdba4d7
--- /dev/null
+++ b/.batect/agent_deco.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env sh
+
+######################################################################
+# @author : bidaya0 (bidaya0@$HOSTNAME)
+# @file : agent_deco
+# @created : Tuesday, January 25, 2022 18:17:45 CST
+#
+# @description :
+######################################################################
+
+
+
+if [ "$PYTHONAGENT" = "TRUE" ]; then
+    curl -X GET "${DONGTAI_IAST_BASE_URL}/api/v1/agent/download?url=${DONGTAI_IAST_BASE_URL}&language=python&projectName=${PROJECT_NAME}" -H "Authorization: Token ${DONGTAI_AGNET_TOKEN}" -o dongtai-agent-python.tar.gz -k
+    pip install dongtai-agent-python.tar.gz
+fi
+
+sh $@
diff --git a/.batect/manage_run_server.sh b/.batect/manage_run_server.sh
new file mode 100755
index 000000000..24f180774
--- /dev/null
+++ b/.batect/manage_run_server.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env sh
+
+######################################################################
+# @author : bidaya0 (bidaya0@$HOSTNAME)
+# @file : manage_run_server
+# @created : Tuesday, January 25, 2022 10:39:21 CST
+#
+# @description :
+######################################################################
+
+python manage.py runserver 0.0.0.0:8000
diff --git a/.batect/manage_test.sh b/.batect/manage_test.sh
new file mode 100755
index 000000000..28b3ad382
--- /dev/null
+++ b/.batect/manage_test.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env sh
+
+######################################################################
+# @author : bidaya0 (bidaya0@$HOSTNAME)
+# @file : manage_test
+# @created : Tuesday, January 25, 2022 10:39:21 CST
+#
+# @description :
+######################################################################
+
+python manage.py test
diff --git a/.dockerignore b/.dockerignore
index b7f538535..beb484eee 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,6 +1,45 @@
 /venv
 /*.docx
+*.DS_Store
+/venv
+*/__pycache__/*
+*/migrations/*
+*.pyc
+.idea/
+scaapi/cron/logs/sca.log
+*.docx
+db/*
+#Dockerfile
+doc/*
+doc/example.json
+conf/config.ini
+logs/*.log
+build_docker.sh
+dongtai_py_agent.log
+/venv
+/*.docx
 .git
 .idea
 /logs/*.log
 build_docker.sh
+*.DS_Store
+/venv
+*/__pycache__/*
+*/migrations/*
+*.pyc
+.idea/
+scaapi/cron/logs/sca.log
+iast/upload/reports/*
+db/*
+conf/config.ini
+#Dockerfile
+doc
+doc/example.json
+logs/*.log
+build_docker.sh
+iast-package
+config.ini
+/venv
+/*.docx
+.git
+.idea
diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..e77362c88
--- /dev/null
+++ b/.github/CODE_OF_CONDUCT.md
@@ -0,0 +1,3 @@
+# DongTai Community Code of Conduct
+
+This project follows the [CNCF Code of Conduct](https://github.com/cncf/foundation/blob/master/code-of-conduct.md).
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yaml b/.github/ISSUE_TEMPLATE/bug_report.yaml
new file mode 100644
index 000000000..ba294be21
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.yaml
@@ -0,0 +1,83 @@
+name: 🐛 Bug report
+description: Report a bug to help us improve DongTai
+title: "[Bug]: "
+labels: [bug]
+assignees:
+  - exexute
+body:
+- type: markdown
+  attributes:
+    value: |
+      Thank you for submitting a bug report!
+
+      Please fill out the template below to make it easier to debug your problem.
+
+      If you are not sure whether it is a bug, you can contact us via the available [support channels](https://github.com/HXSecurity/DongTai/issues/new/choose).
+- type: checkboxes
+  attributes:
+    label: Preflight Checklist
+    description: Please ensure you've completed all of the following.
+    options:
+      - label: I agree to follow the [Code of Conduct](https://github.com/HXSecurity/DongTai/blob/main/.github/CODE_OF_CONDUCT.md) that this project adheres to.
+        required: true
+      - label: I have searched the [issue tracker](https://www.github.com/HXSecurity/DongTai/issues) for an issue that matches the one I want to file, without success.
+        required: true
+      - label: I am not looking for support, or I have already pursued the available [support channels](https://github.com/HXSecurity/DongTai/issues/new/choose) without success.
+        required: true
+- type: input
+  attributes:
+    label: Version
+    description: What version of DongTai are you running?
+    placeholder: 1.0.0
+  validations:
+    required: true
+- type: dropdown
+  attributes:
+    label: Installation Type
+    description: How did you install DongTai?
+    options:
+      - Official SaaS Service
+      - Official Kubernetes
+      - Official Docker Compose
+      - Other (specify below)
+  validations:
+    required: true
+- type: dropdown
+  attributes:
+    label: Service Name
+    description: Which service are you reporting the bug for?
+    options:
+      - Doc(DongTai-Doc)
+      - Docker(DongTai-Base-Image)
+      - Deploy(DongTai-deploy)
+      - DongTai-Web
+      - DongTai-WebAPI
+      - DongTai-OpenAPI
+      - DongTai-Engine
+      - DongTai-Core
+      - DongTai-agent-python
+      - DongTai-agent-java
+      - DongTai-agent-php
+    multiple: true
+  validations:
+    required: true
+- type: textarea
+  attributes:
+    label: Describe the details of the bug and the steps to reproduce it
+    description: Steps to reproduce the behavior if it is not self-explanatory.
+    placeholder: |
+      1. In this environment...
+      2. With this config...
+      3. Do something...
+      4. See error...
+  validations:
+    required: true
+- type: textarea
+  attributes:
+    label: Additional Information
+    description: Links? References? Anything that will give us more context about the issue that you are encountering!
+- type: textarea
+  attributes:
+    label: Logs
+    description: DongTai application logs (if relevant).
+    render: shell
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 000000000..2e607c519
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,9 @@
+blank_issues_enabled: false
+contact_links:
+  - name: ❓ Ask a question
+    url: https://github.com/HXSecurity/DongTai/discussions/new?category=q-a
+    about: Ask and discuss questions with other DongTai community members
+
+  - name: 📚 Documentation
+    url: https://hxsecurity.github.io/DongTaiDoc
+    about: Check the documentation for help
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yaml b/.github/ISSUE_TEMPLATE/feature_request.yaml
new file mode 100644
index 000000000..2e549d52a
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yaml
@@ -0,0 +1,44 @@
+name: 🚀 Feature request
+description: Suggest an idea for DongTai
+title: "[Feature]: "
+labels: [feature, ]
+assignees:
+  - exexute
+body:
+- type: markdown
+  attributes:
+    value: |
+      Thank you for submitting a feature request!
+
+      Please describe what you would like to change/add and why in detail by filling out the template below.
+
+      If you are not sure if your request fits into DongTai, you can contact us via the available [support channels](https://github.com/HXSecurity/DongTai/issues/new/choose).
+- type: checkboxes
+  attributes:
+    label: Preflight Checklist
+    description: Please ensure you've completed all of the following.
+ options: + - label: I agree to follow the [Code of Conduct](https://github.com/HXSecurity/DongTai/blob/main/.github/CODE_OF_CONDUCT.md) that this project adheres to. + required: true + - label: I have searched the [issue tracker](https://www.github.com/HXSecurity/DongTai/issues) for an issue that matches the one I want to file, without success. + required: true +- type: textarea + attributes: + label: Problem Description + description: A clear and concise description of the problem you are seeking to solve with this feature request. + validations: + required: true +- type: textarea + attributes: + label: Proposed Solution + description: A clear and concise description of what would you like to happen. + validations: + required: true +- type: textarea + attributes: + label: Alternatives Considered + description: A clear and concise description of any alternative solutions or features you've considered. +- type: textarea + attributes: + label: Additional Information + description: Add any other context about the problem here. diff --git a/.github/deploy/deploy-dongtai-server-dev.yml b/.github/deploy/deploy-dongtai-server-dev.yml new file mode 100644 index 000000000..8b80634f6 --- /dev/null +++ b/.github/deploy/deploy-dongtai-server-dev.yml @@ -0,0 +1,159 @@ +--- +# dongtai-server服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-server + namespace: iast-dev + annotations: + kubesphere.io/description: dongtai-server + labels: + app: dongtai-server +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-server + template: + metadata: + labels: + app: dongtai-server + spec: + containers: + - name: dongtai-server-container + image: registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-dev:VERSION + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/webapi/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret +--- +# dongtai-web服务 +apiVersion: v1 +kind: Service +metadata: + name: dongtai-server-svc + namespace: iast-dev +spec: + ports: + - port: 80 + protocol: TCP + targetPort: 8000 + selector: + app: dongtai-server + type: ClusterIP +--- +# dongtai-engine服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-engine + namespace: iast-dev + annotations: + kubesphere.io/description: dongtai-engine + labels: + app: dongtai-engine +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-engine + template: + metadata: + labels: + app: dongtai-engine + spec: + containers: + - name: dongtai-engine-container + image: registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-dev:VERSION + command: [ "/bin/sh","/opt/dongtai/webapi/docker/entrypoint.sh" ] + args: [ "worker" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/webapi/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret +--- +apiVersion: v1 +kind: Service +metadata: + name: dongtai-engine-svc + namespace: iast-dev +spec: + ports: + - port: 80 + protocol: TCP + targetPort: 8000 + selector: + app: dongtai-engine + type: ClusterIP +--- +#dongtai-engine-task服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + 
name: dongtai-engine-task + namespace: iast-dev + annotations: + kubesphere.io/description: dongtai-engine-task + labels: + app: dongtai-engine-task +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-engine-task + template: + metadata: + labels: + app: dongtai-engine-task + spec: + containers: + - name: dongtai-engine-task-container + image: registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-dev:VERSION + command: [ "/bin/bash","/opt/dongtai/webapi/docker/entrypoint.sh" ] + args: [ "beat" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/webapi/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret diff --git a/.github/deploy/deploy-dongtai-server-prod.yml b/.github/deploy/deploy-dongtai-server-prod.yml new file mode 100644 index 000000000..2a96775fd --- /dev/null +++ b/.github/deploy/deploy-dongtai-server-prod.yml @@ -0,0 +1,166 @@ +--- +# dongtai-server服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-server + namespace: iast-prod + annotations: + kubesphere.io/description: dongtai-server + labels: + app: dongtai-server +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-server + template: + metadata: + labels: + app: dongtai-server + spec: + containers: + - name: dongtai-server-container + image: dongtai/dongtai-server:VERSION + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + env: + - name: "environment" + value: "PROD" + - name: aliyun_logs_iast_server_project + value: k8s-log-c541e7d8ffff2434a8b7b4fd7d8645d2a + - name: aliyun_logs_iast_server + value: stdout + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret +--- +# dongtai-web服务 +apiVersion: v1 +kind: Service +metadata: + name: dongtai-server-svc + namespace: iast-prod +spec: + ports: + - port: 80 + protocol: TCP + targetPort: 8000 + selector: + app: dongtai-server + type: ClusterIP +--- +# dongtai-engine服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-engine + namespace: iast-prod + annotations: + kubesphere.io/description: dongtai-engine + labels: + app: dongtai-engine +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-engine + template: + metadata: + labels: + app: dongtai-engine + spec: + containers: + - name: dongtai-engine-container + image: dongtai/dongtai-server:VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret +--- +apiVersion: v1 +kind: Service +metadata: + name: dongtai-engine-svc + namespace: iast-prod +spec: + ports: + - port: 80 + protocol: TCP + targetPort: 8000 + selector: + app: dongtai-engine + type: ClusterIP +--- +#dongtai-engine-task服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-engine-task + 
namespace: iast-prod + annotations: + kubesphere.io/description: dongtai-engine-task + labels: + app: dongtai-engine-task +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-engine-task + template: + metadata: + labels: + app: dongtai-engine-task + spec: + containers: + - name: dongtai-engine-task-container + image: dongtai/dongtai-server:VERSION + command: [ "/bin/bash","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "beat" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret diff --git a/.github/deploy/deploy-dongtai-server-test.yml b/.github/deploy/deploy-dongtai-server-test.yml new file mode 100644 index 000000000..9fe6b1d84 --- /dev/null +++ b/.github/deploy/deploy-dongtai-server-test.yml @@ -0,0 +1,362 @@ +--- +# dongtai-server服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-server + namespace: iast-test + annotations: + kubesphere.io/description: dongtai-server + labels: + app: dongtai-server + version: v1 +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-server + template: + metadata: + annotations: + sidecar.istio.io/inject: "true" + labels: + app: dongtai-server + version: v1 + spec: + containers: + - name: dongtai-server-container + image: registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:VERSION + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + - name: log-path + mountPath: /tmp/logstash + env: + - name: DONGTAI_CONCURRENCY + value: --processes 2 --stats :3031 --stats-http + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 500Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + - name: log-path + persistentVolumeClaim: + claimName: app-agent-pvc + imagePullSecrets: + - name: aliyun-registry-secret +--- +# dongtai-server服务 +apiVersion: v1 +kind: Service +metadata: + name: dongtai-server-svc + namespace: iast-test +spec: + ports: + - name: http + protocol: TCP + port: 80 + targetPort: 8000 + selector: + app: dongtai-server + type: ClusterIP +--- +#dongtai-worker-task服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-task + namespace: iast-test + annotations: + kubesphere.io/description: dongtai-worker-task + labels: + app: dongtai-worker-task + version: v1 +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-task + template: + metadata: + labels: + app: dongtai-worker-task + version: v1 + spec: + containers: + - name: dongtai-worker-task-container + image: registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:VERSION + command: [ "/bin/bash","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "beat" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 500Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret +--- + +# dongtai-worker-high-freq服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-high-freq + namespace: iast-test 
+ annotations: + kubesphere.io/description: dongtai-worker-high-freq + labels: + app: dongtai-worker-high-freq + version: v1 +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-high-freq + template: + metadata: + labels: + app: dongtai-worker-high-freq + version: v1 + spec: + containers: + - name: dongtai-worker-high-freq-container + image: registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-high-freq" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + env: + - name: DONGTAI_CONCURRENCY + value: -P gevent --concurrency=121 + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 500Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret +--- +# dongtai-worker-beat服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-beat + namespace: iast-test + annotations: + kubesphere.io/description: dongtai-worker-beat + labels: + app: dongtai-worker-beat + version: v1 +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-beat + template: + metadata: + labels: + app: dongtai-worker-beat + version: v1 + spec: + containers: + - name: dongtai-worker-beat-container + image: registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-beat" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + env: + - name: DONGTAI_CONCURRENCY + value: --concurrency=2 + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 500Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret +--- +# dongtai-worker-other服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-other + namespace: iast-test + annotations: + kubesphere.io/description: dongtai-worker-other + labels: + app: dongtai-worker-other + version: v1 +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-other + template: + metadata: + labels: + app: dongtai-worker-other + version: v1 + spec: + containers: + - name: dongtai-worker-other-container + image: registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-other" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + env: + - name: DONGTAI_CONCURRENCY + value: --concurrency=2 + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 500Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret +--- +# dongtai-worker-sca服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-sca + namespace: iast-test + annotations: + kubesphere.io/description: dongtai-worker-sca + labels: + app: dongtai-worker-sca + version: v1 +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-sca + template: + metadata: + labels: + app: dongtai-worker-sca + version: v1 + spec: + containers: + - name: dongtai-worker-sca-container + image: 
registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-sca" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + env: + - name: DONGTAI_CONCURRENCY + value: --concurrency=2 + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 500Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret +--- +# dongtai-worker-es服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-es + namespace: iast-test + annotations: + kubesphere.io/description: dongtai-worker-es + labels: + app: dongtai-worker-es + version: v1 +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-es + template: + metadata: + labels: + app: dongtai-worker-es + version: v1 + spec: + containers: + - name: dongtai-worker-es-container + image: registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-es" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + env: + - name: DONGTAI_CONCURRENCY + value: --concurrency=2 + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 500Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + imagePullSecrets: + - name: aliyun-registry-secret diff --git a/.github/workflows/ApiTest.yml b/.github/workflows/ApiTest.yml new file mode 100644 index 000000000..aa333dd41 --- /dev/null +++ b/.github/workflows/ApiTest.yml @@ -0,0 +1,62 @@ + +name: "ApiTest" + +on: + push: + branches: [main] + pull_request: + # The branches below must be a subset of the branches above + branches: [main] + schedule: + - cron: '0 0 * * *' + +jobs: + # This workflow contains a single job called "build" + Run-API-TEST: + runs-on: ubuntu-latest + services: + mysql: + image: dongtai/dongtai-mysql-unittest:latest + ports: + - 3306:3306 + options: >- + --health-cmd "mysqladmin ping --silent" + --health-interval 10s + --health-timeout 180s + --health-retries 10 + redis: + image: dongtai/dongtai-redis:latest + ports: + - 6379:6379 + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + strategy: + max-parallel: 4 + matrix: + python-version: [3.7] + + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + python-version: 3.7 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements-prod.txt + pip install schemathesis + + - name: Django Api Testing + run: | + mysql --host=127.0.0.1 -uroot -p'dongtai-iast' -D dongtai_webapi < /home/runner/work/DongTai/DongTai/test/init.sql + cp dongtai_conf/conf/config.ini.test dongtai_conf/conf/config.ini + export DOC=TRUE + mkdir -p /tmp/logstash/report/{img,word,pdf,excel} && mkdir -p /tmp/iast_cache/package + python3 ./deploy/docker/version_update.py || true + python3 manage.py runserver 0.0.0.0:8000 > webapi.log & + sleep 15 + schemathesis run --base-url "http://localhost:8000/" -H 'Authorization: Token 67aebd78e700ad36a82a152276196b5f49fafeb0' http://localhost:8000/api/XZPcGFKoxYXScwGjQtJx8u/schema/ 
--hypothesis-suppress-health-check=data_too_large,filter_too_much,too_slow,return_value,large_base_example,not_a_test_method,function_scoped_fixture --validate-schema=false --hypothesis-verbosity normal diff --git a/.github/workflows/ApiTestwithiast.yml b/.github/workflows/ApiTestwithiast.yml new file mode 100644 index 000000000..14d585f0e --- /dev/null +++ b/.github/workflows/ApiTestwithiast.yml @@ -0,0 +1,66 @@ + +name: "ApiTest-withIAST" + +on: + schedule: + - cron: '0 0 * * *' + +jobs: + # This workflow contains a single job called "build" + Run-API-TEST: + runs-on: ubuntu-latest + services: + mysql: + image: dongtai/dongtai-mysql-unittest:latest + ports: + - 3306:3306 + options: >- + --health-cmd "mysqladmin ping --silent" + --health-interval 10s + --health-timeout 180s + --health-retries 10 + redis: + image: dongtai/dongtai-redis:latest + ports: + - 6379:6379 + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + strategy: + max-parallel: 4 + matrix: + python-version: [3.7] + + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + python-version: 3.7 + ref: main + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements-prod.txt + pip install schemathesis + + - name: Install dependencies + run: | + curl -X GET "https://iast.io/openapi/api/v1/agent/download?url=https://iast.io/openapi&language=python&projectName=WEBAPI${{ github.repository_owner }}.${{github.run_number}}" -H "Authorization: Token a303ab4bedc93f96808335d023d7ac4d2ba00773" -o dongtai-agent-python.tar.gz -k + pip install dongtai-agent-python.tar.gz + + + - name: Django Api Testing + run: | + mysql --host=127.0.0.1 -uroot -p'dongtai-iast' -D dongtai_webapi < /home/runner/work/DongTai/DongTai/test/init.sql + cp dongtai_conf/conf/config.ini.test dongtai_conf/conf/config.ini + export PROJECT_NAME=DONGTAI-SERVER.${{ github.repository_owner }} + export PROJECT_VERSION=${{github.run_number}} + export DOC=TRUE + export PYTHONAGENT=TRUE + mkdir -p /tmp/logstash/report/{img,word,pdf,excel} && mkdir -p /tmp/iast_cache/package + python3 ./deploy/docker/version_update.py || true + python3 manage.py runserver 0.0.0.0:8000 > webapi.log & + sleep 15 + schemathesis run --base-url "http://localhost:8000/" -H 'Authorization: Token 67aebd78e700ad36a82a152276196b5f49fafeb0' http://localhost:8000/api/XZPcGFKoxYXScwGjQtJx8u/schema/ --hypothesis-suppress-health-check=data_too_large,filter_too_much,too_slow,return_value,large_base_example,not_a_test_method,function_scoped_fixture --validate-schema=false --hypothesis-verbosity normal diff --git a/.github/workflows/UnitTest.yml b/.github/workflows/UnitTest.yml new file mode 100644 index 000000000..18b6eda1c --- /dev/null +++ b/.github/workflows/UnitTest.yml @@ -0,0 +1,54 @@ + +name: "UnitTest" + +on: + push: + branches: [main] + pull_request: + # The branches below must be a subset of the branches above + branches: [main] + schedule: + - cron: '0 0 * * *' + +jobs: + # This workflow contains a single job called "build" + Run-Unit-TEST: + runs-on: ubuntu-latest + services: + mysql: + image: dongtai/dongtai-mysql-unittest:latest + ports: + - 3306:3306 + options: >- + --health-cmd "mysqladmin ping --silent" + --health-interval 10s + --health-timeout 180s + --health-retries 10 + redis: + image: dongtai/dongtai-redis:latest + ports: + - 6379:6379 + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + strategy: + max-parallel: 4 
+ matrix: + python-version: [3.7] + + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + python-version: 3.7 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements-prod.txt + + - name: Django Unit Testing + run: | + python manage.py test diff --git a/.github/workflows/build-openapi.yml b/.github/workflows/build-openapi.yml deleted file mode 100644 index 18e622e69..000000000 --- a/.github/workflows/build-openapi.yml +++ /dev/null @@ -1,61 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: Build DongTai-OpenAPI - -on: - push: - branches: [ develop ] - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-latest - strategy: - max-parallel: 4 - matrix: - python-version: [3.7] - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - name: start-build - uses: joelwmale/webhook-action@master - with: - url: ${{ secrets.WEBHOOK_URL }} - body: '{"msg_type": "interactive","card": {"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "状态:构建开始\n项目:${{github.repository}}\n分支:${{github.ref}}\n流程:${{github.workflow}}\n构建编号:${{github.run_number}}\n触发事件:${{github.event_name}}\n提交人:${{github.actor}}\nSHA-1:${{github.sha}}\n","tag": "lark_md"}}]}}' - - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - name: Set up Python 3.7 - uses: actions/checkout@v1 - with: - python-version: 3.7 - - - name: Install dependencies - run: | - wget https://huoqi-public.oss-cn-beijing.aliyuncs.com/iast/dongtai-models-1.0.tar.gz -O /tmp/dongtai-models-1.0.tar.gz - python -m pip install --upgrade pip - pip install -r requirements.txt - pip install /tmp/dongtai-models-1.0.tar.gz - - - name: Lint with flake8 - run: | - pip install flake8 - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - - name: Django Testing project - env: - DATABASE_URL: ${{ secrets.DATABASE_URL }} - run: | - cp conf/config.ini.example conf/config.ini - python3 manage.py test - - - name: finish build - uses: joelwmale/webhook-action@master - with: - url: ${{ secrets.WEBHOOK_URL }} - body: '{"msg_type": "interactive","card": {"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "状态:构建开始\n项目:${{github.repository}}\n分支:${{github.ref}}\n流程:${{github.workflow}}\n构建编号:${{github.run_number}}\n触发事件:${{github.event_name}}\n提交人:${{github.actor}}\nSHA-1:${{github.sha}}\n","tag": "lark_md"}}]}}' - \ No newline at end of file diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 000000000..879df1ce8 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,70 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. 
Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ main ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ main ] + schedule: + - cron: '31 17 * * 4' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://git.io/codeql-language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v1 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 diff --git a/.github/workflows/community_notify.yml b/.github/workflows/community_notify.yml new file mode 100644 index 000000000..92d5b0041 --- /dev/null +++ b/.github/workflows/community_notify.yml @@ -0,0 +1,64 @@ +name: Community Notify + +on: + issues: + types: [opened, edited, deleted, closed] + issue_comment: + discussion: + types: [opened, edited, deleted] + discussion_comment: + +jobs: + issues: + name: Issue Created + if: ${{ github.event_name == 'issues' }} + runs-on: ubuntu-latest + steps: + - name: official notify + uses: joelwmale/webhook-action@master + with: + url: ${{ secrets.DONGTAI_WEBHOOK_URL }} + body: '{"msg_type":"interactive","card": {"header": {"title": {"tag": "plain_text", "content": "NOTIFY: issue ${{ github.event.issue.number }} ${{ github.event.action }}"}},"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "**repository**: ${{ github.event.repository.name }}\r\n**issue title**: ${{ github.event.issue.title }}\r\n**issue body**: ${{ github.event.issue.body }}\r\n**issue link:** [open issue link](${{ github.event.issue.html_url }})\r\n**issue state**: ${{ github.event.issue.state }}\r\n**assignee:** ${{ github.event.issue.assignee }} \r\n**assignees:** ${{ github.event.issue.assignees }} \r\n**issue labels**: ${{ github.event.issue.labels }}\r\n**issue author:** ${{github.actor}} \r\n**created at:** ${{ github.event.issue.created_at }}\r\n**updated at:** ${{ github.event.issue.updated_at }}\r\n","tag": "lark_md"}}]}}' + + issue_commented: + name: Issue comment + if: ${{ github.event_name == 'issue_comment' }} + runs-on: ubuntu-latest + steps: + - name: discussion comment notify + 
if: ${{ !github.event.issue.pull_request }} + uses: joelwmale/webhook-action@master + with: + url: ${{ secrets.DONGTAI_WEBHOOK_URL }} + body: '{"msg_type": "interactive","card": {"header": {"title": {"tag": "plain_text", "content": "NOTIFY: issue ${{ github.event.issue.number }} comment ${{ github.event.comment.number }} ${{ github.event.action }}"}}, "config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "**repository name**: ${{ github.event.repository.name }}\n**issue title**: ${{ github.event.issue.title }}\n**comment body**: ${{ github.event.comment.body }}\n**issue comment link:** [open issue comment](${{ github.event.comment.html_url }})\n**issue state**: ${{ github.event.issue.state }}\n**assignee:** ${{ github.event.issue.assignee }} \n**assignees:** ${{ github.event.issue.assignees }} \n**issue labels**: ${{ github.event.issue.labels }}\n**issue comment author:** ${{github.actor}}\n**created at:** ${{ github.event.issue.created_at }}\n**updated at:** ${{ github.event.issue.updated_at }}","tag": "lark_md"}}]}}' + + - name: start-build + if: ${{ github.event.issue.pull_request }} + uses: joelwmale/webhook-action@master + with: + url: ${{ secrets.DONGTAI_WEBHOOK_URL }} + body: '{"msg_type": "interactive","card": {"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "PR Comment\r\n触发事件:${{github.event_name}}\r\n提交人:${{github.actor}}","tag": "lark_md"}}]}}' + + discussion: + name: Discussion Created + if: ${{ github.event_name == 'discussion' }} + runs-on: ubuntu-latest + steps: + - name: notify + uses: joelwmale/webhook-action@master + with: + url: ${{ secrets.DONGTAI_WEBHOOK_URL }} + body: '{"msg_type":"interactive","card": {"header": {"title": {"tag": "plain_text", "content": "NOTIFY: Discussion ${{ github.event.discussion.number }} ${{ github.event.action }}"}},"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "**repository**: ${{ github.event.repository.full_name }}\r\n**title**: ${{ github.event.discussion.title }}\r\n**body**: ${{ github.event.discussion.body }}\r\n**category:** ${{ github.event.discussion.category.name }}\r\n**state**: ${{ github.event.discussion.state }}\r\n**author:** ${{github.actor}}\r\n**created at:** ${{ github.event.discussion.created_at }}\r\n**updated at:** ${{ github.event.discussion.updated_at }}\r\n","tag": "lark_md"}}]}}' + + discussion_commented: + name: discussion commented notify + if: ${{ github.event_name == 'discussion_comment' }} + runs-on: ubuntu-latest + steps: + - name: notify + env: + LABELS: ${{ toJson(github.event.issue.labels) }} + uses: joelwmale/webhook-action@master + with: + url: ${{ secrets.DONGTAI_WEBHOOK_URL }} + body: '{"msg_type":"interactive","card": {"header": {"title": {"tag": "plain_text", "content": "NOTIFY: Discussion ${{ github.event.discussion.number }} comment ${{ github.event.action }}"}},"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "**repository**: ${{ github.event.repository.full_name }}\r\n**category:** ${{ github.event.discussion.category.name }}\r\n**state**: ${{ github.event.discussion.state }}\r\n**title**: ${{ github.event.discussion.title }}\r\n**body**: ${{ github.event.discussion.body }}\r\n**comment**: ${{ github.event.comment.body }}\r\n**author:** ${{github.actor}}\r\n**created at:** ${{ github.event.discussion.created_at }}\r\n**updated at:** ${{ 
github.event.discussion.updated_at }}\r\n","tag": "lark_md"}}]}}' diff --git a/.github/workflows/config/.dlc.json b/.github/workflows/config/.dlc.json new file mode 100644 index 000000000..e63ce06bf --- /dev/null +++ b/.github/workflows/config/.dlc.json @@ -0,0 +1,16 @@ +{ + "ignorePatterns": [ + { + "pattern": "^http://localhost" + } + ], + "timeout": "10s", + "retryOn429": true, + "retryCount": 10, + "fallbackRetryDelay": "1000s", + "aliveStatusCodes": [ + 200, + 401, + 404 + ] + } \ No newline at end of file diff --git a/.github/workflows/config_update.sh b/.github/workflows/config_update.sh new file mode 100644 index 000000000..9d261bfe0 --- /dev/null +++ b/.github/workflows/config_update.sh @@ -0,0 +1,9 @@ +git config --global user.name "$GITHUB_ACTOR-bot" +git config --global user.email "$GITHUB_ACTOR-bot@dongtai.io" + +cp -r dongtai_conf/conf/config.ini.example deploy/docker-compose/config-tutorial.ini + +git add . +git commit -m "Update: change config file" + +git push "https://$GITHUB_ACTOR:$GITHUB_TOKEN@github.com/$GITHUB_REPOSITORY.git" diff --git a/.github/workflows/dead-link-checker.yaml b/.github/workflows/dead-link-checker.yaml new file mode 100644 index 000000000..dda8f66a5 --- /dev/null +++ b/.github/workflows/dead-link-checker.yaml @@ -0,0 +1,36 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: Dead Link Checker + +on: + pull_request: + +concurrency: + group: dlc-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + CheckDeadLinks: + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + - uses: actions/checkout@v2 + - run: sudo npm install -g markdown-link-check + - run: | + for file in $(find . 
-name "*.md"); do + markdown-link-check -c .github/workflows/config/.dlc.json -q "$file" + done \ No newline at end of file diff --git a/.github/workflows/deploy-dev.yaml b/.github/workflows/deploy-dev.yaml new file mode 100644 index 000000000..9d404aab8 --- /dev/null +++ b/.github/workflows/deploy-dev.yaml @@ -0,0 +1,82 @@ +# This is a basic workflow to help you get started with Actions + +name: Deploy DongTai Server To Dev + +on: + push: + branches: [ "main" ] + + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + Deploy-to-dev: + if: ${{ github.event_name == 'push' && github.repository_owner == 'HXSecurity' }} + runs-on: ubuntu-latest + strategy: + max-parallel: 4 + matrix: + python-version: [3.7] + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Setup QEMU + uses: docker/setup-qemu-action@v1 + + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Login to Aliyun Registry + uses: docker/login-action@v1 + with: + registry: ${{ secrets.ALIYUN_REGISTRY }} + username: ${{ secrets.ALIYUN_DOCKERHUB_USER }} + password: ${{ secrets.ALIYUN_DOCKERHUB_PASSWORD }} + + - name: Generate version file + run: | + #bash .github/workflows/config_update.sh + #bash .github/workflows/config_update.sh "${{ steps.release.outputs.VERSION }}" + + - name: Setup Ossutil + uses: manyuanrong/setup-ossutil@v2.0 + with: + endpoint: ${{ secrets.ALIYUN_OSS_ENDPOINT }} + access-key-id: ${{ secrets.ALIYUN_OSS_KEY_ID }} + access-key-secret: ${{ secrets.ALIYUN_OSS_KEY_SECRET }} + + - name: Download Agent + run: | + ossutil cp oss://dongtai/agent_test/java/latest/ ./ --include "*.jar" -r + ossutil cp oss://dongtai/agent_test/python/ ./ --include "*.tar.gz" -r + + - name: Generate version file + run: | + echo "REPLACE INTO project_version_control (version, component_name, component_version_hash) VALUES('${{ github.run_number }}.0.0', '${{ github.event.repository.name }}', '${GITHUB_SHA}');" >> ./deploy/docker/version.sql + + - name: Build + run: | + docker build -t registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:latest . 
+ docker tag registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:latest registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:1.0.${{github.run_number}} + + - name: push + uses: nick-invision/retry@v2 + with: + timeout_seconds: 300 + max_attempts: 10 + #retry_on: timeout + command: | + docker push registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:latest + docker push registry.cn-beijing.aliyuncs.com/secnium/dongtai-server-test:1.0.${{github.run_number}} + + - uses: actions/checkout@master + - name: deploy to cluster + uses: wahyd4/kubectl-helm-action@master + env: + KUBE_CONFIG_DATA: ${{ secrets.KUBE_CONFIG_TEST_DATA }} + with: + args: | + find .github/deploy/deploy-dongtai-server-test.yml -type f -exec sed -i 's/VERSION/1.0.${{github.run_number}}/g' {} \; + ls .github/deploy/deploy-dongtai-server-test.yml | xargs -I {} kubectl apply -f {} diff --git a/.github/workflows/deploy_openapi_to_aws.yml b/.github/workflows/deploy_openapi_to_aws.yml deleted file mode 100644 index 5bbd55edc..000000000 --- a/.github/workflows/deploy_openapi_to_aws.yml +++ /dev/null @@ -1,86 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: Deploy DongTai OpenAPI To AWS - -on: - pull_request: - branches: [ main ] - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-latest - strategy: - max-parallel: 4 - matrix: - python-version: [3.7] - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - name: start-build - uses: joelwmale/webhook-action@master - with: - url: ${{ secrets.WEBHOOK_URL }} - body: '{"msg_type": "interactive","card": {"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "状态:构建开始\n项目:${{github.repository}}\n分支:${{github.ref}}\n流程:${{github.workflow}}\n构建编号:${{github.run_number}}\n触发事件:${{github.event_name}}\n提交人:${{github.actor}}\nSHA-1:${{github.sha}}\n","tag": "lark_md"}}]}}' - - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - name: Set up Python 3.7 - uses: actions/checkout@v1 - with: - python-version: 3.7 - - - name: Install dependencies - run: | - wget https://huoqi-public.oss-cn-beijing.aliyuncs.com/iast/dongtai-models-1.0.tar.gz -O /tmp/dongtai-models-1.0.tar.gz - python -m pip install --upgrade pip - pip install -r requirements.txt - pip install /tmp/dongtai-models-1.0.tar.gz - - - name: Lint with flake8 - run: | - pip install flake8 - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - - name: Checkout - uses: actions/checkout@v2 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - registry: ${{ secrets.ALIYUN_REGISTRY }} - username: ${{ secrets.ALIYUN_DOCKERHUB_USER }} - password: ${{ secrets.ALIYUN_DOCKERHUB_PASSWORD }} - - - name: Build and push - uses: docker/build-push-action@v2 - with: - context: . 
- push: true - tags: "registry.cn-beijing.aliyuncs.com/secnium/iast-saas-openapi:latest,registry.cn-beijing.aliyuncs.com/secnium/iast-saas-openapi:1.0.${{github.run_number}},registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-openapi:latest," - - - uses: actions/checkout@master - - name: deploy to cluster - uses: wahyd4/kubectl-helm-action@master - env: - KUBE_CONFIG_DATA: ${{ secrets.KUBE_CONFIG_DATA }} - with: - args: | - find deploy/deploy-eks-iast-saas-openapi-prod.yml -type f -exec sed -i 's/VERSION/1.0.${{github.run_number}}/g' {} \; - ls deploy/deploy-eks-iast-saas-openapi-prod.yml | xargs -I {} kubectl apply -f {} - - - name: finish build - uses: joelwmale/webhook-action@master - with: - url: ${{ secrets.WEBHOOK_URL }} - body: '{"msg_type": "interactive","card": {"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "状态:构建完成\n项目:${{github.repository}}\n分支:${{github.ref}}\n流程:${{github.workflow}}\n构建编号:${{github.run_number}}\n触发事件:${{github.event_name}}\n提交人:${{github.actor}}\nSHA-1:${{github.sha}}\n","tag": "lark_md"}}]}}' diff --git a/.github/workflows/deploy_openapi_to_aws_test.yml b/.github/workflows/deploy_openapi_to_aws_test.yml deleted file mode 100644 index 2ec31f669..000000000 --- a/.github/workflows/deploy_openapi_to_aws_test.yml +++ /dev/null @@ -1,84 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: Deploy DongTai OpenAPI To AWS Test - -on: - pull_request: - branches: [ develop ] - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-latest - strategy: - max-parallel: 4 - matrix: - python-version: [3.7] - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - name: start-build - uses: joelwmale/webhook-action@master - with: - url: ${{ secrets.WEBHOOK_URL }} - body: '{"msg_type": "interactive","card": {"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "状态:构建开始\n项目:${{github.repository}}\n分支:${{github.ref}}\n流程:${{github.workflow}}\n构建编号:${{github.run_number}}\n触发事件:${{github.event_name}}\n提交人:${{github.actor}}\nSHA-1:${{github.sha}}\n","tag": "lark_md"}}]}}' - - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - name: Set up Python 3.7 - uses: actions/checkout@v1 - with: - python-version: 3.7 - - - name: Install dependencies - run: | - wget https://huoqi-public.oss-cn-beijing.aliyuncs.com/iast/dongtai-models-1.0.tar.gz -O /tmp/dongtai-models-1.0.tar.gz - python -m pip install --upgrade pip - pip install -r requirements.txt - pip install /tmp/dongtai-models-1.0.tar.gz - - name: Lint with flake8 - run: | - pip install flake8 - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Checkout - uses: actions/checkout@v2 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - registry: ${{ secrets.ALIYUN_REGISTRY }} - username: ${{ secrets.ALIYUN_DOCKERHUB_USER }} - password: ${{ secrets.ALIYUN_DOCKERHUB_PASSWORD }} - - - name: Build and push - uses: docker/build-push-action@v2 - with: - context: . - file: DockerfileTest - push: true - tags: "registry.cn-beijing.aliyuncs.com/secnium/iast-saas-openapi-test:latest,registry.cn-beijing.aliyuncs.com/secnium/iast-saas-openapi-test:1.0.${{github.run_number}}," - - - uses: actions/checkout@master - - name: deploy to cluster - uses: wahyd4/kubectl-helm-action@master - env: - KUBE_CONFIG_DATA: ${{ secrets.KUBE_CONFIG_TEST_DATA }} - with: - args: | - find deploy/deploy-eks-iast-saas-openapi-test.yml -type f -exec sed -i 's/VERSION/1.0.${{github.run_number}}/g' {} \; - ls deploy/deploy-eks-iast-saas-openapi-test.yml | xargs -I {} kubectl apply -f {} - - name: finish build - uses: joelwmale/webhook-action@master - with: - url: ${{ secrets.WEBHOOK_URL }} - body: '{"msg_type": "interactive","card": {"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "状态:构建完成\n项目:${{github.repository}}\n分支:${{github.ref}}\n流程:${{github.workflow}}\n构建编号:${{github.run_number}}\n触发事件:${{github.event_name}}\n提交人:${{github.actor}}\nSHA-1:${{github.sha}}\n","tag": "lark_md"}}]}}' diff --git a/.github/workflows/dongtai-devops-pr.yaml b/.github/workflows/dongtai-devops-pr.yaml new file mode 100644 index 000000000..2ae9b312c --- /dev/null +++ b/.github/workflows/dongtai-devops-pr.yaml @@ -0,0 +1,16 @@ +on: + push: + pull_request: + #types: [opened] + +name: dongtai-devops-pr + +jobs: + generic_handler: + if: ${{ github.repository_owner == 'HXSecurityBusiness' }} + name: pr + runs-on: ubuntu-latest + steps: + - uses: tscuite/action-generic-handler@main + env: + GITHUB_TOKEN: ${{ secrets.DONGTAI_GITHUB_TOKEN_ALL_CLIENT }} \ No newline at end of file diff --git a/.github/workflows/dontai-devops-cp.yaml b/.github/workflows/dontai-devops-cp.yaml new file mode 100644 index 000000000..bdb94b084 --- /dev/null +++ b/.github/workflows/dontai-devops-cp.yaml @@ -0,0 +1,17 @@ +name: dontai-devops-cp +on: + push: + branches: + - 'main' +jobs: + run_if: + if: ${{ github.event_name == 'push' && github.repository_owner == 'HXSecurity' }} + runs-on: ubuntu-latest + steps: + - name: Check out branch + uses: actions/checkout@v1 + - name: Generate version file + run: | + git checkout -b pr@main@${{github.run_number}} + git remote set-url origin --push --add '${{ secrets.DONGTAI_GITHUB_TOKEN_ALL }}' + git push --set-upstream origin pr@main@${{github.run_number}} \ No newline at end of file diff --git a/.github/workflows/dtctl-unittest.yaml b/.github/workflows/dtctl-unittest.yaml new file mode 100644 index 000000000..1ff73bf6e --- /dev/null +++ b/.github/workflows/dtctl-unittest.yaml @@ -0,0 +1,34 @@ +name: dtctl-unitest +on: + push: + branches: [ main ] +jobs: + build: + name: Build + runs-on: ${{ matrix.os }} + if: ${{ github.event_name == 'push' && github.repository_owner == 'HXSecurity' }} + strategy: + matrix: + os: [ ubuntu-20.04 ,ubuntu-18.04, ] + steps: + - name: Setup docker environments + run: | + sudo curl -fsSL get.docker.com | sh + + - name: Setup BATS + uses: mig4/setup-bats@v1 + with: + 
bats-version: 1.5.0 + + - name: Checkout code + uses: actions/checkout@v1 + + - name: Download mysql data + run: | + wget https://huoqi-public.oss-cn-beijing.aliyuncs.com/iast/sql/data.zip -P ./deploy/docker-compose + unzip ./deploy/docker-compose/data.zip -d ./deploy/docker-compose + + - name: Do unittest + working-directory: ./deploy/docker-compose + run: | + bats test/test_dtctl.bats diff --git a/.github/workflows/release_dongtai.yml b/.github/workflows/release_dongtai.yml new file mode 100644 index 000000000..300c12d5e --- /dev/null +++ b/.github/workflows/release_dongtai.yml @@ -0,0 +1,109 @@ +name: Release DongTai-Server + +on: + release: + types: [ created, edited ] + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: write + + strategy: + max-parallel: 4 + matrix: + python-version: [3.7] + + steps: + - name: start-build + uses: joelwmale/webhook-action@master + with: + url: ${{ secrets.DONGTAI_WEBHOOK_URL }} + body: '{"msg_type": "interactive","card": {"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "状态:项目${{github.repository}}构建开始\n分支:${{github.ref}}\n流程:${{github.workflow}}\n构建编号:${{github.run_number}}\n触发事件:${{github.event_name}}\n提交人:${{github.actor}}\nSHA-1:${{github.sha}}\n","tag": "lark_md"}}]}}' + + - name: Checkout + uses: actions/checkout@v2 + + - id: release + run: | + TAG_NAME=${{ github.event.release.tag_name }} + ID=`echo ${TAG_NAME##v}` + echo "::set-output name=VERSION::$ID" + + - name: Generate version file + run: | + bash .github/workflows/version_update.sh "${{ steps.release.outputs.VERSION }}" + + - name: Login to DockerHub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DONGTAI_DOCKERHUB_USERNAME }} + password: ${{ secrets.DONGTAI_DOCKERHUB_TOKEN }} + + - name: Login to Aliyun Registry + uses: docker/login-action@v1 + with: + registry: ${{ secrets.ALIYUN_REGISTRY }} + username: ${{ secrets.ALIYUN_DOCKERHUB_USER }} + password: ${{ secrets.ALIYUN_DOCKERHUB_PASSWORD }} + + - name: Setup Ossutil + uses: manyuanrong/setup-ossutil@v2.0 + with: + # 填写Bucket所在地域的域名信息,可参考访问域名和数据中心。 + endpoint: ${{ secrets.ALIYUN_OSS_ENDPOINT }} + # AccessKeyID + access-key-id: ${{ secrets.ALIYUN_OSS_KEY_ID }} + # AccessKeySceret + access-key-secret: ${{ secrets.ALIYUN_OSS_KEY_SECRET }} + - run: | + echo "${{ github.event.repository.name }},version,${{ steps.release.outputs.version }}" >> version.txt && \ + echo "${{ github.event.repository.name }},commit_hash,${GITHUB_SHA}" >> version.txt && \ + cat version.txt + ossutil cp -rf version.txt oss://huoqi-public/iast/release-version/${{ github.event.repository.name }}/${{ steps.release.outputs.version }}/version.txt + ossutil cp oss://dongtai/agent/java/${{ steps.release.outputs.VERSION }}/ ./ --include "*.jar" -r + [ ! -f ./dongtai-agent.jar ] && echo "$FILE does not exist." + [ ! -f ./dongtai-agent.jar ] && ossutil cp oss://dongtai/agent/java/latest/ ./ --include "*.jar" -r + [ ! -f ./dongtai-agent.jar ] && echo "$FILE does not exist." 
+ ossutil cp oss://dongtai/agent/python/ ./ --include "*.tar.gz" -r + ossutil cp oss://dongtai/agent/php/ ./ --include "*.tar.gz" -r + #ossutil cp oss://huoqi-public/iast/release-version/DongTai-agent-python/${{ steps.release.outputs.VERSION }}/version.txt DongTai-agent-python.version.txt || true + #ossutil cp oss://huoqi-public/iast/release-version/DongTai-agent-java/${{ steps.release.outputs.VERSION }}/version.txt DongTai-agent-java.version.txt || true + #ossutil cp oss://huoqi-public/iast/release-version/DongTai-agent-php/${{ steps.release.outputs.VERSION }}/version.txt DongTai-agent-php.version.txt || true + #DONGTAI_JAVA_AGENT_REPONAME=`cat DongTai-agent-java.version.txt | awk '{split($0,a,",");print a[1]}' | sed -n "1p"` + #DONGTAI_JAVA_AGENT_VERSION=`cat DongTai-agent-java.version.txt | awk '{split($0,a,",");print a[3]}' | sed -n "1p"` + #DONGTAI_JAVA_AGENT_HASH=`cat DongTai-agent-java.version.txt | awk '{split($0,a,",");print a[3]}' | sed -n "2p"` + #DONGTAI_PYTHON_AGENT_REPONAME=`cat DongTai-agent-python.version.txt | awk '{split($0,a,",");print a[1]}' | sed -n "1p"` + #DONGTAI_PYTHON_AGENT_VERSION=`cat DongTai-agent-python.version.txt | awk '{split($0,a,",");print a[3]}' | sed -n "1p"` + #DONGTAI_PYTHON_AGENT_HASH=`cat DongTai-agent-python.version.txt | awk '{split($0,a,",");print a[3]}' | sed -n "2p"` + echo "REPLACE INTO project_version_control (version, component_name, component_version_hash) VALUES('${{ steps.release.outputs.version }}', '${{ github.event.repository.name }}', '${GITHUB_SHA}');" >> ./deploy/docker/version.sql + #echo "REPLACE INTO project_version_control (version, component_name, component_version_hash) VALUES('$DONGTAI_JAVA_AGENT_VERSION', '$DONGTAI_JAVA_AGENT_REPONAME', '$DONGTAI_JAVA_AGENT_HASH');" >> ./deploy/docker/version.sql + #echo "REPLACE INTO project_version_control (version, component_name, component_version_hash) VALUES('$DONGTAI_PYTHON_AGENT_VERSION', '$DONGTAI_PYTHON_AGENT_REPONAME', '$DONGTAI_PYTHON_AGENT_HASH');" >> ./deploy/docker/version.sql + + - name: Build and push + uses: docker/build-push-action@v2 + with: + file: Dockerfile + context: . 
+ push: true + tags: | + registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:latest + registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:${{ steps.release.outputs.version }} + dongtai/dongtai-server:latest + dongtai/dongtai-server:${{ steps.release.outputs.version }} + #- uses: actions/checkout@master + #- name: deploy to cluster + # uses: wahyd4/kubectl-helm-action@master + # env: + # KUBE_CONFIG_DATA: ${{ secrets.KUBE_CONFIG_DATA }} + # with: + # args: | + # find .github/deploy/deploy-dongtai-server-prod.yml -type f -exec sed -i 's/VERSION/${{ steps.release.outputs.version }}/g' {} \; + # ls .github/deploy/deploy-dongtai-server-prod.yml | xargs -I {} kubectl apply -f {} + + - name: finish build + uses: joelwmale/webhook-action@master + with: + url: ${{ secrets.DONGTAI_WEBHOOK_URL }} + body: '{"msg_type": "interactive","card": {"config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "状态:项目${{github.repository}}构建成功\n分支:${{github.ref}}\n流程:${{github.workflow}}\n构建编号:${{github.run_number}}\n触发事件:${{github.event_name}}\n提交人:${{github.actor}}\nSHA-1:${{github.sha}}\n","tag": "lark_md"}}]}}' diff --git a/.github/workflows/release_helm_chart.yml b/.github/workflows/release_helm_chart.yml new file mode 100644 index 000000000..d38927cc0 --- /dev/null +++ b/.github/workflows/release_helm_chart.yml @@ -0,0 +1,46 @@ +name: Release helm chart +on: + push: + tags: + - "v*" +jobs: + build: + name: Build + runs-on: ubuntu-latest + + steps: + - name: Checkout repo + uses: actions/checkout@v2 + + - uses: azure/setup-helm@v1 + with: + version: 'latest' # default is latest stable + id: install + + - name: Get the release version + id: get_version + run: echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/} + + - name: Setup ossutil + uses: manyuanrong/setup-ossutil@v2.0 + with: + endpoint: ${{ secrets.CHART_OSS_ENDPOINT }} + access-key-id: ${{ secrets.CHART_OSS_ACCESS_KEY_ID }} + access-key-secret: ${{ secrets.CHART_OSS_ACCESS_KEY_SECRET }} + + - name: Download existed repo files + run: | + ossutil cp -rf oss://dongtai-helm-charts/iast/ ~/helm/repo/ --include dongtai-iast-*.tgz --exclude "index.yaml" + + - name: Create helm package + run: | + helm package deploy/kubernetes/helm -d ~/helm/repo --app-version ${{ steps.get_version.outputs.VERSION }} --version ${{ steps.get_version.outputs.VERSION }} + helm repo index ~/helm/repo/ --url ${{ secrets.DONGTAI_IAST_CHART_REPO_URL }} + + + + - name: Push helm chart to repo + run: | + ossutil cp -rf ~/helm/repo/dongtai-iast-${{ steps.get_version.outputs.VERSION }}.tgz oss://dongtai-helm-charts/iast/ + ossutil cp -rf ~/helm/repo/index.yaml oss://dongtai-helm-charts/iast/ + diff --git a/.github/workflows/repository_notify.yml b/.github/workflows/repository_notify.yml new file mode 100644 index 000000000..f5b6a3dda --- /dev/null +++ b/.github/workflows/repository_notify.yml @@ -0,0 +1,43 @@ +name: Repository Popularity Statistice Notify + +on: + schedule: + - cron: '0 2 * * *' + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - id: repo-stars + run: | + STARS=`curl -s 'https://api.github.com/repos/${{github.repository}}?page=$i&per_page=100' | jq .stargazers_count` + echo "::set-output name=stars::$STARS" + shell: bash + + - id: repo-forks + run: | + STARS=`curl -s 'https://api.github.com/repos/${{github.repository}}?page=$i&per_page=100' | jq .forks_count` + echo "::set-output name=forks::$STARS" + shell: bash + + - id: repo-issues + run: | + STARS=`curl -s 
'https://api.github.com/repos/${{github.repository}}?page=$i&per_page=100' | jq .open_issues_count` + echo "::set-output name=issues::$STARS" + shell: bash + + - id: echo + run: | + echo "${{ steps.repo-stars.outputs.stars }}" + echo "${{ steps.repo-forks.outputs.forks }}" + echo "${{ steps.repo-issues.outputs.issues }}" + shell: bash + + - name: notify + env: + LABELS: ${{ toJson(github.event.issue.labels) }} + uses: joelwmale/webhook-action@master + with: + url: ${{ secrets.DONGTAI_WEBHOOK_URL }} + body: '{"msg_type": "interactive","card": {"header": {"title": {"tag": "plain_text", "content": "Repository Popularity Statistice Notify"}}, "config": {"wide_screen_mode": true,"enable_forward": true},"elements": [{"tag": "div","text": {"content": "**repository: **${{ github.repository }}\n**star: ** ${{ steps.repo-stars.outputs.stars }}\n**fork: **${{ steps.repo-forks.outputs.forks }}\n**issue count: **${{ steps.repo-issues.outputs.issues }}\n","tag": "lark_md"}}]}}' + diff --git a/.github/workflows/version_update.sh b/.github/workflows/version_update.sh new file mode 100644 index 000000000..656041b04 --- /dev/null +++ b/.github/workflows/version_update.sh @@ -0,0 +1,15 @@ +NEW_VERSION=$1 + +echo "curent path: $(pwd), change version to $NEW_VERSION" + +git config --global user.name "$GITHUB_ACTOR-bot" +git config --global user.email "$GITHUB_ACTOR-bot@dongtai.io" +git checkout -b "release-$NEW_VERSION" + +sed -i "s//$NEW_VERSION/g" ./deploy/docker-compose/README.MD +sed -i "s//$NEW_VERSION/g" ./deploy/docker-compose/README-zh.md + +git add . +git commit -m "Update: change version to $NEW_VERSION" + +git push "https://$GITHUB_ACTOR:$GITHUB_TOKEN@github.com/$GITHUB_REPOSITORY.git" HEAD:"release-$NEW_VERSION" diff --git a/.gitignore b/.gitignore index 3b572f0e6..a900c9224 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +*.swp +.DS_Store *.DS_Store /venv */__pycache__/* @@ -5,12 +7,25 @@ *.pyc .idea/ scaapi/cron/logs/sca.log -iast/upload/reports/* +*.docx db/* -conf/config.ini #Dockerfile -doc +doc/* doc/example.json +webapi/conf/config.ini logs/*.log build_docker.sh -iast-package \ No newline at end of file +dongtai_py_agent.log +dongtai-agent-python.tar.gz + +deploy/docker-compose/docker-compose.yml +doc +iast-package +config.ini +dist +dongtai.egg-info +build +*.log +*.log.* +celerybeat.pid +*.mo \ No newline at end of file diff --git a/.pep8speaks.yml b/.pep8speaks.yml new file mode 100644 index 000000000..a9231948c --- /dev/null +++ b/.pep8speaks.yml @@ -0,0 +1,32 @@ +scanner: + diff_only: True # If False, the entire file touched by the Pull Request is scanned for errors. If True, only the diff is scanned. + linter: pycodestyle # Other option is flake8 + +pycodestyle: # Same as scanner.linter value. 
Other option is flake8 + max-line-length: 100 # Default is 79 in PEP 8 + ignore: # Errors and warnings to ignore + - E501 + - E402 + - E231 + - E302 + - E301 + - E303 + - E251 + - E265 + - W291 + - E126 + - W293 + - W391 + - W503 + - E128 + - E127 + - E122 + - E241 + - E225 + - E401 + - E101 + - E125 + - E131 + - E266 + - E701 + - W191 diff --git a/AgentServer/base.py b/AgentServer/base.py deleted file mode 100644 index 0f2fadde9..000000000 --- a/AgentServer/base.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2021/1/12 下午7:40 -# software: PyCharm -# project: lingzhi-agent-server -from django.http import JsonResponse -from django.views.decorators.csrf import csrf_exempt -from rest_framework.views import APIView - - -class EndPoint(APIView): - name = "api-v1" - description = "ApiServer接口" - - def __init__(self, **kwargs): - """ - Constructor. Called in the URLconf; can contain helpful extra - keyword arguments, and other things. - """ - # Go through keyword arguments, and either save their values to our - # instance, or raise an error. - super().__init__(**kwargs) - - @csrf_exempt - def dispatch(self, request, *args, **kwargs): - self.args = args - self.kwargs = kwargs - request = self.initialize_request(request, *args, **kwargs) - self.request = request - self.headers = self.default_response_headers # deprecate? - - try: - self.initial(request, *args, **kwargs) - - # Get the appropriate handler method - if request.method.lower() in self.http_method_names: - handler = getattr(self, request.method.lower(), - self.http_method_not_allowed) - else: - handler = self.http_method_not_allowed - - response = handler(request, *args, **kwargs) - - except Exception as exc: - response = self.handle_exception(exc) - - self.response = self.finalize_response(request, response, *args, **kwargs) - return self.response - - -class R: - @staticmethod - def success(data=None, msg="success"): - return JsonResponse({ - "status": 201, - "msg": msg, - "data": data - }) - - @staticmethod - def failure(data=None, msg="failure", status=None): - if status: - return JsonResponse( - { - "status": 202, - "msg": msg, - "data": data - }, - status=status - ) - else: - return JsonResponse( - { - "status": 202, - "msg": msg, - "data": data - } - ) diff --git a/AgentServer/const.py b/AgentServer/const.py deleted file mode 100644 index 558475531..000000000 --- a/AgentServer/const.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2021/1/27 上午10:35 -# software: PyCharm -# project: lingzhi-agent-server - -HOOK_TYPE_ENABLE = 1 -HOOK_TYPE_DISABLE = 0 diff --git a/AgentServer/settings.py b/AgentServer/settings.py deleted file mode 100644 index 69e3e0b1f..000000000 --- a/AgentServer/settings.py +++ /dev/null @@ -1,171 +0,0 @@ -""" -Django settings for AgentServer project. - -Generated by 'django-admin startproject' using Django 3.0.3. - -For more information on this file, see -https://docs.djangoproject.com/en/3.0/topics/settings/ - -For the full list of settings and their values, see -https://docs.djangoproject.com/en/3.0/ref/settings/ -""" - -import os -import sys -from configparser import ConfigParser - -# Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
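The AgentServer/settings.py module deleted below loaded its MySQL, engine, and Aliyun OSS settings from conf/config.ini via ConfigParser. A minimal sketch of such a file, using only the sections and keys referenced by the config.get(...) calls that follow; every value here is a placeholder, not a real credential:

```shell
# Hypothetical conf/config.ini for the removed AgentServer settings module.
# Section and key names mirror the config.get(...) calls in settings.py;
# all values are placeholders.
mkdir -p conf
cat > conf/config.ini <<'EOF'
[mysql]
user = dongtai
name = dongtai_webapi
password = changeme
host = 127.0.0.1
port = 3306

[engine]
url = http://127.0.0.1:8001

[aliyun_oss]
access_key = placeholder-access-key
access_key_secret = placeholder-access-key-secret
EOF
```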
-BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - -config = ConfigParser() -config.read(os.path.join(BASE_DIR, 'conf/config.ini')) - -# Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ - -# SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = 'u2^jmdc^l#=uz&r765fb4nyo)k*)0%tk3%yp*xf#i8b%(+-&vj' - -# SECURITY WARNING: don't run with debug turned on in production! -DEBUG = os.environ.get("debug", 'false') == 'true' - -ALLOWED_HOSTS = ['*'] - -# Application definition - -INSTALLED_APPS = [ - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'rest_framework', - 'rest_framework.authtoken', - 'dongtai_models', - 'apiserver' -] - -REST_FRAMEWORK = { - 'PAGE_SIZE': 20, - 'DEFAULT_PAGINATION_CLASS': ['django.core.paginator'], - 'DEFAULT_AUTHENTICATION_CLASSES': [ - 'rest_framework.authentication.TokenAuthentication', - ], - 'DEFAULT_RENDERER_CLASSES': [ - 'rest_framework.renderers.JSONRenderer', - 'rest_framework.renderers.BrowsableAPIRenderer', - ] -} - -MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', -] - -ROOT_URLCONF = 'AgentServer.urls' - -TEMPLATES = [ - { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - ], - }, - }, -] - -WSGI_APPLICATION = 'AgentServer.wsgi.application' - -# Database -# https://docs.djangoproject.com/en/3.0/ref/settings/#databases -if len(sys.argv) > 1 and sys.argv[1] == 'test': - DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), - } - } -else: - DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.mysql', - 'OPTIONS': {'charset': 'utf8mb4'}, - 'USER': config.get("mysql", 'user'), - 'NAME': config.get("mysql", 'name'), - 'PASSWORD': config.get("mysql", 'password'), - 'HOST': config.get("mysql", 'host'), - 'PORT': config.get("mysql", 'port'), - } - } - -# Password validation -# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators -AUTH_USER_MODEL = 'dongtai_models.User' -AUTH_PASSWORD_VALIDATORS = [ - { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, -] - -# Internationalization -# https://docs.djangoproject.com/en/3.0/topics/i18n/ - -LANGUAGE_CODE = 'en-us' - -TIME_ZONE = 'UTC' - -USE_I18N = True - -USE_L10N = True - -USE_TZ = True - -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/3.0/howto/static-files/ - -STATIC_URL = '/static/' - -# 配置RSA加解密需要的公钥、私钥路径 -PRIVATE_KEY = os.path.join(BASE_DIR, 'config', 'rsa_keys/private_key.pem') -PUBLIC_KEY = os.path.join(BASE_DIR, 'config', 'rsa_keys/public_key.pem') - -BASE_ENGINE_URL = config.get("engine", "url") + '/api/engine/run?method_pool_id={id}' - -LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'handlers': { - 
'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - }, - 'dongtai.openapi': { - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': 'logs/apiserver.log', - 'backupCount': 5, - 'maxBytes': 1024 * 1024 * 10, - }, - }, - 'loggers': { - 'dongtai.openapi': { - 'handlers': ['console', 'dongtai.openapi'], - 'propagate': True, - 'level': 'INFO', - }, - } -} - -# 配置阿里云OSS访问凭证 -ACCESS_KEY = config.get('aliyun_oss', 'access_key') -ACCESS_KEY_SECRET = config.get('aliyun_oss', 'access_key_secret') diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..3710d2db0 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,80 @@ +# Change log + +## [1.3.0](https://github.com/HXSecurity/DongTai-webapi/releases/tag/v1.3.0)-2021-1-15 +* Features + * Added the function of API automatic testing +* Improve + * Improve the query speed of the vulnerability export interface + * Improve the problem of missing hints in some content +* Fix + * Fixed re-dos issue in regex validation + * Fixed component export csv not correctly carrying UTF-8 BOM + * Fixed the problem that the prompt information is inconsistent when the project information is modified + * Fixed some content missing i18n part + * Fixed the problem of component vulnerability display + + +## [1.2.0](https://github.com/HXSecurity/DongTai-webapi/releases/tag/v1.2.0)-2021-12-18 +* Features + * Added license display of components + * Added a set of interfaces to get the overview list by id + * Added custom rule batch processing interface + * Added component export function +* Improve + * Improved the query speed of the component overview interface +* Fix + * Fixed a bug that caused vulnerabilities to be undetected by modifying the strategy + * Fixed the bug that failed to obtain data from the /api/v1/sensitive_info_rule/ page + * Fixed the bug that the regular check is inconsistent with the engine +## [1.2.0](https://github.com/HXSecurity/DongTai-core/releases/tag/v1.2.0)-2021-12-31 +* Function + * Increase license in ScaMaven +* Improve + * Change command to TextField +## [1.2.0](https://github.com/HXSecurity/DongTai-openapi/releases/tag/v1.2.0) - 2021-12-31 +* Function + * Add Go Agent download + * Add Gzip + +## [1.1.4](https://github.com/HXSecurity/DongTai-openapi/releases/tag/v1.1.4)-2021-12-18 +* Improve + * Split and add hooks to accommodate plugin development +* Function + * Increase hard-coded vulnerabilities + * Increase SCA handler +## [1.1.4](https://github.com/HXSecurity/DongTai-webapi/releases/tag/v1.1.4)-2021-12-18 +* Improve + * Split and add hooks to accommodate plugin development +* Fix + * Fixed VulDetail when container is None , argument of type 'NoneType' is not iterable + * Fixed VulSummary Inappropriate sql query causes API timeout + * Fixed The name of the scanning strategy is not brought back when returning + * Fixed /api/v1/vulns local variable 'result' referenced before assignment + * Fixed /api/v1/sensitive_info_rule/ fields No indication of range +## [1.1.4](https://github.com/HXSecurity/DongTai-core/releases/tag/v1.1.4)-2021-12-18 +* Function + * Increase hard-coded vulnerabilities + +## [1.1.3](https://github.com/HXSecurity/DongTai-openapi/releases/tag/v1.1.3)-2021-12-03 + +* Function + * Projects are now sorted according to the time of obtaining component and vulnerability information +## [1.1.3](https://github.com/HXSecurity/dongtai-core/releases/tag/v1.1.3)-2021-12-03 + +* Function + * Increase the vulnerability active verification switch (including global and project level) +## 
[1.1.3](https://github.com/HXSecurity/DongTai-webapi/releases/tag/v1.1.3)-2021-12-03 + +* Function + * Projects are now sorted according to the time of obtaining component and vulnerability information + * Added scan template policy management + * Increase the vulnerability active verification switch (including global and project level) +* Improve + * Component information now adds component path + * Improved the original paging logic + * Improved the original data verification to adapt to the boundary value + * The agent name now gives priority to the alias when binding the agent +* Fix + * Fixed the error that may be caused by agentid when the project is created + * Fixed a non-atomic error when the project was created + * Fixed permission errors when deleting data diff --git a/CHANGELOG_CN.md b/CHANGELOG_CN.md new file mode 100644 index 000000000..e4e851854 --- /dev/null +++ b/CHANGELOG_CN.md @@ -0,0 +1,87 @@ +# 升级日志 + +## [1.3.0](https://github.com/HXSecurity/DongTai-webapi/releases/tag/v1.3.0)-2021-1-15 +* 功能 + * 增加了API自动测试的功能 +* 改善 + * 改善了漏洞导出接口的查询速度 + * 改善了部分内容缺少提示的问题 +* 修复 + * 修复了正则表达式验证中的re-dos问题 + * 修复了组件导出csv没有正确携带UTF-8 BOM的问题 + * 修复项目信息修改时提示信息不一致的问题 + * 修复了部分内容缺少i18n部分的问题 + * 修复了组件漏洞展示的问题 +## [1.3.0](https://github.com/HXSecurity/DongTai-openapi/releases/tag/v1.3.0) - 2022-01-18 +* 功能 + * 增加SCA API,本地可通过调用SCA API查询组件是否存在漏洞 + + +## [1.2.0](https://github.com/HXSecurity/DongTai-webapi/releases/tag/v1.2.0)-2021-12-18 +* 功能 + * 增加了组件的license展示 + * 增加了一组通过id获取概况列表的接口 + * 增加了自定义规则批量处理接口 + * 增加组件导出功能 +* 改善 + * 改善了组件概况接口的查询速度 +* 修复 + * 修复了修改策略导致漏洞无法检出的bug + * 修复了/api/v1/sensitive_info_rule/分页数据获取失败的bug + * 修复正则校验与engine不一致的bug +## [1.2.0](https://github.com/HXSecurity/DongTai-core/releases/tag/v1.2.0)-2021-12-31 +* 功能 + * 在ScaMaven增加license +* 改进 + * 将commandTextField +## [1.2.0](https://github.com/HXSecurity/DongTai-openapi/releases/tag/v1.2.0) - 2021-12-31 +* 功能 + * 增加Go Agent下载 + * 增加Gzip + + + +## [1.1.4](https://github.com/HXSecurity/DongTai-webapi/releases/tag/v1.1.4)-2021-12-18 +* 提升 + * 拆分和添加钩子以适应插件开发 +* + * 修复了container为 None 时的 VulDetail ,NoneType导致的问题 + * 修复了 VulSummary 不适当的 sql 查询导致 API 超时的问题 + * 修复返回时缺失扫描策略名称的的问题 + * 修正/api/v1/vulns 局部变量'result' 赋值前引用的问题 + * 修正 /api/v1/sensitive_info_rule/ 字段 没有范围指示 +## [1.1.4](https://github.com/HXSecurity/DongTai-core/releases/tag/v1.1.4) - 2021-12-18 +* 功能 + * 增加硬编码漏洞 +## [1.1.4](https://github.com/HXSecurity/DongTai-openapi/releases/tag/v1.1.4) - 2021-12-18 +* 功能 + * 增加批量上传组件接口 +* 改进 + * 拆分和添加钩子以适应插件开发 +* 功能 + * 增加硬编码漏洞上报规则 + + +## [1.1.3](https://github.com/HXSecurity/DongTai-webapi/releases/tag/v1.1.3) - 2021-12-03 + +* 功能 + * 项目现在根据获取组件和漏洞信息时间排序 + * 增加了扫描模板策略管理 + * 增加漏洞主动验证开关(包括全局与项目级) +* 改进 + * 组件信息现在增加了组件路径 + * 改进了原有的分页逻辑 + * 改进了原有的数据校验以适应边界值 + * 绑定探针时探针名现在优先显示别名 +* 修复 + * 修复项目创建时agentid可能导致的错误 + * 修复了项目创建时非原子性错误 + * 修复删除数据时存在的权限错误 +## [1.1.3](https://github.com/HXSecurity/dongtai-core/releases/tag/v1.1.3)-2021-12-03 + +* 功能 + * 增加漏洞主动验证开关(包括全局与项目级) +## [1.1.3](https://github.com/HXSecurity/DongTai-openapi/releases/tag/v1.1.3) - 2021-12-03 + +* 功能 + * 增加漏洞主动验证开关(包括全局与项目级) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..aa198bf8e --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,68 @@ +# Contributing to DongTai IAST + +Thank you for considering contributing to DongTai IAST. 
You can contribute to DongTai IAST in many ways: + +- reporting issues +- contributing code + +## Bug reports + +If you think you have found a bug in DongTai IAST, first make sure that you are testing against the latest version of DongTai IAST - your issue may already have been fixed. If not, please follow the prompts to describe the bug as completely as possible when [submitting the issue](https://github.com/HXSecurity/DongTai/issues/new?assignees=exexute&labels=bug&projects=1&template=bug_report.yaml). + +## Feature requests + +If you find yourself wishing for a feature that doesn't exist in DongTai IAST, please [submit your issue](https://github.com/HXSecurity/DongTai/issues/new?assignees=exexute&labels=feature&projects=1&template=feature_request.yaml). We look forward to learning about more users' needs. + +## Contributing code + +If you would like to contribute a new feature or a bug fix to DongTai IAST, please discuss your idea first in a GitHub issue. If there is no GitHub issue for your idea, please open one. We always pay attention to issues and provide solutions. If you have never created a pull request before, here is a tutorial on how to create a pull request. + +1. If you want to write code that you would like to contribute to DongTai IAST, following these guidelines will make it easier for the DongTai IAST development team to review and accept your changes. + + **Coding Guidelines** + + - Follow the latest source code + - Neat code formatting + - Commits + - Look through all of your changes in your patch or pull request before you submit it to us. Make sure that everything you've changed is there for a reason. + - Please don't include unfinished work in the patch. Make sure that it doesn't contain any TODO comments. If you added some code and ended up not needing it, please make sure that you delete it before you submit your patch. + - Please don't mix changes that only affect formatting, fix "yellow code" (warnings), or adjust code style with actual changes that fix a bug or implement a feature. No one likes to leave poor code, but remember that mixing these changes in complicates the review process. + - Please don't fix multiple problems within a single patch or pull request. + - Please avoid moving or renaming classes unless it is necessary for the fix. + +2. Fork the project, clone your fork: + + ```shell + git clone https://github.com// + ``` + +3. If you cloned a while ago, get the latest changes from upstream: + + ```shell + git checkout main + git pull upstream main + git submodule init + ``` + +4. Create a new topic branch (off the main project development branch) to contain your feature, change, or fix: + + ```shell + git checkout -b + ``` + +5. Push your topic branch up to your fork: + + ```shell + git push origin + ``` + +6. Open a Pull Request with a clear title and description. + +## Contributor Resources + +### Contributor Level + +- **Contributor** You can become a contributor by submitting a valid issue, getting a PR merged, or answering users' questions in the community +- **maintainers** First, you need to be a contributor; second, you need to have submitted important issues/PRs or made other outstanding contributions; the existing maintainers and core developers then discuss whether to allow them to join the maintainer team. 
+- **core members** Core members need to be maintainers, and then, have their own ideas and insights on the development of the product, can put forward key suggestions or develop related functions; The existing core members discuss together and decide whether to allow them to join the core team. + diff --git a/Dockerfile b/Dockerfile index 83ab05445..3b00d483c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,20 +1,30 @@ -FROM python:3.7.7 +FROM python:3.7-slim ARG VERSION ENV DEBIAN_FRONTEND=noninteractive ENV LANG=en_US.UTF-8 +ENV LC_ALL=en_US.UTF-8 +ENV LANG=en_US.UTF-8 +ENV LANGUAGE=en_US.UTF-8 ENV TZ=Asia/Shanghai -RUN curl -fsSL https://nginx.org/keys/nginx_signing.key | apt-key add - \ - && apt-key fingerprint ABF5BD827BD9BF62 \ - && apt-get update -y \ - && apt install -y libc6-dev unzip vim cron swig openjdk-11-jdk +RUN apt-get update -y \ + && apt install -y gettext gcc make cmake libmariadb-dev curl libc6-dev unzip cron openjdk-11-jdk fonts-wqy-microhei vim +# htop sysstat net-tools iproute2 procps lsof + +RUN curl -L https://github.com/Endava/cats/releases/download/cats-7.0.1/cats-linux -o /usr/local/bin/cats \ + && chmod +x /usr/local/bin/cats \ + && ln -s /usr/local/bin/cats /usr/bin/cats \ + && curl -L https://huoqi-public.oss-cn-beijing.aliyuncs.com/iast/wkhtmltopdf -o /usr/local/bin/wkhtmltopdf \ + && chmod +x /usr/local/bin/wkhtmltopdf \ + && ln -s /usr/local/bin/wkhtmltopdf /usr/bin/wkhtmltopdf -COPY requirements.txt /opt/iast/apiserver/requirements.txt -ADD https://huoqi-public.oss-cn-beijing.aliyuncs.com/iast/dongtai-models-1.0.tar.gz /opt/iast/apiserver/dongtai-models-1.0.tar.gz -RUN pip3 install -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com -r /opt/iast/apiserver/requirements.txt && pip3 install /opt/iast/apiserver/dongtai-models-1.0.tar.gz && rm -rf /opt/iast/apiserver/dongtai-models-1.0.tar.gz +COPY requirements-prod.txt /opt/dongtai/webapi/requirements.txt +RUN pip3 install -r /opt/dongtai/webapi/requirements.txt -COPY . /opt/iast/apiserver -WORKDIR /opt +# debug performance ... +COPY . /opt/dongtai +WORKDIR /opt/dongtai -CMD ["/usr/local/bin/uwsgi","--ini", "/opt/iast/apiserver/conf/uwsgi.ini"] +RUN /bin/bash -c 'mkdir -p /tmp/{logstash/{batchagent,report/{img,word,pdf,excel,html}},iast_cache/package}' && mv /opt/dongtai/*.jar /tmp/iast_cache/package/ || true && mv /opt/dongtai/*.tar.gz /tmp/ || true +ENTRYPOINT ["/bin/bash","/opt/dongtai/deploy/docker/entrypoint.sh"] \ No newline at end of file diff --git a/DockerfileTest b/DockerfileTest deleted file mode 100644 index 50e247820..000000000 --- a/DockerfileTest +++ /dev/null @@ -1,20 +0,0 @@ -FROM python:3.7.7 -ARG VERSION -ENV DEBIAN_FRONTEND=noninteractive -ENV LANG=en_US.UTF-8 -ENV TZ=Asia/Shanghai - -RUN curl -fsSL https://nginx.org/keys/nginx_signing.key | apt-key add - \ - && apt-key fingerprint ABF5BD827BD9BF62 \ - && apt-get update -y \ - && apt install -y libc6-dev unzip vim cron swig openjdk-11-jdk - -COPY requirements.txt /opt/iast/apiserver/requirements.txt -ADD https://huoqi-public.oss-cn-beijing.aliyuncs.com/iast/dongtai-models-test-1.0.tar.gz /opt/iast/apiserver/dongtai-models-1.0.tar.gz -RUN pip3 install -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com -r /opt/iast/apiserver/requirements.txt && pip3 install /opt/iast/apiserver/dongtai-models-1.0.tar.gz && rm -rf /opt/iast/apiserver/dongtai-models-1.0.tar.gz - - -COPY . 
/opt/iast/apiserver -WORKDIR /opt - -CMD ["/usr/local/bin/uwsgi","--ini", "/opt/iast/apiserver/conf/uwsgi.ini"] diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 30ace6a87..000000000 --- a/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. 
-States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. 
- - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. 
- - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
- - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. 
- - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
- - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - {one line to give the program's name and a brief idea of what it does.} - Copyright (C) {year} {name of author} - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - {project} Copyright (C) {year} {fullname} - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. \ No newline at end of file diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/README-zh.md b/README-zh.md new file mode 100644 index 000000000..5c39a740b --- /dev/null +++ b/README-zh.md @@ -0,0 +1,95 @@ +# DongTai + +[![django-project](https://img.shields.io/badge/django%20versions-3.0.3-blue)](https://www.djangoproject.com/) +[![license Apache-2.0](https://img.shields.io/github/license/HXSecurity/DongTai-agent-java)](https://github.com/HXSecurity/DongTai-agent-java/blob/main/LICENSE) +[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/DongTai?label=DongTai)](https://github.com/HXSecurity/DongTai/releases) + +[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/Dongtai-webapi?label=Dongtai-webapi)](https://github.com/HXSecurity/DongTai-webapi/releases) +[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/Dongtai-openapi?label=Dongtai-openapi)](https://github.com/HXSecurity/DongTai-openapi/releases) +[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/Dongtai-engine?label=Dongtai-engine)](https://github.com/HXSecurity/DongTai-engine/releases) +[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/Dongtai-web?label=Dongtai-web)](https://github.com/HXSecurity/DongTai-web/releases) +[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/DongTai-agent-java?label=DongTai-agent-java)](https://github.com/HXSecurity/DongTai-agent-java/releases) +[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/DongTai-agent-python?label=DongTai-agent-python)](https://github.com/HXSecurity/DongTai-agent-python/releases) + +[English](README.md) + +## DongTai是什么? + +DongTai是一款开源的被动式交互式安全测试(IAST)产品,通过动态hook和污点跟踪算法等实现**通用漏洞检测**、**多请求关联漏洞检测(包括但不限于越权漏洞、未授权访问)**、**第三方组件漏洞检测**等,目前支持Java、Python两种语言的应用漏洞检测。 + +## 项目结构 + +``` +. +├── deploy +├── dongtai_common 各个服务调用的常用函数和类 +├── dongtai_conf 配置文件 +├── dongtai_engine 漏洞检测与漏洞处理部分 +├── dongtai_protocol dongtai-server和agent交互的协议 +├── dongtai_web 与web交互的api +├── static 静态文件 +└── test 测试用例 +``` + + +## 技术架构 + +"火线-洞态IAST"具有多个基础服务,包括:`DongTai-web`、`DongTai-webapi`、`DongTai-openapi`、`DongTai-engine`、`agent`、`DongTai-Base-Image`、`DongTai-Plugin-IDEA`,其中: + +- `DongTai-web`是DongTai的产品页面,用于处理用户与洞态的交互 +- `DongTai-webapi`负责处理用户的相关操作 +- `DongTai-openapi`用于处理`agent`上报的注册/心跳/调用方法/第三方组件/错误日志等数据,下发hook策略,下发探针控制指令等 +- `DongTai-engine`根据调用方法数据和污点跟踪算法分析HTTP/HTTPS/RPC请求中是否存在漏洞,同时负责其它相关的定时任务 +- `agent`是DongTai的探针模块,包含不同编程语言的数据采集端,用于采集应用运行时的数据并上报至`DongTai-OpenAPI`服务 +- `DongTai-Base-Image`包含洞态运行时依赖的基础服务,包括:MySql、Redis +- `DongTai-Plugin-IDEA`是Java探针对应的IDEA插件,可通过插件直接运行Java探针,直接在IDEA中检测漏洞 + +## 应用场景 + +"火线-洞态IAST"的应用场景包括但不限于: + +- 嵌入`DevSecOps`流程,实现应用漏洞的自动化检测/第三方组件梳理/第三方组件漏洞检测 +- 针对开源软件/开源组件进行通用漏洞挖掘 +- 上线前安全测试等 + +## 快速开始 + +`洞态IAST`支持**SaaS服务**和**本地化部署**,本地化部署的详细部署方案见[**部署文档**](./deploy) + +### 1. SaaS版本 + +- 填写[在线问卷](https://jinshuju.net/f/I9PNmf)注册账号 +- 登录[洞态IAST](https://iast.io)系统 +- 根据[在线文档](https://doc.dongtai.io/zh/02_start/index.html)进行快速体验 + +### 2. 
本地化部署版本
+
+**洞态IAST**支持多种部署方案,可通过[部署文档](./deploy)了解部署方案详情,方案如下:
+
+- 单机版部署
+  - [x] [docker-compose部署](./deploy/docker-compose)
+  - [ ] docker部署方案 - 待更新
+- 集群版部署
+  - [x] [Kubernetes集群部署](./deploy/kubernetes)
+
+#### docker-compose部署
+
+```shell script
+git clone git@github.com:HXSecurity/DongTai.git
+cd DongTai
+chmod u+x build_with_docker_compose.sh
+./build_with_docker_compose.sh
+```
+
+## 贡献
+
+欢迎并非常感谢您的贡献, 请参阅[CONTRIBUTING.md](https://github.com/HXSecurity/DongTai/blob/main/CONTRIBUTING.md)了解如何向项目贡献
+
+## 文档
+
+- [官方文档](https://doc.dongtai.io/zh/)
+- [官方网站](https://dongtai.io)
+
+## Stats
+
+![Alt](https://repobeats.axiom.co/api/embed/ea6a307f8f06cd1c2a19f2312751eb1706382af8.svg "Repobeats analytics image")
diff --git a/README.md b/README.md
new file mode 100644
index 000000000..43c0809d5
--- /dev/null
+++ b/README.md
@@ -0,0 +1,98 @@
+# DongTai
+
+[![django-project](https://img.shields.io/badge/django%20versions-3.2.15-blue)](https://www.djangoproject.com/)
+[![license Apache-2.0](https://img.shields.io/github/license/HXSecurity/DongTai-agent-java)](https://github.com/HXSecurity/DongTai-agent-java/blob/main/LICENSE)
+[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/DongTai?label=DongTai)](https://github.com/HXSecurity/DongTai/releases)
+
+[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/Dongtai-webapi?label=Dongtai-webapi)](https://github.com/HXSecurity/DongTai-webapi/releases)
+[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/Dongtai-openapi?label=Dongtai-openapi)](https://github.com/HXSecurity/DongTai-openapi/releases)
+[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/Dongtai-engine?label=Dongtai-engine)](https://github.com/HXSecurity/DongTai-engine/releases)
+[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/Dongtai-web?label=Dongtai-web)](https://github.com/HXSecurity/DongTai-web/releases)
+[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/DongTai-agent-java?label=DongTai-agent-java)](https://github.com/HXSecurity/DongTai-agent-java/releases)
+[![GitHub release](https://img.shields.io/github/v/release/HXSecurity/DongTai-agent-python?label=DongTai-agent-python)](https://github.com/HXSecurity/DongTai-agent-python/releases)
+
+[中文版本(Chinese version)](README-zh.md)
+
+## About DongTai IAST
+
+`DongTai IAST` is an open-source passive interactive security testing (IAST) product. It uses dynamic hooks and taint tracking algorithms to achieve **universal vulnerability detection**, **multi-request correlated vulnerability detection (including but not limited to privilege escalation and unauthorized access vulnerabilities)**, **third-party component vulnerability detection**, etc. Currently, it supports vulnerability detection for applications written in Java and Python.
+
+
+## Project structure
+```
+.
+├── deploy
+├── dongtai_common common functions and classes for each service to call
+├── dongtai_conf configuration files
+├── dongtai_engine vulnerability detection and vulnerability processing part
+├── dongtai_protocol protocols for interaction between dongtai-server and agent
+├── dongtai_web api for interacting with the web
+├── static static files
+└── test testcases
+
+```
+
+## Architecture
+
+`DongTai IAST` has multiple basic services, including `DongTai-web`, `DongTai-webapi`, `DongTai-openapi`, `DongTai-engine`, `agent`, `DongTai-deploy`, `DongTai-Base-Image` and `DongTai-Plugin-IDEA`:
+
+- `DongTai-web` is the product page of DongTai and handles the interaction between users and DongTai.
+- `DongTai-webapi` is responsible for handling user-related operations.
+- `DongTai-openapi` processes the registration, heartbeat, method call, third-party component, and error log data reported by `agent`, and delivers hook strategies, probe control commands, etc.
+- `DongTai-engine` analyzes whether there are vulnerabilities in HTTP/HTTPS/RPC requests based on the method call data and the taint tracking algorithm, and is also responsible for other related scheduled tasks.
+- `agent` is the probe module of DongTai. It contains data collectors for different programming languages, which gather application runtime data and report it to the `DongTai-openapi` service.
+- `DongTai-deploy` is used to deploy DongTai IAST and covers docker-compose single-node deployment, Kubernetes cluster deployment, etc. If you need another deployment option, you can request the feature or contribute a deployment plan.
+- `DongTai-Base-Image` contains the basic services that DongTai depends on at runtime, including MySQL and Redis.
+- `DongTai-Plugin-IDEA` is the IDEA plugin for the Java probe. You can run the Java probe through the plugin and detect vulnerabilities directly in IDEA.
+
+## Scenario
+
+The usage scenarios of "DongTai IAST" include but are not limited to:
+
+- Embed into the `DevSecOps` process to realize automatic detection of application vulnerabilities, third-party component inventory, and third-party component vulnerability detection.
+- Common vulnerability mining for open-source software and open-source components.
+- Security testing before release, etc.
+
+## Quick start
+
+`DongTai IAST` supports **SaaS Service** and **Localized Deployment**. Please refer to the [**Deployment Document**](./deploy) for localized deployment.
+
+### 1. SaaS Version
+
+- Fill out the [Online Form](https://jinshuju.net/f/I9PNmf) to register an account.
+- Log in to the [DongTai IAST](https://iast.io) system.
+- Have a quick start with the [Online Guideline](https://docs.dongtai.io/docs/category/%E5%BF%AB%E9%80%9F%E5%BC%80%E5%A7%8B).
+
+### 2. Localized Deployment Version
+
+`DongTai IAST` supports a variety of deployment schemes; refer to the [Deployment Document](./deploy) for details:
+
+- Stand-alone Deployment
+  - [x] [Docker-compose](./deploy/docker-compose)
+  - [ ] docker - pending upgrade
+- Cluster Deployment
+  - [x] [Kubernetes](./deploy/kubernetes)
+
+#### Docker-compose
+
+```shell script
+git clone git@github.com:HXSecurity/DongTai.git
+cd DongTai
+chmod u+x build_with_docker_compose.sh
+./build_with_docker_compose.sh
+```
+
+## Contributing
+
+Contributions are welcome and greatly appreciated. See [CONTRIBUTING.md](https://github.com/HXSecurity/DongTai/blob/main/CONTRIBUTING.md) for details on submitting patches and the contribution workflow.
+
+Any questions? 
Let's discuss in [#DongTai discussions](https://github.com/HXSecurity/DongTai/discussions) + +## Futher Resources + +- [Documentation](https://docs.dongtai.io/) +- [DongTai WebSite](https://dongtai.io) + +## Stats + +![Alt](https://repobeats.axiom.co/api/embed/ea6a307f8f06cd1c2a19f2312751eb1706382af8.svg "Repobeats analytics image") diff --git a/apiserver/apps.py b/apiserver/apps.py deleted file mode 100644 index 64c2ffc64..000000000 --- a/apiserver/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class ApiserverConfig(AppConfig): - name = 'apiserver' diff --git a/apiserver/base/openapi.py b/apiserver/base/openapi.py deleted file mode 100644 index 288629462..000000000 --- a/apiserver/base/openapi.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/11/26 下午12:42 -# software: PyCharm -# project: lingzhi-webapi -from rest_framework.authentication import TokenAuthentication, SessionAuthentication - -from AgentServer.base import EndPoint -from apiserver.permissions import ScopedPermission - - -class OpenApiPermission(ScopedPermission): - def has_permission(self, request, view): - user = request.user - if user is not None and user.is_active: - return True - return False - - def has_object_permission(self, request, view, obj): - print('enter has object permission') - return super().has_object_permission(request, view, obj) - - -class OpenApiEndPoint(EndPoint): - authentication_classes = (TokenAuthentication,) - permission_classes = (OpenApiPermission,) - - -class EngineApiEndPoint(EndPoint): - authentication_classes = (SessionAuthentication, TokenAuthentication) - permission_classes = (OpenApiPermission,) diff --git a/apiserver/const.py b/apiserver/const.py deleted file mode 100644 index c2730242d..000000000 --- a/apiserver/const.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/10/29 17:56 -# software: PyCharm -# project: webapi - -REPORT_HEART_BEAT = 0x01 -REPORT_SCA = 0x11 -REPORT_VULN_NORNAL = 0x21 -REPORT_VULN_DYNAMIC = 0x22 -REPORT_VULN_OVER_POWER = 0x23 -REPORT_VULN_SAAS_POOL = 0x24 - -REPORT_AUTH_ADD = 0x31 -REPORT_AUTH_UPDATE = 0x32 -REPORT_ERROR_LOG = 0x51 - -STRATEGY_ENABLE = 'enable' -STRATEGY_DISABLE = 'disable' diff --git a/apiserver/encrypter.py b/apiserver/encrypter.py deleted file mode 100644 index c6aab6307..000000000 --- a/apiserver/encrypter.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/6/4 14:55 -# software: PyCharm -# project: webapi - -""" -RSA加解密 -""" -import base64 - -from M2Crypto import BIO, RSA - -from AgentServer import settings - -with open(settings.PRIVATE_KEY, 'r') as f: - PRIVATE_KEY = f.read() - - -class RsaCrypto: - MAX_LENGTH = 128 - - @staticmethod - def cut(data, length): - return [data[i:i + length] for i in range(0, len(data), length)] - - @staticmethod - def decrypt(encrypt_data): - # 解密 - cipher = base64.b64decode(encrypt_data) - ciphers = RsaCrypto.cut(cipher, RsaCrypto.MAX_LENGTH) - pri_bio = BIO.MemoryBuffer(PRIVATE_KEY.encode('utf-8')) - pri_rsa = RSA.load_key_bio(pri_bio) - fragments = [] - for _cipher in ciphers: - plain = pri_rsa.private_decrypt(_cipher, RSA.pkcs1_padding) - fragments.append(plain.decode('utf-8')) - return ''.join(fragments) - - @staticmethod - def encrypt(data): - with open(settings.PUBLIC_KEY, 'r') as public_key: - public_key_value = public_key.read() - text = data.encode('utf-8') - fragments = 
RsaCrypto.cut(text, 118) - pub_bio = BIO.MemoryBuffer(public_key_value.encode('utf-8')) - pub_rsa = RSA.load_pub_key_bio(pub_bio) - - secret = bytes() - for fragment in fragments: - secret += pub_rsa.public_encrypt(fragment, RSA.pkcs1_padding) - sign = base64.b64encode(secret) - return sign.decode('utf-8') diff --git a/apiserver/report/handler/error_log_handler.py b/apiserver/report/handler/error_log_handler.py deleted file mode 100644 index c8d74711a..000000000 --- a/apiserver/report/handler/error_log_handler.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/10/23 11:54 -# software: PyCharm -# project: webapi -import time - -from dongtai_models.models.errorlog import IastErrorlog - -from apiserver.report.handler.report_handler_interface import IReportHandler - - -class ErrorLogHandler(IReportHandler): - - def parse(self): - self.app_name = self.detail.get('app_name') - self.web_server_path = self.detail.get('web_server_path') - self.log = self.detail.get('log') - self.agent_name = self.detail.get('agent_name') - self.project_name = self.detail.get('project_name', 'Demo Project') - self.language = self.detail.get('language') - - def save(self): - self.agent = self.get_agent(project_name=self.project_name, agent_name=self.agent_name) - if self.agent: - IastErrorlog( - errorlog=self.log, - agent=self.agent, - state='已上报', - dt=int(time.time()) - ).save() diff --git a/apiserver/report/handler/heartbeat_handler.py b/apiserver/report/handler/heartbeat_handler.py deleted file mode 100644 index f08dd1661..000000000 --- a/apiserver/report/handler/heartbeat_handler.py +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/10/23 11:56 -# software: PyCharm -# project: webapi -import base64 -import time - -from dongtai_models.models.heartbeat import Heartbeat -from dongtai_models.models.server import IastServerModel - -from apiserver.report.handler.report_handler_interface import IReportHandler - - -class HeartBeatHandler(IReportHandler): - - def parse(self): - self.server_env = self.detail.get('server_env') - self.app_name = self.detail.get('app_name') - self.app_path = self.detail.get('app_path') - self.web_server_name = self.detail.get('web_server_name') - self.web_server_port = self.detail.get('web_server_port') - self.web_server_version = self.detail.get('web_server_version') - self.web_server_path = self.detail.get('web_server_path') - self.web_server_hostname = self.detail.get('web_server_hostname') - self.web_server_ip = self.detail.get('web_server_ip') - self.req_count = self.detail.get('req_count') - self.pid = self.detail.get('pid') - self.hostname = self.detail.get('hostname') - self.cpu = self.detail.get('cpu') - self.memory = self.detail.get('memory') - self.network = self.detail.get('network') - self.disk = self.detail.get('disk') - self.agent_name = self.detail.get('agent_name') - self.project_name = self.detail.get('project_name', 'Demo Project') - - def save_heartbeat(self): - heartbeat = Heartbeat( - hostname=self.hostname, - network=self.network, - memory=self.memory, - cpu=self.cpu, - disk=self.disk, - pid=self.pid, - env='', - req_count=self.req_count, - dt=int(time.time()), - agent=self.agent - ) - heartbeat.save() - - def get_command(self, envs): - for env in envs: - if 'sun.java.command' in env.lower(): - return '='.join(env.split('=')[1:]) - return '' - - def get_runtime(self, envs): - for env in envs: - if 'java.runtime.name' in env.lower(): - return 
'='.join(env.split('=')[1:]) - return '' - - def save_server(self): - # 根据服务器信息检查是否存在当前服务器,如果存在,标记为存活,否则,标记为失败 - env = "" - envs = [] - self.command = "" - if self.server_env: - env = base64.b64decode(self.server_env).decode('utf-8') - env = env.replace('{', '').replace('}', '') - envs = env.split(',') - self.command = self.get_command(envs) - - iast_servers = IastServerModel.objects.filter( - name=self.web_server_name, - hostname=self.hostname, - ip=self.web_server_ip, - port=self.web_server_port, - command=self.command - ) - - if len(iast_servers) > 0: - iast_server = iast_servers[0] - iast_server.status = 'online' - iast_server.update_time = int(time.time()) - iast_server.save() - return iast_server - else: - iast_server = IastServerModel( - name=self.web_server_name, - hostname=self.hostname, - ip=self.web_server_ip, - port=self.web_server_port, - environment=env, - path=self.web_server_path, - status='online', - container=self.web_server_name, - container_path=self.web_server_path, - command=self.command, - runtime=self.get_runtime(envs), - create_time=int(time.time()), - update_time=int(time.time()) - ) - iast_server.save() - return iast_server - - def save(self): - self.agent = self.get_agent(project_name=self.project_name, agent_name=self.agent_name) - if self.agent: - self.agent.is_running = 1 - self.agent.latest_time = int(time.time()) - self.agent.save() - self.save_heartbeat() - self.agent.server = self.save_server() - self.agent.save() diff --git a/apiserver/report/handler/narmal_vul_handler.py b/apiserver/report/handler/narmal_vul_handler.py deleted file mode 100644 index 7fa4db013..000000000 --- a/apiserver/report/handler/narmal_vul_handler.py +++ /dev/null @@ -1,132 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author: owefsad@huoxian.cn -# datetime: 2021/4/27 下午2:48 -# project: dongtai-openapi - -# !/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/10/23 11:55 -# software: PyCharm -# project: webapi -import time - -from dongtai_models.models.strategy import IastStrategyModel -from dongtai_models.models.vul_level import IastVulLevel -from dongtai_models.models.vulnerablity import IastVulnerabilityModel - -from apiserver.report.handler.report_handler_interface import IReportHandler - - -class BaseVulnHandler(IReportHandler): - @staticmethod - def create_top_stack(obj): - stack = f'{obj["classname"]}.{obj["methodname"]}({obj["in"]})' - return stack - - @staticmethod - def create_bottom_stack(obj): - stack = f'{obj["classname"]}.{obj["methodname"]}("{obj["in"]}")' - return stack - - def get_vul_info(self, agent): - vul_level = '待定' - vul_type = self.vuln_type - vul_type_enable = 'disable' - # 根据用户ID判断获取策略中的漏洞等级 - strategy = IastStrategyModel.objects.values('vul_type', 'level', 'state').filter(vul_type=vul_type).first() - if strategy: - vul_level = strategy.get('level', 4) - vul_type = strategy.get('vul_type', None) - vul_type_enable = strategy.get('state', 'disable') - return vul_level, vul_type, vul_type_enable - - def get_command(self, envs): - for env in envs: - if 'sun.java.command' in env.lower(): - return '='.join(env.split('=')[1:]) - return '' - - def get_runtime(self, envs): - for env in envs: - if 'java.runtime.name' in env.lower(): - return '='.join(env.split('=')[1:]) - return '' - - def parse(self): - self.server_name = self.detail.get('server_name') - self.server_port = self.detail.get('server_port') - self.server_env = self.detail.get('server_env') - self.hostname = self.detail.get('hostname') - self.agent_version = 
self.detail.get('agent_version') - self.app_name = self.detail.get('app_name') - self.app_path = self.detail.get('app_path') - self.http_uri = self.detail.get('http_uri') - self.http_url = self.detail.get('http_url') - self.http_query_string = self.detail.get('http_query_string') - self.http_header = self.detail.get('http_header') - self.http_method = self.detail.get('http_method') - self.http_scheme = self.detail.get('http_scheme') - self.http_secure = self.detail.get('http_secure') - self.http_protocol = self.detail.get('http_protocol') - self.vuln_type = self.detail.get('vuln_type') - self.app_caller = self.detail.get('app_caller') - self.language = self.detail.get('language') - self.agent_name = self.detail.get('agent_name') - self.taint_value = self.detail.get('taint_value') - self.taint_position = self.detail.get('taint_position') - self.client_ip = self.detail.get('http_client_ip') - self.param_name = self.detail.get('param_name') - self.container = self.detail.get('container') - self.container_path = self.detail.get('container_path') - self.project_name = self.detail.get('project_name', 'Demo Project') - - -class NormalVulnHandler(BaseVulnHandler): - def save(self): - # 查漏洞名称对应的漏洞等级,狗咋熬漏洞等级表 - agent = self.get_agent(project_name=self.project_name, agent_name=self.agent_name) - if agent: - vul_level, vul_type, vul_type_enable = self.get_vul_info(agent) - if vul_type_enable == 'enable': - level = IastVulLevel.objects.filter(id=vul_level).first() - strategy = IastStrategyModel.objects.filter(vul_type=vul_type).first() - if level and strategy: - iast_vul = IastVulnerabilityModel.objects.filter( - type=strategy.vul_name, - url=self.http_url, - http_method=self.http_method, - agent=agent - ).first() - if iast_vul: - iast_vul.req_header = self.http_header - iast_vul.req_params = self.http_query_string - iast_vul.counts = iast_vul.counts + 1 - iast_vul.latest_time = int(time.time()) - iast_vul.status = '已上报' - iast_vul.save() - else: - vul = IastVulnerabilityModel( - type=strategy.vul_name, - level=level, - url=self.http_url, - uri=self.http_uri, - http_method=self.http_method, - http_scheme=self.http_scheme, - http_protocol=self.http_protocol, - req_header=self.http_header, - req_params=self.http_query_string, - req_data='', # fixme 请求体 数据保存 - res_header='', # fixme 响应头,暂时没有,后续补充 - res_body='', # fixme 响应体数据 - agent=agent, - context_path=self.app_name, - counts=1, - status='已上报', - language=self.language, - first_time=int(time.time()), - latest_time=int(time.time()), - client_ip=self.client_ip - ) - vul.save() diff --git a/apiserver/report/handler/report_handler_factory.py b/apiserver/report/handler/report_handler_factory.py deleted file mode 100644 index 8bc3fc041..000000000 --- a/apiserver/report/handler/report_handler_factory.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/10/30 10:29 -# software: PyCharm -# project: webapi -from apiserver import const -from apiserver.report.handler.auth_info_handler import AuthAddHandler, AuthUpdateHandler -from apiserver.report.handler.error_log_handler import ErrorLogHandler -from apiserver.report.handler.heartbeat_handler import HeartBeatHandler -from apiserver.report.handler.narmal_vul_handler import NormalVulnHandler -from apiserver.report.handler.over_power_handler import OverPowerHandler -from apiserver.report.handler.saas_method_pool_handler import SaasMethodPoolHandler -from apiserver.report.handler.sca_handler import ScaHandler - - -class ReportHandlerFactory: - 
@staticmethod - def get_handler(report_type): - if report_type == const.REPORT_HEART_BEAT: - return HeartBeatHandler() - elif report_type == const.REPORT_ERROR_LOG: - return ErrorLogHandler() - elif report_type == const.REPORT_SCA: - return ScaHandler() - elif report_type == const.REPORT_VULN_SAAS_POOL: - return SaasMethodPoolHandler() - elif report_type == const.REPORT_VULN_NORNAL: - return NormalVulnHandler() - elif report_type == const.REPORT_AUTH_ADD: - return AuthAddHandler() - elif report_type == const.REPORT_AUTH_UPDATE: - return AuthUpdateHandler() - elif report_type == const.REPORT_VULN_OVER_POWER: - return OverPowerHandler() - else: - print(report_type, type(report_type)) diff --git a/apiserver/report/handler/report_handler_interface.py b/apiserver/report/handler/report_handler_interface.py deleted file mode 100644 index a07b2a8fe..000000000 --- a/apiserver/report/handler/report_handler_interface.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/10/30 10:31 -# software: PyCharm -# project: webapi -from django.db.models import Q -from dongtai_models.models.agent import IastAgent - - -class IReportHandler: - def __init__(self): - self._report = None - self._detail = None - self._user_id = None - - @property - def report(self): - return self._report - - @report.setter - def report(self, reports): - self._report = reports - - @property - def detail(self): - return self._detail - - @detail.setter - def detail(self, detail): - self._detail = detail - - @property - def user_id(self): - return self._user_id - - @user_id.setter - def user_id(self, user_id): - self._user_id = user_id - - def parse(self): - pass - - def save(self): - pass - - def handle(self, report, user): - self.report = report - self.detail = self.report.get('detail') - self.agent_token = self.detail.get('agent_name') - self.user_id = user - # todo 检查当前用户是否有操作该agent的权限 - self.parse() - self.save() - - def get_project_agents(self, agent): - if agent.bind_project_id != 0: - agents = IastAgent.objects.filter( - Q(project_name=self.project_name) | Q(bind_project_id=agent.bind_project_id), online=1, - user=self.user_id) - else: - agents = IastAgent.objects.filter(project_name=self.project_name, user=self.user_id) - return agents - - def get_agent(self, agent_name, project_name): - return IastAgent.objects.filter(token=agent_name, project_name=project_name, online=1, - user=self.user_id).first() diff --git a/apiserver/report/handler/saas_method_pool_handler.py b/apiserver/report/handler/saas_method_pool_handler.py deleted file mode 100644 index 82089e884..000000000 --- a/apiserver/report/handler/saas_method_pool_handler.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2021/1/5 下午12:36 -# software: PyCharm -# project: lingzhi-webapi -import json -import logging -import time -from hashlib import sha1 - -import requests -from dongtai_models.models.agent_method_pool import MethodPool - -from AgentServer.settings import BASE_ENGINE_URL -from apiserver.report.handler.report_handler_interface import IReportHandler - -logger = logging.getLogger('dongtai.openapi') - - -class SaasMethodPoolHandler(IReportHandler): - def parse(self): - self.http_uri = self.detail.get('http_uri') - self.http_url = self.detail.get('http_url') - self.http_query_string = self.detail.get('http_query_string') - self.http_req_data = self.detail.get('http_req_data') - self.http_req_header = self.detail.get('http_req_header') - 
self.http_method = self.detail.get('http_method') - self.http_scheme = self.detail.get('http_scheme') - self.http_secure = self.detail.get('http_secure') - self.http_protocol = self.detail.get('http_protocol') - self.http_res_header = self.detail.get('http_res_header') - self.http_res_body = self.detail.get('http_res_body') - self.context_path = self.detail.get('context_path') - self.vuln_type = self.detail.get('vuln_type') - self.language = self.detail.get('language', 'Java') - self.agent_name = self.detail.get('agent_name') - self.project_name = self.detail.get('project_name', 'Demo Project') - self.taint_value = self.detail.get('taint_value') - self.taint_position = self.detail.get('taint_position') - self.client_ip = self.detail.get('http_client_ip') - self.param_name = self.detail.get('param_name') - self.method_pool = self.report.get('detail', {}).get('pool', None) - if self.method_pool: - self.method_pool = sorted(self.method_pool, key=lambda e: e.__getitem__('invokeId'), reverse=True) - - def save(self): - # 数据存储 - # 计算唯一签名,确保数据唯一 - # 数据存储 - agent = self.get_agent(project_name=self.project_name, agent_name=self.agent_name) - if agent: - pool_sign = self.calc_hash() - current_version_agents = self.get_project_agents(agent) - method_pool = MethodPool.objects.filter(pool_sign=pool_sign, agent__in=current_version_agents).first() - update_record = True - if method_pool: - method_pool.update_time = int(time.time()) - method_pool.method_pool = json.dumps(self.method_pool) - method_pool.save() - else: - # 获取agent - update_record = False - timestamp = int(time.time()) - method_pool = MethodPool.objects.create( - agent=agent, - url=self.http_url, - uri=self.http_uri, - http_method=self.http_method, - http_scheme=self.http_scheme, - http_protocol=self.http_protocol, - req_header=self.http_req_header, - req_params=self.http_query_string, - req_data=self.http_req_data, - res_header=self.http_res_header, - res_body=self.http_res_body, - context_path=self.context_path, - language=self.language, - method_pool=json.dumps(self.method_pool), - pool_sign=pool_sign, - clent_ip=self.client_ip, - create_time=timestamp, - update_time=timestamp - ) - self.send_to_engine(method_pool.id, update_record) - - @staticmethod - def send_to_engine(method_pool_id, update_record): - logger.info( - f'[+] send method_pool [{method_pool_id}] to engine for {"update" if update_record else "new record"}') - try: - requests.get(url=BASE_ENGINE_URL.format(id=method_pool_id)) - except Exception as e: - logger.info(f'[-] Failure: send method_pool [{method_pool_id}], Error: {e}') - - def calc_hash(self): - sign_raw = self.http_uri - for method in self.method_pool: - sign_raw += f"{method.get('className')}.{method.get('methodName')}()->" - sign_sha1 = self.sha1(sign_raw) - return sign_sha1 - - @staticmethod - def sha1(raw): - h = sha1() - h.update(raw.encode('utf-8')) - return h.hexdigest() diff --git a/apiserver/report/handler/sca_handler.py b/apiserver/report/handler/sca_handler.py deleted file mode 100644 index af1e65938..000000000 --- a/apiserver/report/handler/sca_handler.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/10/23 11:55 -# software: PyCharm -# project: webapi -import json -import logging -import time - -from dongtai_models.models.asset import Asset -from dongtai_models.models.sca_maven_artifact import ScaMavenArtifact -from dongtai_models.models.sca_maven_db import ScaMavenDb -from dongtai_models.models.sca_vul_db import ScaVulDb -from 
dongtai_models.models.vul_level import IastVulLevel - -from apiserver.report.handler.report_handler_interface import IReportHandler - -logger = logging.getLogger("django") - - -class ScaHandler(IReportHandler): - - def parse(self): - self.package_path = self.detail.get('package_path') - self.package_signature = self.detail.get('package_signature') - self.package_name = self.detail.get('package_name') - self.package_algorithm = self.detail.get('package_algorithm') - self.agent_name = self.detail.get('agent_name') - self.project_name = self.detail.get('project_name', 'Demo Project') - self.language = self.detail.get('language') - - def save(self): - if all([self.agent_name, self.package_path, self.package_name, self.package_signature, - self.package_algorithm]) is False: - logger.warn(f"数据不完整,数据:{json.dumps(self.report)}") - else: - agent = self.get_agent(project_name=self.project_name, agent_name=self.agent_name) - if agent: - smd = ScaMavenDb.objects.filter(sha_1=self.package_signature).values("version", "aql").first() - _version = self.package_name.split('/')[-1].replace('.jar', '').split('-')[-1] - version = smd.get('version', _version) if smd else _version - package_name = smd.get('aql', self.package_name) if smd else self.package_name - aids = ScaMavenArtifact.objects.filter(signature=self.package_signature).values("aid") - if len(aids) > 0: - aids = [_['aid'] for _ in aids] - vul_count = len(aids) - levels = ScaVulDb.objects.filter(id__in=aids).values('vul_level') - - level = 'info' - if len(levels) > 0: - levels = [_['vul_level'] for _ in levels] - if 'high' in levels: - level = 'high' - elif 'high' in levels: - level = 'high' - elif 'medium' in levels: - level = 'medium' - elif 'low' in levels: - level = 'low' - else: - level = 'info' - - try: - level = IastVulLevel.objects.get(name=level) - current_version_agents = self.get_project_agents(agent) - asset_count = 0 - if current_version_agents: - asset_count = Asset.objects.values("id").filter(signature_value=self.package_signature, - agent__in=current_version_agents).count() - - if asset_count == 0: - Asset( - package_path=self.package_path, - version=version, - vul_count=vul_count, - level=level, - package_name=package_name, - signature_value=self.package_signature, - signature_algorithm=self.package_algorithm, - dt=time.time(), - agent=agent, - language=self.language - ).save() - except Exception as e: - pass diff --git a/apiserver/report/report_handler_factory.py b/apiserver/report/report_handler_factory.py deleted file mode 100644 index 8b7071e85..000000000 --- a/apiserver/report/report_handler_factory.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/10/23 12:00 -# software: PyCharm -# project: webapi -from apiserver import const - - -class ReportHandler: - HANDLERS = {} - - # 注册handler到当前命名空间,后续进行异步处理数据 - @staticmethod - def handler(reports): - """ - 处理上传的报告,如果报告的类型不存在,则忽略本次上传; - 检查用户与agent的权限 - :param reports: - :return: - """ - report_type = reports.get('type') - if report_type is None: - print(reports) - else: - if report_type == const.REPORT_HEART_BEAT: - print(f"心跳报告:{reports}") - elif report_type == const.REPORT_ERROR_LOG: - print(f"错误日志报告:{reports}") - elif report_type == const.REPORT_SCA: - print(f"SCA报告:{reports}") - elif report_type == const.REPORT_AUTH_ADD: - print(f"新增权限报告:{reports}") - elif report_type == const.REPORT_AUTH_UPDATE: - print(f"更新权限报告:{reports}") - elif report_type == const.REPORT_VULN_NORNAL: - print(f"无调用栈漏洞报告:{reports}") - elif 
report_type == const.REPORT_VULN_DYNAMIC: - print(f"污点跟踪漏洞报告:{reports}") - elif report_type == const.REPORT_VULN_OVER_POWER: - print(f"越权检测数据报告:{reports}") - elif report_type == const.REPORT_VULN_SAAS_POOL: - print(f"SAAS版污点方法池数据报告:{reports}") - else: - print(report_type, type(report_type)) - - try: - ReportHandler.HANDLERS.get(report_type).handler(reports) - except: - pass - - @classmethod - def register(cls, handler_name): - def wrapper(handler): - cls.HANDLERS.update({handler_name: handler}) - return handler - - return wrapper diff --git a/apiserver/urls.py b/apiserver/urls.py deleted file mode 100644 index bd881fa04..000000000 --- a/apiserver/urls.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2021/1/12 下午6:59 -# software: PyCharm -# project: lingzhi-agent-server - -# 报告接口:上传报告 -from django.urls import path - -from apiserver.views.agent_download import AgentDownload -from apiserver.views.agent_register import AgentRegisterEndPoint -from apiserver.views.engine_auto_deploy import AutoDeployEndPoint -from apiserver.views.engine_download import EngineDownloadEndPoint -from apiserver.views.engine_heartbeat import EngineHeartBeatEndPoint -from apiserver.views.engine_status import EngineUpdateEndPoint -from apiserver.views.engine_status import EngineStopStart -from apiserver.views.hook_profiles import HookProfilesEndPoint -from apiserver.views.properties import PropertiesEndPoint -from apiserver.views.report_upload import ReportUploadEndPoint - -urlpatterns = [ - path('agent/download', AgentDownload.as_view()), - path('deploy/auto', AutoDeployEndPoint.as_view()), - path('engine/heartbeat', EngineHeartBeatEndPoint.as_view()), - path('engine/download', EngineDownloadEndPoint.as_view()), - path('agent/register', AgentRegisterEndPoint.as_view()), - path('engine/update', EngineUpdateEndPoint.as_view()), - path('engine/update/', EngineUpdateEndPoint.as_view()), - path('profiles', HookProfilesEndPoint.as_view()), - path('properties', PropertiesEndPoint.as_view()), - path('report/upload', ReportUploadEndPoint.as_view()), - path('engine/startstop', EngineStopStart.as_view()), - # todo 增加重放请求获取接口,用于后续逻辑漏洞/漏洞验证等功能,暂时先不实现 -] diff --git a/apiserver/utils.py b/apiserver/utils.py deleted file mode 100644 index 39e3e477f..000000000 --- a/apiserver/utils.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author: owefsad@huoxian.cn -# datetime: 2021/6/1 上午9:53 -# project: dongtai-openapi - -# -*- coding: utf-8 -*- -import logging - -import oss2 -from oss2.exceptions import NoSuchKey - -from AgentServer import settings - -logger = logging.getLogger('dongtai.openapi') - - -class OssDownloader(object): - BUCKET_URL = 'https://oss-cn-beijing.aliyuncs.com' - BUCKET_NAME = 'dongtai' - - @staticmethod - def download_file_to_path(access_key, access_key_secret, bucket_url, bucket_name, object_name, local_file): - """ - - :param access_key: - :param access_key_secret: - :param bucket_url: - :param bucket_name: - :param object_name: - :param local_file: - :return: - """ - try: - auth = oss2.Auth(access_key, access_key_secret) - bucket = oss2.Bucket(auth, bucket_url, bucket_name) - bucket.get_object_to_file(object_name, local_file) - return True - except NoSuchKey as e: - # NoSuchKey表示oss云端文件不存在,通知管理员 - logger.error(f'oss download failure, reason: remote file not found, filename: {object_name}') - return False - except Exception as e: - logger.error(f'oss download failure, reason: {e}') - return False - - @staticmethod - def 
download_file(object_name, local_file): - return OssDownloader.download_file_to_path(access_key=settings.ACCESS_KEY, - access_key_secret=settings.ACCESS_KEY_SECRET, - bucket_url=OssDownloader.BUCKET_URL, - bucket_name=OssDownloader.BUCKET_NAME, - object_name=object_name, - local_file=local_file) diff --git a/apiserver/views/agent_download.py b/apiserver/views/agent_download.py deleted file mode 100644 index 169916d0f..000000000 --- a/apiserver/views/agent_download.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2021/1/14 下午7:17 -# software: PyCharm -# project: lingzhi-agent-server -import os -import uuid, logging - -from django.http import FileResponse -from rest_framework.authtoken.models import Token - -from AgentServer.base import R -from apiserver.base.openapi import OpenApiEndPoint -from apiserver.utils import OssDownloader - -logger = logging.getLogger('dongtai.openapi') - - -class AgentDownload(OpenApiEndPoint): - """ - 当前用户详情 - """ - name = "download_iast_agent" - description = "下载洞态Agent" - LOCAL_AGENT_FILE = '/tmp/iast-agent.jar' - REMOTE_AGENT_FILE = 'agent/java/iast-agent.jar' - - def get(self, request): - """ - IAST下载 agent接口s - :param request: - :return: - """ - try: - base_url = request.query_params.get('url', 'https://www.huoxian.cn') - project_name = request.query_params.get('projectName', 'Demo Project') - - if self.download_agent_jar() is False: - return R.failure(msg="agent file download failure. please contact official staff for help.") - - token, success = Token.objects.get_or_create(user=request.user) - agent_token = ''.join(str(uuid.uuid4()).split('-')) - if self.create_config_file(base_url=base_url, agent_token=agent_token, - auth_token=token.key, - project_name=project_name): - self.replace_jar_config() - response = FileResponse(open(AgentDownload.LOCAL_AGENT_FILE, "rb")) - response['content_type'] = 'application/octet-stream' - response['Content-Disposition'] = "attachment; filename=agent.jar" - return response - else: - return R.failure(msg="agent file not exit.") - except Exception as e: - logger.error(f'agent下载失败,用户: {request.user.get_username()},错误详情:{e}') - return R.failure(msg="agent file not exit.") - - @staticmethod - def create_config_file(base_url, agent_token, auth_token, project_name): - try: - data = "iast.name=lingzhi-Enterprise 1.0.0\niast.version=1.0.0\niast.response.name=lingzhi\niast.response.value=1.0.0\niast.server.url={url}\niast.server.token={token}\niast.allhook.enable=false\niast.dump.class.enable=false\niast.dump.class.path=/tmp/iast-class-dump/\niast.service.heartbeat.interval=30000\niast.service.vulreport.interval=1000\napp.name=LingZhi\nengine.status=start\nengine.name={agent_token}\njdk.version={jdk_level}\nproject.name={project_name}\niast.proxy.enable=false\niast.proxy.host=\niast.proxy.port=\n" - with open('/tmp/iast.properties', 'w') as config_file: - config_file.write( - data.format(url=base_url, token=auth_token, agent_token=agent_token, jdk_level=1, - project_name=project_name)) - return True - except Exception as e: - logger.error(f'agent配置文件创建失败,原因:{e}') - return False - - @staticmethod - def replace_jar_config(): - # 执行jar -uvf {AgentDownload.LOCAL_AGENT_FILE} iast.properties更新jar包的文件 - import os - os.system(f'cd /tmp;jar -uvf {AgentDownload.LOCAL_AGENT_FILE} iast.properties') - - @staticmethod - def download_agent_jar(): - if os.path.exists(AgentDownload.LOCAL_AGENT_FILE): - return True - else: - return 
OssDownloader.download_file(object_name=AgentDownload.REMOTE_AGENT_FILE, - local_file=AgentDownload.LOCAL_AGENT_FILE) diff --git a/apiserver/views/agent_register.py b/apiserver/views/agent_register.py deleted file mode 100644 index 67ff13353..000000000 --- a/apiserver/views/agent_register.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/11/30 下午3:13 -# software: PyCharm -# project: lingzhi-webapi -import time - -from dongtai_models.models.agent import IastAgent -from dongtai_models.models.project import IastProject -from dongtai_models.models.project_version import IastProjectVersion -from rest_framework.request import Request - -from AgentServer.base import R -from apiserver.base.openapi import OpenApiEndPoint -from apiserver.decrypter import parse_data - - -class AgentRegisterEndPoint(OpenApiEndPoint): - """ - 引擎注册接口 - """ - name = "api-v1-agent-register" - description = "引擎注册" - - def post(self, request: Request): - """ - IAST下载 agent接口s - :param request: - :return: - 服务器作为agent的唯一值绑定 - token: agent-ip-port-path - """ - # 接受 token名称,version,校验token重复性,latest_time = now.time() - # 生成agent的唯一token - # 注册 - try: - user = request.user - param = parse_data(request.read()) - token = param.get('name', '') - version = param.get('version', '') - project_name = param.get('project', 'Demo Project').strip() - if not token or not version or not project_name: - return R.failure(msg="参数错误") - - project = self.get_project(project_name, user) - if project: - project_current_version = self.get_project_current_version(project['id']) - if self.is_exist_agent(token, project_name, user, project_current_version['id']): - return R.failure(msg="agent已注册") - else: - # 注册项目 - self.register_agent(True, token, user, version, project['id'], project_name, - project_current_version['id']) - else: - if self.is_exist_agent(token, project_name, user, 0): - return R.failure(msg="agent已注册") - else: - self.register_agent(False, token, user, version, 0, project_name, 0) - - return R.success() - except Exception as e: - return R.failure(msg="参数错误") - - @staticmethod - def get_project(project_name, user): - return IastProject.objects.values("id").filter(name=project_name, user=user).first() - - @staticmethod - def get_project_current_version(project_id): - return IastProjectVersion.objects.filter(project_id=project_id, current_version=1, status=1).values( - "id").first() - - @staticmethod - def is_exist_agent(token, project_name, user, current_project_version_id): - return IastAgent.objects.values("id").filter(token=token, project_name=project_name, user=user, - project_version_id=current_project_version_id).exists() - - @staticmethod - def register_agent(exist_project, token, user, version, project_id, project_name, project_version_id): - if exist_project: - IastAgent.objects.filter(token=token, online=1, user=user).update(online=0) - - IastAgent.objects.create( - token=token, - version=version, - latest_time=int(time.time()), - user=user, - is_running=1, - bind_project_id=project_id, - project_name=project_name, - control=0, - is_control=0, - online=1, - project_version_id=project_version_id - ) diff --git a/apiserver/views/engine_status.py b/apiserver/views/engine_status.py deleted file mode 100644 index af0585549..000000000 --- a/apiserver/views/engine_status.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/8/4 16:47 -# software: PyCharm -# project: webapi -import logging -import time - 
-from dongtai_models.models.agent import IastAgent - -from AgentServer.base import R -from apiserver.base.openapi import OpenApiEndPoint - -logger = logging.getLogger("django") - - -class EngineUpdateEndPoint(OpenApiEndPoint): - name = "iast_engine_update_status_edit" - description = "IAST 检测引擎更新状态修改接口" - - def get(self, request, status=None): - """ - IAST 检测引擎 agent接口 - :param request: - :return: - """ - agent_name = request.query_params.get('agent_name') - agent = IastAgent.objects.filter(user=request.user, token=agent_name, is_running=1).first() - if not agent: - return R.failure("agent不存在或无权限访问") - - if status: - if agent.is_control == 1: - agent.control = status - agent.is_control = 0 - agent.latest_time = int(time.time()) - agent.save() - return R.success(msg="安装完成") - else: - return R.failure(msg="引擎正在被安装或卸载,请稍后再试") - else: - if agent.control == 1 and agent.is_control == 0: - agent.is_control = 1 - agent.latest_time = int(time.time()) - agent.save() - return R.success(data=agent.control) - else: - return R.failure(msg="不需要更新或正在更新中") - - -class EngineStopStart(OpenApiEndPoint): - name = "iast_engine_update_status_edit" - description = "IAST 检测引擎更新状态修改接口" - - def get(self, request): - """ - IAST 检测引擎 agent接口 - :param request: - :return: - """ - agent_name = request.query_params.get('agent_name') - agent = IastAgent.objects.filter(user=request.user, token=agent_name, is_running=1).first() - if not agent: - return R.failure("agent不存在或无权限访问") - - if agent.is_control ==0: - return R.failure(msg="暂无命令", data="notcmd") - - #启动 - if agent.control == 3: - agent.is_control = 0 - agent.is_core_running = 1 - agent.latest_time = int(time.time()) - agent.save() - return R.success(data="start", msg=str(agent.is_running)+agent.token) - #暂停 - if agent.control == 4: - agent.is_control = 0 - agent.is_core_running = 0 - agent.latest_time = int(time.time()) - agent.save() - return R.success(data="stop") - return R.success(data="notcmd") \ No newline at end of file diff --git a/apiserver/views/hook_profiles.py b/apiserver/views/hook_profiles.py deleted file mode 100644 index 7cdd93766..000000000 --- a/apiserver/views/hook_profiles.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2020/11/24 下午9:16 -# software: PyCharm -# project: lingzhi-webapi -import json -import logging - -from dongtai_models.models.hook_strategy import HookStrategy -from dongtai_models.models.hook_talent_strategy import IastHookTalentStrategy -from dongtai_models.models.hook_type import HookType -from rest_framework.request import Request - -from AgentServer import const -from AgentServer.base import R -from apiserver.base.openapi import OpenApiEndPoint -# note: 当前依赖必须保留,否则无法通过hooktype反向查找策略 - -logger = logging.getLogger("django") - - -class HookProfilesEndPoint(OpenApiEndPoint): - name = "api-v1-profiles" - description = "获取HOOK策略" - - @staticmethod - def get_profiles(talent, user=None): - profiles = list() - talent_strategy = IastHookTalentStrategy.objects.filter(talent=talent).first() - strategy_types = json.loads(talent_strategy.values) - enable_hook_types = HookType.objects.filter(id__in=strategy_types, enable=const.HOOK_TYPE_ENABLE) - for enable_hook_type in enable_hook_types: - strategy_details = list() - profiles.append({ - 'type': enable_hook_type.type, - 'enable': enable_hook_type.enable, - 'value': enable_hook_type.value, - 'details': strategy_details - }) - strategies = enable_hook_type.strategies.filter(created_by__in=[1, user.id] if user else [1], - 
enable=const.HOOK_TYPE_ENABLE) - for strategy in strategies: - strategy_details.append({ - "source": strategy.source, - "track": strategy.track, - "target": strategy.target, - "value": strategy.value, - "inherit": strategy.inherit - }) - return profiles - - def get(self, request: Request): - """ - IAST 检测引擎 agent接口 - :param request: - :return: - """ - user = request.user - talent = user.get_talent() - profiles = self.get_profiles(talent, user) - - return R.success(data=profiles) - - def put(self, request): - pass - - def post(self): - pass - - -if __name__ == '__main__': - strategy_count = HookStrategy.objects.count() diff --git a/apiserver/views/report_upload.py b/apiserver/views/report_upload.py deleted file mode 100644 index 880b96583..000000000 --- a/apiserver/views/report_upload.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python -# -*- coding:utf-8 -*- -# author:owefsad -# datetime:2021/1/12 下午7:45 -# software: PyCharm -# project: lingzhi-agent-server - -from AgentServer.base import R -from apiserver.base.openapi import OpenApiEndPoint -from apiserver.decrypter import parse_data -from apiserver.report.handler.report_handler_factory import ReportHandlerFactory - - -class ReportUploadEndPoint(OpenApiEndPoint): - name = "api-v1-report-upload" - description = "agent上传报告" - - def post(self, request): - try: - report = parse_data(request.read()) - report_type = report.get('type') - handler = ReportHandlerFactory.get_handler(report_type) - handler.handle(report, request.user) - - return R.success(msg="report upload success.") - except Exception as e: - return R.failure(msg=f"report upload failed, reason: {e}") diff --git a/batect b/batect new file mode 100755 index 000000000..bf7ac71f5 --- /dev/null +++ b/batect @@ -0,0 +1,205 @@ +#!/usr/bin/env bash + +{ + set -euo pipefail + + # This file is part of Batect. + # Do not modify this file. It will be overwritten next time you upgrade Batect. + # You should commit this file to version control alongside the rest of your project. It should not be installed globally. + # For more information, visit https://github.com/batect/batect. + + VERSION="0.79.1" + CHECKSUM="${BATECT_DOWNLOAD_CHECKSUM:-8d7de395863cddecc660933fa05d67af54129b06a7fea2307e409c7cd3c04686}" + DOWNLOAD_URL_ROOT=${BATECT_DOWNLOAD_URL_ROOT:-"https://updates.batect.dev/v1/files"} + DOWNLOAD_URL=${BATECT_DOWNLOAD_URL:-"$DOWNLOAD_URL_ROOT/$VERSION/batect-$VERSION.jar"} + QUIET_DOWNLOAD=${BATECT_QUIET_DOWNLOAD:-false} + + BATECT_WRAPPER_CACHE_DIR=${BATECT_CACHE_DIR:-"$HOME/.batect/cache"} + VERSION_CACHE_DIR="$BATECT_WRAPPER_CACHE_DIR/$VERSION" + JAR_PATH="$VERSION_CACHE_DIR/batect-$VERSION.jar" + BATECT_WRAPPER_DID_DOWNLOAD=false + + SCRIPT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + + function main() { + if ! haveVersionCachedLocally; then + download + BATECT_WRAPPER_DID_DOWNLOAD=true + fi + + checkChecksum + runApplication "$@" + } + + function haveVersionCachedLocally() { + [ -f "$JAR_PATH" ] + } + + function download() { + checkForCurl + + mkdir -p "$VERSION_CACHE_DIR" + temp_file=$(mktemp) + + if [[ $QUIET_DOWNLOAD == 'true' ]]; then + curl --silent --fail --show-error --location --output "$temp_file" "$DOWNLOAD_URL" + else + echo "Downloading Batect version $VERSION from $DOWNLOAD_URL..." 
+ curl -# --fail --show-error --location --output "$temp_file" "$DOWNLOAD_URL" + fi + + mv "$temp_file" "$JAR_PATH" + } + + function checkChecksum() { + local_checksum=$(getLocalChecksum) + + if [[ "$local_checksum" != "$CHECKSUM" ]]; then + echo "The downloaded version of Batect does not have the expected checksum. Delete '$JAR_PATH' and then re-run this script to download it again." + exit 1 + fi + } + + function getLocalChecksum() { + if [[ "$(uname)" == "Darwin" ]]; then + shasum -a 256 "$JAR_PATH" | cut -d' ' -f1 + else + sha256sum "$JAR_PATH" | cut -d' ' -f1 + fi + } + + function runApplication() { + java_path=$(getPathToJava) + checkForJava "$java_path" + + java_version_info=$(getJavaVersionInfo "$java_path") + checkJavaVersion "$java_version_info" + + java_version=$(extractJavaVersion "$java_version_info") + java_version_major=$(extractJavaMajorVersion "$java_version") + + if (( java_version_major >= 9 )); then + JAVA_OPTS=(--add-opens java.base/sun.nio.ch=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED) + else + JAVA_OPTS=() + fi + + if [[ "$(uname -o 2>&1)" == "Msys" ]] && hash winpty 2>/dev/null && [ -t /dev/stdin ]; then + GIT_BASH_PTY_WORKAROUND=(winpty) + else + GIT_BASH_PTY_WORKAROUND=() + fi + + BATECT_WRAPPER_SCRIPT_DIR="$SCRIPT_PATH" \ + BATECT_WRAPPER_CACHE_DIR="$BATECT_WRAPPER_CACHE_DIR" \ + BATECT_WRAPPER_DID_DOWNLOAD="$BATECT_WRAPPER_DID_DOWNLOAD" \ + HOSTNAME="$HOSTNAME" \ + exec \ + ${GIT_BASH_PTY_WORKAROUND[@]+"${GIT_BASH_PTY_WORKAROUND[@]}"} \ + "$java_path" \ + -Djava.net.useSystemProxies=true \ + ${JAVA_OPTS[@]+"${JAVA_OPTS[@]}"} \ + -jar "$JAR_PATH" \ + "$@" + } + + function checkForCurl() { + if ! hash curl 2>/dev/null; then + echo "curl is not installed or not on your PATH. Please install it and try again." >&2 + exit 1 + fi + } + + function getPathToJava() { + if useJavaHome; then + echo "$JAVA_HOME/bin/java" + else + echo "java" + fi + } + + function useJavaHome() { + test -n "${JAVA_HOME+x}" + } + + function checkForJava() { + local java_path="$1" + + if ! hash "$java_path" 2>/dev/null; then + showJavaNotInstalledError + fi + } + + function showJavaNotInstalledError() { + if useJavaHome; then + echo "JAVA_HOME is set to '$JAVA_HOME', but there is no Java executable at '$JAVA_HOME/bin/java'." >&2 + else + echo "Java is not installed or not on your PATH. Please install it and try again." >&2 + fi + + exit 1 + } + + function checkJavaVersion() { + java_version_info="$1" + java_version=$(extractJavaVersion "$java_version_info") + java_version_major=$(extractJavaMajorVersion "$java_version") + java_version_minor=$(extractJavaMinorVersion "$java_version") + + if (( java_version_major < 1 || ( java_version_major == 1 && java_version_minor <= 7 ) )); then + if useJavaHome; then + echo "The version of Java that is available in JAVA_HOME is version $java_version, but version 1.8 or greater is required." >&2 + echo "If you have a newer version of Java installed, please make sure JAVA_HOME is set correctly." >&2 + echo "JAVA_HOME takes precedence over any versions of Java available on your PATH." >&2 + else + echo "The version of Java that is available on your PATH is version $java_version, but version 1.8 or greater is required." >&2 + echo "If you have a newer version of Java installed, please make sure your PATH is set correctly." >&2 + fi + + exit 1 + fi + + if ! javaIs64Bit "$java_version_info"; then + if useJavaHome; then + echo "The version of Java that is available in JAVA_HOME is a 32-bit version, but Batect requires a 64-bit Java runtime." 
>&2 + echo "If you have a 64-bit version of Java installed, please make sure JAVA_HOME is set correctly." >&2 + echo "JAVA_HOME takes precedence over any versions of Java available on your PATH." >&2 + else + echo "The version of Java that is available on your PATH is a 32-bit version, but Batect requires a 64-bit Java runtime." >&2 + echo "If you have a 64-bit version of Java installed, please make sure your PATH is set correctly." >&2 + fi + + exit 1 + fi + } + + function getJavaVersionInfo() { + local java_path="$1" + + "$java_path" -version 2>&1 || showJavaNotInstalledError + } + + function extractJavaVersion() { + echo "$1" | grep version | sed -En ';s/.* version "([0-9]+)(\.([0-9]+))?.*".*/\1.\3/p;' + } + + function extractJavaMajorVersion() { + java_version=$1 + + echo "${java_version%.*}" + } + + function extractJavaMinorVersion() { + java_version=$1 + java_version_minor="${java_version#*.}" + + echo "${java_version_minor:-0}" + } + + function javaIs64Bit() { + echo "$1" | grep -q '64-[Bb]it' + } + + main "$@" + exit $? +} diff --git a/batect.cmd b/batect.cmd new file mode 100644 index 000000000..136f157ec --- /dev/null +++ b/batect.cmd @@ -0,0 +1,473 @@ +@echo off +rem This file is part of Batect. +rem Do not modify this file. It will be overwritten next time you upgrade Batect. +rem You should commit this file to version control alongside the rest of your project. It should not be installed globally. +rem For more information, visit https://github.com/batect/batect. + +setlocal EnableDelayedExpansion + +set "version=0.79.1" + +if "%BATECT_CACHE_DIR%" == "" ( + set "BATECT_CACHE_DIR=%USERPROFILE%\.batect\cache" +) + +set "rootCacheDir=!BATECT_CACHE_DIR!" +set "cacheDir=%rootCacheDir%\%version%" +set "ps1Path=%cacheDir%\batect-%version%.ps1" + +set script=Set-StrictMode -Version 2.0^ + +$ErrorActionPreference = 'Stop'^ + +^ + +$Version='0.79.1'^ + +^ + +function getValueOrDefault($value, $default) {^ + + if ($value -eq $null) {^ + + $default^ + + } else {^ + + $value^ + + }^ + +}^ + +^ + +$DownloadUrlRoot = getValueOrDefault $env:BATECT_DOWNLOAD_URL_ROOT "https://updates.batect.dev/v1/files"^ + +$UrlEncodedVersion = [Uri]::EscapeDataString($Version)^ + +$DownloadUrl = getValueOrDefault $env:BATECT_DOWNLOAD_URL "$DownloadUrlRoot/$UrlEncodedVersion/batect-$UrlEncodedVersion.jar"^ + +$ExpectedChecksum = getValueOrDefault $env:BATECT_DOWNLOAD_CHECKSUM '8d7de395863cddecc660933fa05d67af54129b06a7fea2307e409c7cd3c04686'^ + +^ + +$RootCacheDir = getValueOrDefault $env:BATECT_CACHE_DIR "$env:USERPROFILE\.batect\cache"^ + +$VersionCacheDir = "$RootCacheDir\$Version"^ + +$JarPath = "$VersionCacheDir\batect-$Version.jar"^ + +$DidDownload = 'false'^ + +^ + +function main() {^ + + if (-not (haveVersionCachedLocally)) {^ + + download^ + + $DidDownload = 'true'^ + + }^ + +^ + + checkChecksum^ + + runApplication @args^ + +}^ + +^ + +function haveVersionCachedLocally() {^ + + Test-Path $JarPath^ + +}^ + +^ + +function download() {^ + + Write-Output "Downloading Batect version $Version from $DownloadUrl..."^ + +^ + + createCacheDir^ + +^ + + $oldProgressPreference = $ProgressPreference^ + +^ + + try {^ + + [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12^ + +^ + + # Turn off the progress bar to significantly reduce download times - see https://github.com/PowerShell/PowerShell/issues/2138#issuecomment-251165868^ + + $ProgressPreference = 'SilentlyContinue'^ + +^ + + Invoke-WebRequest -Uri $DownloadUrl -OutFile $JarPath ^| Out-Null^ + + } catch {^ + + $Message = 
$_.Exception.Message^ + +^ + + Write-Host -ForegroundColor Red "Downloading failed with error: $Message"^ + + exit 1^ + + } finally {^ + + $ProgressPreference = $oldProgressPreference^ + + }^ + +}^ + +^ + +function checkChecksum() {^ + + $localChecksum = (Get-FileHash -Algorithm 'SHA256' $JarPath).Hash.ToLower()^ + +^ + + if ($localChecksum -ne $expectedChecksum) {^ + + Write-Host -ForegroundColor Red "The downloaded version of Batect does not have the expected checksum. Delete '$JarPath' and then re-run this script to download it again."^ + + exit 1^ + + }^ + +}^ + +^ + +function createCacheDir() {^ + + if (-not (Test-Path $VersionCacheDir)) {^ + + New-Item -ItemType Directory -Path $VersionCacheDir ^| Out-Null^ + + }^ + +}^ + +^ + +function runApplication() {^ + + $java = findJava^ + + $javaVersion = checkJavaVersion $java^ + +^ + + if ($javaVersion.Major -ge 9) {^ + + $javaArgs = @("--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/java.io=ALL-UNNAMED")^ + + } else {^ + + $javaArgs = @()^ + + }^ + +^ + + $combinedArgs = $javaArgs + @("-Djava.net.useSystemProxies=true", "-jar", $JarPath) + $args^ + + $env:HOSTNAME = $env:COMPUTERNAME^ + + $env:BATECT_WRAPPER_CACHE_DIR = $RootCacheDir^ + + $env:BATECT_WRAPPER_DID_DOWNLOAD = $DidDownload^ + +^ + + $info = New-Object System.Diagnostics.ProcessStartInfo^ + + $info.FileName = $java.Source^ + + $info.Arguments = combineArgumentsToString($combinedArgs)^ + + $info.RedirectStandardError = $false^ + + $info.RedirectStandardOutput = $false^ + + $info.UseShellExecute = $false^ + +^ + + $process = New-Object System.Diagnostics.Process^ + + $process.StartInfo = $info^ + + $process.Start() ^| Out-Null^ + + $process.WaitForExit()^ + +^ + + exit $process.ExitCode^ + +}^ + +^ + +function useJavaHome() {^ + + return ($env:JAVA_HOME -ne $null)^ + +}^ + +^ + +function findJava() {^ + + if (useJavaHome) {^ + + $java = Get-Command "$env:JAVA_HOME\bin\java" -ErrorAction SilentlyContinue^ + +^ + + if ($java -eq $null) {^ + + Write-Host -ForegroundColor Red "JAVA_HOME is set to '$env:JAVA_HOME', but there is no Java executable at '$env:JAVA_HOME\bin\java.exe'."^ + + exit 1^ + + }^ + +^ + + return $java^ + + }^ + +^ + + $java = Get-Command "java" -ErrorAction SilentlyContinue^ + +^ + + if ($java -eq $null) {^ + + Write-Host -ForegroundColor Red "Java is not installed or not on your PATH. 
Please install it and try again."^ + + exit 1^ + + }^ + +^ + + return $java^ + +}^ + +^ + +function checkJavaVersion([System.Management.Automation.CommandInfo]$java) {^ + + $versionInfo = getJavaVersionInfo $java^ + + $rawVersion = getJavaVersion $versionInfo^ + + $parsedVersion = New-Object Version -ArgumentList $rawVersion^ + + $minimumVersion = "1.8"^ + +^ + + if ($parsedVersion -lt (New-Object Version -ArgumentList $minimumVersion)) {^ + + if (useJavaHome) {^ + + Write-Host -ForegroundColor Red "The version of Java that is available in JAVA_HOME is version $rawVersion, but version $minimumVersion or greater is required."^ + + Write-Host -ForegroundColor Red "If you have a newer version of Java installed, please make sure JAVA_HOME is set correctly."^ + + Write-Host -ForegroundColor Red "JAVA_HOME takes precedence over any versions of Java available on your PATH."^ + + } else {^ + + Write-Host -ForegroundColor Red "The version of Java that is available on your PATH is version $rawVersion, but version $minimumVersion or greater is required."^ + + Write-Host -ForegroundColor Red "If you have a newer version of Java installed, please make sure your PATH is set correctly."^ + + }^ + +^ + + exit 1^ + + }^ + +^ + + if (-not ($versionInfo -match "64\-[bB]it")) {^ + + if (useJavaHome) {^ + + Write-Host -ForegroundColor Red "The version of Java that is available in JAVA_HOME is a 32-bit version, but Batect requires a 64-bit Java runtime."^ + + Write-Host -ForegroundColor Red "If you have a 64-bit version of Java installed, please make sure JAVA_HOME is set correctly."^ + + Write-Host -ForegroundColor Red "JAVA_HOME takes precedence over any versions of Java available on your PATH."^ + + } else {^ + + Write-Host -ForegroundColor Red "The version of Java that is available on your PATH is a 32-bit version, but Batect requires a 64-bit Java runtime."^ + + Write-Host -ForegroundColor Red "If you have a 64-bit version of Java installed, please make sure your PATH is set correctly."^ + + }^ + +^ + + exit 1^ + + }^ + +^ + + return $parsedVersion^ + +}^ + +^ + +function getJavaVersionInfo([System.Management.Automation.CommandInfo]$java) {^ + + $info = New-Object System.Diagnostics.ProcessStartInfo^ + + $info.FileName = $java.Source^ + + $info.Arguments = "-version"^ + + $info.RedirectStandardError = $true^ + + $info.RedirectStandardOutput = $true^ + + $info.UseShellExecute = $false^ + +^ + + $process = New-Object System.Diagnostics.Process^ + + $process.StartInfo = $info^ + + $process.Start() ^| Out-Null^ + + $process.WaitForExit()^ + +^ + + $stderr = $process.StandardError.ReadToEnd()^ + + return $stderr^ + +}^ + +^ + +function getJavaVersion([String]$versionInfo) {^ + + $versionLine = ($versionInfo -split [Environment]::NewLine)[0]^ + +^ + + if (-not ($versionLine -match "version `"([0-9]+)(\.([0-9]+))?.*`"")) {^ + + Write-Error "Java reported a version that does not match the expected format: $versionLine"^ + + }^ + +^ + + $major = $Matches.1^ + +^ + + if ($Matches.Count -ge 3) {^ + + $minor = $Matches.3^ + + } else {^ + + $minor = "0"^ + + }^ + +^ + + return "$major.$minor"^ + +}^ + +^ + +function combineArgumentsToString([Object[]]$arguments) {^ + + $combined = @()^ + +^ + + $arguments ^| %% { $combined += escapeArgument($_) }^ + +^ + + return $combined -join " "^ + +}^ + +^ + +function escapeArgument([String]$argument) {^ + + return '"' + $argument.Replace('"', '"""') + '"'^ + +}^ + +^ + +main @args^ + + + +if not exist "%cacheDir%" ( + mkdir "%cacheDir%" +) + +echo !script! 
> "%ps1Path%" + +set BATECT_WRAPPER_SCRIPT_DIR=%~dp0 + +rem Why do we explicitly exit? +rem cmd.exe appears to read this script one line at a time and then executes it. +rem If we modify the script while it is still running (eg. because we're updating it), then cmd.exe does all kinds of odd things +rem because it continues execution from the next byte (which was previously the end of the line). +rem By explicitly exiting on the same line as starting the application, we avoid these issues as cmd.exe has already read the entire +rem line before we start the application and therefore will always exit. + +rem Why do we set PSModulePath? +rem See issue #627 +set "PSModulePath=" +powershell.exe -ExecutionPolicy Bypass -NoLogo -NoProfile -File "%ps1Path%" %* && exit /b 0 || exit /b !ERRORLEVEL! + +rem What's this for? +rem This is so the tests for the wrapper has a way to ensure that the line above terminates the script correctly. +echo WARNING: you should never see this, and if you do, then Batect's wrapper script has a bug diff --git a/batect.yml b/batect.yml new file mode 100644 index 000000000..9b68314b8 --- /dev/null +++ b/batect.yml @@ -0,0 +1,137 @@ +containers: + dongtai-mysql: + image: dongtai/dongtai-mysql-unittest:latest + image_pull_policy: Always + ports: + - "33060:3306" + + dongtai-redis: + image: dongtai/dongtai-redis:latest + image_pull_policy: Always + dongtai-server: + build_directory: . + dockerfile: ./Dockerfile + # environment: + # DOC: ${WEBAPI_DOC:-TRUE} + # debug: ${WEBAPI_debug:-true} + # SAVEEYE: ${WEBAPI_SAVEEYE:-TRUE} + # REQUESTLOG: ${WEBAPI_REQUESTLOG:-TRUE} + # CPROFILE: ${WEBAPI_CPROFILE:-TRUE} + # PYTHONAGENT: ${WEBAPI_PYTHON_AGENT:-FALSE} + # PROJECT_NAME: ${WEBAPI_PROJECT_NAME:-LocalWEBAPI} + # PROJECT_VERSION: ${WEBAPI_PROJECT_VERSION:-v1.0} + # LOG_PATH: ${WEBAPI_LOG_PATH:-/tmp/dongtai-agent-python.log} + # DONGTAI_IAST_BASE_URL: ${DONGTAI_IAST_BASE_URL:-https://iast.io/openapi} + # DONGTAI_AGNET_TOKEN: ${DONGTAI_AGNET_TOKEN:-79798299b48839c84886d728958a8f708e119868} + volumes: + - .:/opt/dongtai/ + + dongtai-web: + image: dongtai/dongtai-web:latest + image_pull_policy: Always + volumes: + - ./deploy/docker-compose/nginx.conf:/etc/nginx/nginx.conf + ports: + - "80:80" + + dongtai-engine: + build_directory: . + dockerfile: ./Dockerfile + command: worker + volumes: + - .:/opt/dongtai/ + + + dongtai-engine-task: + build_directory: . 
+ dockerfile: ./Dockerfile + command: beat + volumes: + - .:/opt/dongtai/ + dependencies: + - dongtai-engine + +tasks: + serve: + description: Serve the webapi application standingalone + run: + container: dongtai-server + entrypoint: /opt/dongtai/webapi/.batect/agent_deco.sh /opt/dongtai/webapi/.batect/manage_run_server.sh + ports: + - "8000:8000" + group: serve + serve-uwsgi: + description: Serve the webapi application standingalone + run: + container: dongtai-server + ports: + - "8000:8000" + group: serve + + shell: + description: Serve the webapi application standingalone + run: + container: dongtai-server + entrypoint: /bin/bash + ports: + - "8000:8000" + dependencies: + - dongtai-mysql + - dongtai-redis + group: serve + + makemessages: + description: makemessages about i18n + run: + container: dongtai-server + entrypoint: python manage.py makemessages -l zh -l en + ports: + - "8000:8000" + group: tool + + serve-with-db: + description: Serve the webapi application with db + run: + container: dongtai-server + # entrypoint: /opt/dongtai/webapi/.batect/agent_deco.sh /opt/dongtai/webapi/.batect/manage_run_server.sh + ports: + - "8000:8000" + dependencies: + - dongtai-mysql + - dongtai-redis + customise: + dongtai-mysql: + ports: + - "33060:3306" + group: serve + + test: + description: run webapi unittest + run: + container: dongtai-server + entrypoint: /opt/dongtai/webapi/.batect/agent_deco.sh /opt/dongtai/webapi/.batect/manage_test.sh + dependencies: + - dongtai-redis + - dongtai-mysql + group: test + + integration-test-web: + description: integration with web front-end + run: + container: dongtai-web + dependencies: + - dongtai-mysql + - dongtai-redis + - dongtai-server + group: integration + integration-test-all: + description: integration with all components + run: + container: dongtai-web + dependencies: + - dongtai-mysql + - dongtai-server + - dongtai-redis + - dongtai-engine + - dongtai-engine-task + group: integration diff --git a/build_with_docker_compose.sh b/build_with_docker_compose.sh new file mode 100755 index 000000000..5de2e4242 --- /dev/null +++ b/build_with_docker_compose.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -x +build_dongtai_iast(){ + echo -e "Start to install DongTai IAST service" + chmod u+x deploy/docker-compose/dtctl + cd deploy/docker-compose/ + ./dtctl install +} + +build_dongtai_iast diff --git a/conf/config.ini.example b/conf/config.ini.example deleted file mode 100644 index 0aefa0075..000000000 --- a/conf/config.ini.example +++ /dev/null @@ -1,27 +0,0 @@ -[mysql] -host = mysql-server -port = mysql-port -name = database_name -user = username -password = password - -[redis] -host = redis-server -port = redis-port -password = password -db = - -[engine] -url = engine-url - -[smtp] -server = server -user = user -password = password -from_addr = from_add -ssl = ssl -cc_addr = cc_addr - -[aliyun_oss] -access_key = LTAI5t7pu9WUT2DcbknfNiaD -access_key_secret = ZoEOSi7KfayQ7JalvJVHa37fdZ4XFY \ No newline at end of file diff --git a/config/rsa_keys/private_key.pem b/config/rsa_keys/private_key.pem deleted file mode 100644 index fdb94b7e1..000000000 --- a/config/rsa_keys/private_key.pem +++ /dev/null @@ -1,15 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIICXAIBAAKBgQC5hIGsrKhzobovomu0qdgHWbrrVZr8pOwuFUoDYWqaw+GlPn/n -R+abMO3nvlRqI7XmQwMCl2vAKwT5tu9QyVxqadgxfIssFCkruZFubrnqSYXmsrgu -4h/26VBLBzRo0PvQNa3TUDetwHqy5My4YTfye55978AQqStjX0c3Q1S2ewIDAQAB -AoGAdNjOzDGbj+knWeVVLV9kn4yWvK8YhVRjJctm6KYs/PmvX+LzsHqK9uhkj/hD 
-Nu7zHOrMNI+m4Ey4P4hHR3zsZU9iQm7BrRKnEa5MyIqTLPZHeeGMnoXkrPLdI02v -G21WYHvhvRYufSjg0pASAEWRQXodW5cRp0LaXHjIa5PhI0ECQQDdZAVERXKVvZUT -q7vA7cl57+274/Rp0Gnk9kkNgpkWZ8ibdMvF9y7GdqCKa8F/QshQkvyQlXNr6lbe -mo09x0hxAkEA1oTbWpFjYqKVnCt9BkMoLpudfjAAwJq/0CGoGv73zWuxp9QIUmzu -HiK2WWhkX2+7S5yklU0ITg0RsVQi/j4DqwJAMV/VLNywKWUWkkSCowxlGqS/yF+W -auLoOwASWsj9i1mqC8GIkKYH8IsUUsqlZNRoCFA/s58hFrg0l4Cwb066gQJBAMj6 -P+hiPhKy8CgYNlIfqQ3BjZQjtRC96uc8IRrptZAnTZJG/GT82Toym1S4kRE4xOja -IPgf65/0bpcMlJxNzXECQAgFDQ2ioMMTiPmSrwkImNEzFps5kpdrJiwTYop+xeXT -Hd5CAYP+PXMQxbejn1oRqrhaYrley8RuPO+s9UhEMqY= ------END RSA PRIVATE KEY----- diff --git a/config/rsa_keys/public_key.pem b/config/rsa_keys/public_key.pem deleted file mode 100644 index d1e9c879d..000000000 --- a/config/rsa_keys/public_key.pem +++ /dev/null @@ -1,6 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC5hIGsrKhzobovomu0qdgHWbrr -VZr8pOwuFUoDYWqaw+GlPn/nR+abMO3nvlRqI7XmQwMCl2vAKwT5tu9QyVxqadgx -fIssFCkruZFubrnqSYXmsrgu4h/26VBLBzRo0PvQNa3TUDetwHqy5My4YTfye559 -78AQqStjX0c3Q1S2ewIDAQAB ------END PUBLIC KEY----- diff --git a/deploy/README.MD b/deploy/README.MD new file mode 100644 index 000000000..fe770006e --- /dev/null +++ b/deploy/README.MD @@ -0,0 +1,23 @@ +# dongtai-deploy + +[中文版本(Chinese version)](README.ZH-CN.MD) + +[![DongTai-project](https://img.shields.io/badge/DongTai%20versions-beta-green)](https://hxsecurity.github.io/DongTai-Doc/) + +## Single-node deployment + +> Docker-compose one-click deployment and Docker image one-click deployment solution pull image from Ali Cloud private image warehouse, fast speed, no network problems, recommended to use + +### 1. Docker-compose + +[doc](docker-compose/README.MD) + +### 2. Deploy With Official images + +To be updated + +## Cluster edition deployment + +### Kubernetes + +[doc](kubernetes/README.MD) diff --git a/deploy/README.ZH-CN.MD b/deploy/README.ZH-CN.MD new file mode 100644 index 000000000..ecd5089e6 --- /dev/null +++ b/deploy/README.ZH-CN.MD @@ -0,0 +1,21 @@ +# dongtai-deploy +[English](README.MD) + +[![DongTai-project](https://img.shields.io/badge/DongTai%20versions-beta-green)](https://hxsecurity.github.io/DongTai-Doc/) + +## 一、单机版部署 + +> docker-compose一键部署与docker镜像一键部署方案从阿里云私有镜像仓库拉取镜像,速度快,无网络问题,推荐使用 + +### 1. docker-compose一键部署 + +[部署方案](docker-compose/README.ZH-CN.MD) + +### 2. 
docker镜像一键部署方案 +待更新 + +## 二、集群版部署 + +### Kubernetes版本一键部署 + +[部署方案](kubernetes/README.ZH-CN.md) diff --git a/AgentServer/__init__.py b/deploy/commands/__init__.py similarity index 100% rename from AgentServer/__init__.py rename to deploy/commands/__init__.py diff --git a/apiserver/admin.py b/deploy/commands/admin.py similarity index 100% rename from apiserver/admin.py rename to deploy/commands/admin.py diff --git a/deploy/commands/apps.py b/deploy/commands/apps.py new file mode 100644 index 000000000..7ec231bb4 --- /dev/null +++ b/deploy/commands/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class DeployConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'deploy.commands' diff --git a/apiserver/__init__.py b/deploy/commands/management/__init__.py similarity index 100% rename from apiserver/__init__.py rename to deploy/commands/management/__init__.py diff --git a/db.sqlite3 b/deploy/commands/management/commands/__init__.py similarity index 100% rename from db.sqlite3 rename to deploy/commands/management/commands/__init__.py diff --git a/deploy/commands/management/commands/statistics.py b/deploy/commands/management/commands/statistics.py new file mode 100644 index 000000000..00e522bd5 --- /dev/null +++ b/deploy/commands/management/commands/statistics.py @@ -0,0 +1,59 @@ +from django.core.management.base import BaseCommand +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.asset_vul import IastAssetVul + + +class Command(BaseCommand): + help = 'scripts to deal with old data to new version' + functions = [] + + def add_arguments(self, parser): + pass + #parser.add_argument('update', nargs='+', type=int) + + def handle(self, *args, **options): + agent_count = IastAgent.objects.all().count() + online_agent_count = IastAgent.objects.filter(online=1).count() + total_method_pool_count = MethodPool.objects.all().count() + total_vulnerability = IastVulnerabilityModel.objects.all().count() + total_high_vulnerability = IastVulnerabilityModel.objects.filter( + level_id=1).count() + total_medium_vulnerability = IastVulnerabilityModel.objects.filter( + level_id=2).count() + total_low_vulnerability = IastVulnerabilityModel.objects.filter( + level_id=3).count() + total_info_vulnerability = IastVulnerabilityModel.objects.filter( + level_id=4).count() + total_note_vulnerability = IastVulnerabilityModel.objects.filter( + level_id=5).count() + total_high_asset_vulnerability = IastAssetVul.objects.filter( + level_id=1).count() + total_medium_asset_vulnerability = IastAssetVul.objects.filter( + level_id=2).count() + total_low_asset_vulnerability = IastAssetVul.objects.filter( + level_id=3).count() + total_info_asset_vulnerability = IastAssetVul.objects.filter( + level_id=4).count() + total_note_asset_vulnerability = IastAssetVul.objects.filter( + level_id=5).count() + + self.stdout.write(f""" + ============================================= + agent_count={agent_count} + online_agent_count={online_agent_count} + total_method_pool_count={total_method_pool_count} + total_vulnerability={total_vulnerability} + total_high_vulnerability={total_high_vulnerability} + total_medium_vulnerability={total_medium_vulnerability} + total_low_vulnerability={total_low_vulnerability} + total_info_vulnerability={total_info_vulnerability} + total_note_vulnerability={total_note_vulnerability} + 
total_high_asset_vulnerability={total_high_asset_vulnerability} + total_medium_asset_vulnerability={total_medium_asset_vulnerability} + total_low_asset_vulnerability={total_low_asset_vulnerability} + total_info_asset_vulnerability={total_info_asset_vulnerability} + total_note_asset_vulnerability={total_note_asset_vulnerability} + ============================================== + """) diff --git a/deploy/commands/management/commands/update.py b/deploy/commands/management/commands/update.py new file mode 100644 index 000000000..b00f6b7da --- /dev/null +++ b/deploy/commands/management/commands/update.py @@ -0,0 +1,16 @@ +from django.core.management.base import BaseCommand + + +class Command(BaseCommand): + help = 'scripts to deal with old data to new version' + functions = [] + def add_arguments(self, parser): + pass + #parser.add_argument('update', nargs='+', type=int) + + def handle(self, *args, **options): + from dongtai_web.dongtai_sca.tasks import refresh_all_asset_data + refresh_all_asset_data() + self.stdout.write( + self.style.SUCCESS('Successfully flash old data "%s"' % + '123123213321')) diff --git a/deploy/commands/models.py b/deploy/commands/models.py new file mode 100644 index 000000000..71a836239 --- /dev/null +++ b/deploy/commands/models.py @@ -0,0 +1,3 @@ +from django.db import models + +# Create your models here. diff --git a/apiserver/tests.py b/deploy/commands/tests.py similarity index 100% rename from apiserver/tests.py rename to deploy/commands/tests.py diff --git a/deploy/commands/urls.py b/deploy/commands/urls.py new file mode 100644 index 000000000..2f740c2f2 --- /dev/null +++ b/deploy/commands/urls.py @@ -0,0 +1,26 @@ +"""webapi URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/3.0/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.conf.urls.static import static +from django.urls import include, path +import os +from deploy.views import VersionListView + +urlpatterns = [ + path('versionlist', VersionListView.as_view()), +] + + +urlpatterns = [path('api/v1/version_control/', include(urlpatterns))] diff --git a/deploy/commands/views.py b/deploy/commands/views.py new file mode 100644 index 000000000..1bb946082 --- /dev/null +++ b/deploy/commands/views.py @@ -0,0 +1,27 @@ +from django.shortcuts import render +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.version_control import VersionControl +import json +# Create your views here. 
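# VersionListView (defined below) backs GET /api/v1/version_control/versionlist:
# for every component named in COMPONENT_LIST it returns the stored version, the
# commit hash when one is recorded, and any extra fields kept as JSON in
# VersionControl.additional.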
+ +COMPONENT_LIST = ("DongTai", "DongTai-agent-java", "DongTai-agent-python", + "DongTai-engine", "DongTai-openapi", "DongTai-webapi") + + +class VersionListView(UserEndPoint): + def get(self, request): + component_datas = VersionControl.objects.filter( + component_name__in=COMPONENT_LIST).all() + data = {} + for component_data in component_datas: + data[component_data.component_name] = { + "version": component_data.version, + "commit_hash": component_data.component_version_hash + } + if not data[component_data.component_name]['commit_hash']: + del data[component_data.component_name]['commit_hash'] + if component_data.additional: + additional_data = json.loads(component_data.additional) + data[component_data.component_name].update(additional_data) + return R.success(data=data) diff --git a/deploy/deploy-eks-iast-saas-openapi-prod.yml b/deploy/deploy-eks-iast-saas-openapi-prod.yml deleted file mode 100644 index 820187e2c..000000000 --- a/deploy/deploy-eks-iast-saas-openapi-prod.yml +++ /dev/null @@ -1,55 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: iast-agent-api - namespace: iast-prod - annotations: - kubesphere.io/description: iast-agent-api - labels: - app: iast-agent-api -spec: - replicas: 5 - selector: - matchLabels: - app: iast-agent-api - template: - metadata: - labels: - app: iast-agent-api - spec: - containers: - - name: iast-agent-api-container - image: registry.cn-beijing.aliyuncs.com/secnium/iast-saas-openapi:VERSION - imagePullPolicy: Always - volumeMounts: - - name: configfile - mountPath: /opt/iast/apiserver/conf/config.ini - subPath: config.ini - - resources: - limits: - cpu: "500m" - memory: 1000Mi - requests: - cpu: "500m" - memory: 1000Mi - volumes: - - name: configfile - configMap: - name: iast-test-config.ini - imagePullSecrets: - - name: aliyun-registry-secret ---- -apiVersion: v1 -kind: Service -metadata: - name: iast-agent-api-svc - namespace: iast-prod -spec: - ports: - - port: 80 - protocol: TCP - targetPort: 8000 - selector: - app: iast-agent-api - type: ClusterIP diff --git a/deploy/deploy-eks-iast-saas-openapi-test.yml b/deploy/deploy-eks-iast-saas-openapi-test.yml deleted file mode 100644 index 215e4cbd3..000000000 --- a/deploy/deploy-eks-iast-saas-openapi-test.yml +++ /dev/null @@ -1,55 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: iast-agent-api - namespace: iast-test - annotations: - kubesphere.io/description: iast-agent-api - labels: - app: iast-agent-api -spec: - replicas: 1 - selector: - matchLabels: - app: iast-agent-api - template: - metadata: - labels: - app: iast-agent-api - spec: - containers: - - name: iast-agent-api-container - image: registry.cn-beijing.aliyuncs.com/secnium/iast-saas-openapi-test:VERSION - imagePullPolicy: Always - volumeMounts: - - name: configfile - mountPath: /opt/iast/apiserver/conf/config.ini - subPath: config.ini - - resources: - limits: - cpu: "500m" - memory: 1000Mi - requests: - cpu: "500m" - memory: 1000Mi - volumes: - - name: configfile - configMap: - name: iast-test-config.ini - imagePullSecrets: - - name: aliyun-registry-secret ---- -apiVersion: v1 -kind: Service -metadata: - name: iast-agent-api-svc - namespace: iast-test -spec: - ports: - - port: 80 - protocol: TCP - targetPort: 8000 - selector: - app: iast-agent-api - type: ClusterIP diff --git a/deploy/docker-compose/README-zh.md b/deploy/docker-compose/README-zh.md new file mode 100644 index 000000000..c727c264b --- /dev/null +++ b/deploy/docker-compose/README-zh.md @@ -0,0 +1,67 @@ +## docker-compose一键部署【单机】 
+[English](README.MD) + +洞态IAST云端支持通过`docker-compose`的方式进行一键安装,但是需要提前安装`docker-compose`工具。可以通过`docker-compose -version`命令查看当前机器是否已经安装,如果没有安装,通过百度查询并安装`docker-compose`即可。 + + +### 部署流程 + +#### 自定义配置(可选) +如需修改mysql和redis的配置,需要手动修改 `config-tutorial.ini`文件内的`[mysql]`和`[redis]`部分配置。 +修改完成后,在下述的部署过程选择skip相应的组件. + +#### 部署 + +更新代码 + +``` +git pull +``` + +执行安装 + +``` +./dtctl install +``` +最新发布版本 + + +``` +./dtctl install -v +``` +s: 跳过的资源(skip),可选: `mysql` `redis` `mysql,redis`,默认:不跳过 + +v: 需要被安装的版本,默认为最新的发布版本 + +环境启动成功后,通过安装过程中指定的`web service port`访问即可。 + + +### 升级 + +更新代码 +``` +git pull +``` + +执行更新 +``` +./dtctl upgrade +``` +更新成最新发布版本 + + +``` +./dtctl upgrade -t +``` +更新到执行的版本 + +t: to version + + + +### 卸载 + +``` +./dtctl rm -d +``` +d : 改选项会让数据和服务一起被删除 diff --git a/deploy/docker-compose/README.MD b/deploy/docker-compose/README.MD new file mode 100644 index 000000000..e16e02edf --- /dev/null +++ b/deploy/docker-compose/README.MD @@ -0,0 +1,78 @@ +## Deploy With Docker-Compose +[中文版本(Chinese version)](README-zh.md) + +## Prepare in advance + - Docker + - docker-compose + - You can verify docker/docker-compose with this command: `docker -v` and `docker-compose`, if you re + + +### Deployment + +#### Custom Configuration(Optional) +If you want to modify the configuration of mysql and redis, you need to modify section `[mysql]` and section`[redis]` of the file `config-tutorial.ini` manually. +After modification, `skip` the relevant component in your deploy step. + + +#### Deploy + +get the latest code + +``` +git pull +``` + +install + +``` +./dtctl install +``` + +install the latest release version + +``` +./dtctl install -v +``` + +s: skip specified component,optional: `mysql` `redis` `mysql,redis`,default:don't skip + +v: the version you want to install, default: latest release version + + +After installation, access it with your specified `web service port`. 
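A minimal worked example, assuming you keep the bundled Redis but point `config-tutorial.ini` at an existing MySQL (the release number and the `curl` check are only illustrative; the real port is whatever you enter at the `web service port` prompt):

```
# skip the bundled MySQL and pin a specific release
./dtctl install -s mysql -v 1.0.5

# after the containers start, the web UI should answer on the chosen port (default 80)
curl -I http://localhost:80
```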
+ + + + +### Upgrade + +download latest `dtctl` + +``` +git pull +``` + +upgrade + +``` +./dtctl upgrade +``` + +upgrade to the latest release version + +``` +./dtctl upgrade -t +``` + +upgrade to the specified version + +t: to version + + + +### Uninstall + +``` +./dtctl rm -d +``` +d : remove server and data diff --git a/deploy/docker-compose/config-tutorial.ini b/deploy/docker-compose/config-tutorial.ini new file mode 100644 index 000000000..dcc9d0e57 --- /dev/null +++ b/deploy/docker-compose/config-tutorial.ini @@ -0,0 +1,72 @@ +[mysql] +host = dongtai-mysql +port = 3306 +name = dongtai_webapi +user = root +password = dongtai-iast + +[redis] +host = dongtai-redis +port = 6379 +password = 123456 +db = 0 + +[engine] +url = http://dongtai-engine:8000 + +[apiserver] +url = http://dongtai-server:8000 +#url = http://dongtai-web:8000 + +[security] +csrf_trust_origins = .example.com +secret_key = vbjlvbxfvazjfprywuxgyclmvhtmselddsefxxlcixovmqfpgy + +[smtp] +server = smtp_server +user = smtp_user +password = smtp_password +from_addr = from_addr +ssl = False +cc_addr = cc_addr +port = 25 + + +[sca] +#https://iast.huoxian.cn/openapi/sca/v1 +base_url = https://sca.huoxian.cn/ +timeout = 5 +token = + + +[task] +retryable = true +max_retries = 3 +async_send = true +async_send_delay = 5 + +[log_service] +host = dongtai-logstash +port = 8083 + +[common_file_path] +tmp_path = /tmp/logstash +report_img = report/img +report_pdf = report/pdf +report_word = report/word +report_excel = report/excel + +[other] +domain = http://localhost.domain/ +demo_session_cookie_domain = .huoxian.cn +logging_level = INFO +cache_preheat = True + +[elastic_search] +enable = false +host = http://dongtai:dongtaies@dongtaies:9200 +vulnerability_index = alias-dongtai-v1-vulnerability-dev +asset_aggr_index = alias-dongtai-v1-asset-aggr-dev +asset_index = alias-dongtai-v1-asset-dev +method_pool_index = alias-dongtai-v1-method-pool-dev +asset_vul_index = alias-dongtai-v1-asset-vul-dev diff --git a/deploy/docker-compose/config-tutorial.ini-legacy b/deploy/docker-compose/config-tutorial.ini-legacy new file mode 100644 index 000000000..ee6982ee7 --- /dev/null +++ b/deploy/docker-compose/config-tutorial.ini-legacy @@ -0,0 +1,35 @@ +[mysql] +host = dongtai-mysql +port = 3306 +name = dongtai_webapi +user = root +password = dongtai-iast + +[redis] +host = dongtai-redis +port = 6379 +password = 123456 +db = 0 + +[engine] +url = http://dongtai-engine:8000 + +[apiserver] +url = http://dongtai-openapi:8000 + +[security] +csrf_trust_origins = .example.com + +[smtp] +server = smtp_server +user = smtp_user +password = smtp_password +from_addr = from_addr +ssl = False +cc_addr = cc_addr +port = 25 + + +[sca] +#https://iast.huoxian.cn/openapi/sca/v1 +base_url = https://iast.huoxian.cn/openapi/sca/v1 diff --git a/deploy/docker-compose/docker-compose-openrasp.yml b/deploy/docker-compose/docker-compose-openrasp.yml new file mode 100644 index 000000000..29eb2ea4a --- /dev/null +++ b/deploy/docker-compose/docker-compose-openrasp.yml @@ -0,0 +1,51 @@ +version: "2" +services: + dongtai-mysql: + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-mysql:{CHANGE_THIS_VERSION} + restart: always + volumes: + - "mysql-vol:/var/lib/mysql:rw" + + dongtai-redis: + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-redis:{CHANGE_THIS_VERSION} + restart: always + + dongtai-web: + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-web:{CHANGE_THIS_VERSION} + restart: always + ports: + - "8088:80" + volumes: + - 
"./nginx.conf:/etc/nginx/nginx.conf" + depends_on: + - dongtai-server + + dongtai-engine: + image: "registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{CHANGE_THIS_VERSION}" + restart: always + entrypoint: ["/opt/dongtai/webapi/docker/entrypoint.sh", "worker"] + volumes: + - "./config-tutorial.ini:/opt/dongtai/webapi/conf/config.ini" + + dongtai-server: + image: "registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{CHANGE_THIS_VERSION}" + restart: always + volumes: + - "./config-tutorial.ini:/opt/dongtai/webapi/conf/config.ini" + + dongtai-engine-task: + image: "registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{CHANGE_THIS_VERSION}" + restart: always + entrypoint: ["/opt/dongtai/webapi/docker/entrypoint.sh", "beat"] + volumes: + - "./config-tutorial.ini:/opt/dongtai/webapi/conf/config.ini" + + dongtai-vulns: + image: "dongtai/openrasp-tomcat8:dongtai-java-agent{CHANGE_THIS_VERSION}" + restart: always + ports: + - "8089:8080" + environment: + - "JAVA_TOOL_OPTIONS=-javaagent:/opt/agent.jar -Ddongtai.app.name=OpenRASP -Ddongtai.server.url=http://dongtai-web/openapi -Ddongtai.server.token={CHANGE_THIS_TOKEN}" +volumes: + mysql-vol: diff --git a/deploy/docker-compose/docker-compose.yml.example b/deploy/docker-compose/docker-compose.yml.example new file mode 100644 index 000000000..f744e4eb1 --- /dev/null +++ b/deploy/docker-compose/docker-compose.yml.example @@ -0,0 +1,111 @@ +version: "2" +services: + dongtai-mysql: + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-mysql:{ChangeThisVersion} + restart: always + ports: + - "3306:3306" + volumes: + - "mysql-vol:/var/lib/mysql:rw" + logging: + driver: "json-file" + options: + max-size: "10m" + dongtai-redis: + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-redis:{ChangeThisVersion} + restart: always + logging: + driver: "json-file" + options: + max-size: "10m" + dongtai-web: + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-web:{ChangeThisVersion} + restart: always + ports: + - "80:80" + volumes: + - "./nginx.conf:/etc/nginx/nginx.conf" + depends_on: + - dongtai-server + logging: + driver: "json-file" + options: + max-size: "10m" + dongtai-server: + image: "registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{ChangeThisVersion}" + restart: always + sysctls: + net.core.somaxconn: 1024 + volumes: + - ./log/:/tmp/logstash/ + - ./config-tutorial.ini:/opt/dongtai/dongtai_conf/conf/config.ini + logging: + driver: "json-file" + options: + max-size: "10m" + dongtai-worker-task: + image: "registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{ChangeThisVersion}" + restart: always + entrypoint: ["/opt/dongtai/deploy/docker/entrypoint.sh", "beat"] + volumes: + - ./config-tutorial.ini:/opt/dongtai/dongtai_conf/conf/config.ini + logging: + driver: "json-file" + options: + max-size: "10m" + dongtai-worker-beat: + image: "registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{ChangeThisVersion}" + restart: always + entrypoint: ["/opt/dongtai/deploy/docker/entrypoint.sh", "worker-beat"] + volumes: + - ./config-tutorial.ini:/opt/dongtai/dongtai_conf/conf/config.ini + logging: + driver: "json-file" + options: + max-size: "10m" + dongtai-worker-high-freq: + image: "registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{ChangeThisVersion}" + restart: always + entrypoint: ["/opt/dongtai/deploy/docker/entrypoint.sh", "worker-high-freq"] + volumes: + - ./config-tutorial.ini:/opt/dongtai/dongtai_conf/conf/config.ini + logging: + driver: "json-file" + options: + max-size: "10m" + 
dongtai-worker-other: + image: "registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{ChangeThisVersion}" + restart: always + entrypoint: ["/opt/dongtai/deploy/docker/entrypoint.sh", "worker-other"] + volumes: + - ./config-tutorial.ini:/opt/dongtai/dongtai_conf/conf/config.ini + logging: + driver: "json-file" + options: + max-size: "10m" + dongtai-logrotate: + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-logrotate:{ChangeThisVersion} + restart: always + user: root + volumes: + - "./log/:/tmp/logstash/" + logging: + driver: "json-file" + options: + max-size: "10m" + dongtai-logstash: + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-logstash:{ChangeThisVersion} + restart: always + user: root + environment: + - DATABASE=dongtai-mysql:3306/dongtai_webapi + - USERNAME=root + - PASSWORD=dongtai-iast + volumes: + - "./log/:/tmp/logstash/" + logging: + driver: "json-file" + options: + max-size: "10m" +volumes: + mysql-vol: \ No newline at end of file diff --git a/deploy/docker-compose/dtctl b/deploy/docker-compose/dtctl new file mode 100755 index 000000000..7d43e6c5d --- /dev/null +++ b/deploy/docker-compose/dtctl @@ -0,0 +1,2627 @@ +#!/bin/bash +set -e +set -o pipefail +# set -x +_filename=$0 + +SKIP_MYSQL=false +SKIP_REDIS=false +CHANGE_THIS_VERSION="" +DOCKER_COMPOSE_FILE="" +UPGRADE_DIR=~/dongtai_iast_upgrade +DESTORY_DATA=false +PROJECT_NAME=dongtai +REGISRTY=1 + +MYSQL_IP='dongtai-mysql:3306' +MYSQL_USERNAME='root' +MYSQL_PASSWORD='dongtai-iast' +MYSQL_DATABASES='dongtai_webapi' + +DONGTAI_API="iast.huoxian.cn" +# DONGTAI_API="google.com" +DONGTAI_API_ACCESSABLE=true + + +DONGTAI_WEB_VER="" +DONGTAI_SERVER_VER="" +DONGTAI_REDIS_VER="" +DONGTAI_MYSQL_VER="" + +Info() { + echo -e "[Info] $1" >&2 +} + +Error() { + echo -e "\033[31m[Error] $1 \033[0m" >&2 +} + +Todo() { + echo -e "\033[36m[Todo] $1 \033[0m" >&2 +} + +Notice() { + echo -e "\033[33m[Important] $1 \033[0m" >&2 +} + +Usage() { + echo -e "\033[33m[Usage] $1 \033[0m" >&2 +} + +function print() { + echo Print:$1 >&2 +} + +# Display usage message function +usage() { + Info "Usage:" + Usage "\t$_filename -h Display usage message" + Usage "\t$_filename install -s mysql,redis -v 1.0.5 Install iast server" + Usage "\t$_filename remove|rm [-d] Uninstall iast server" + Usage "\t$_filename upgrade -t 1.1.2 Upgrade iast server" + Usage "\t$_filename version Get image version" + Usage "\t$_filename dbhash Get database schema hash" + Usage "\t$_filename dbschema Export database schema" + Usage "\t$_filename dbrestore -f FILEPATH Restore mysql database" + Usage "\t$_filename dbbackup -d FILEPATH Backup mysql database" + Usage "\t$_filename file Export docker-compose.yml " + Usage "\t$_filename logs server|web|mysql|engine Extract tail logs" + Usage "\t$_filename start [PROJECT] Start all container,default project: dongtai" + Usage "\t$_filename index Initialize elasticsearch index" + +} + +OUT="$(uname -s)" +case "${OUT}" in +Linux*) machine=Linux ;; +Darwin*) machine=Mac ;; +CYGWIN*) machine=Cygwin ;; +MINGW*) machine=MinGw ;; +*) machine="UNKNOWN:${OUT}" ;; +esac +CURRENT_PATH=$( + cd "$(dirname "$0")" || exit +) +cd "$CURRENT_PATH" || exit + +sha=sha1sum +if [ $machine == "Mac" ]; then + sha=shasum +fi + +trim() { + local trimmed="$1" + # Strip leading spaces. + while [[ $trimmed == ' '* ]]; do + trimmed="${trimmed## }" + done + # Strip trailing spaces. 
+ while [[ $trimmed == *' ' ]]; do + trimmed="${trimmed%% }" + done + echo "$trimmed" +} + +function realpath() { + [[ $1 == /* ]] && echo "$1" || echo "$PWD/${1#./}" +} + +shell_path=$(realpath "$0") +shell_dir=$(dirname "$shell_path") + +# compare version +function first_gt_second() { + first=${1//./} + second=${2//./} + + aa=$(( first - second )) + if [ $aa -gt 0 ]; then + echo true + else + echo false + fi +} + +# check if dongtai iast server is Running +function is_server_running() { + if [ $(docker ps | grep "dongtai-web:" | wc -l) -gt 0 ]; then + echo true + return + fi + echo false + return +} + +# exit when server is not running +function exit_not_running() { + if [ $(is_server_running) != true ]; then + Error "Server is not running!" + exit 1 + fi +} + +# get docker-compose project_name +get_project_name() { + retval=$(docker inspect --format='{{ index .Config.Labels "com.docker.compose.project" }}' $(docker ps | grep "dongtai-web:" | awk '{print $1}')) + echo "$retval" +} +function current_image_version() { + PROJECT_NAME=$(get_project_name) + if [ -z $1 ]; then + ver=$(docker ps --filter "label=com.docker.compose.project=$PROJECT_NAME" | grep "dongtai-web:" | awk '{split($2,a,":");print a[2]}' | tail -n 1 ) + else + ver=$(docker ps --filter "label=com.docker.compose.project=$PROJECT_NAME" | grep "$1:" | awk '{split($2,a,":");print a[2]}'| tail -n 1 ) + fi + echo "$ver" +} + +function get_latest_release_version() { + ver=$(cat updaterecord.txt | awk 'NR==2 {print $1}') + echo "$ver" +} + +function get_web_service_port() { + PROJECT_NAME=$(get_project_name) + WEB_CONTAINER_ID=$(docker ps --filter "label=com.docker.compose.project=$PROJECT_NAME" | grep 'dongtai-web:' | awk '{print $1}') + WEB_SERVICE_PORT=$(docker inspect --format='{{range $p, $conf := .NetworkSettings.Ports}} {{(index $conf 0).HostPort}} {{end}}' $WEB_CONTAINER_ID) + WEB_SERVICE_PORT=$(trim $WEB_SERVICE_PORT) + + echo "$WEB_SERVICE_PORT" + +} + +# get mysql container id +function get_mysql_container_id() { + mysql_container_id="" + PROJECT_NAME=$(get_project_name) + mysql_container_id=$(docker ps -a --filter "label=com.docker.compose.project=$PROJECT_NAME" | grep "mysql" | awk '{print $1}') + echo "$mysql_container_id" + +} + +# get server container id +function get_server_container_id() { + server_container_id="" + PROJECT_NAME=$(get_project_name) + server_container_id=$(docker ps -a --filter "label=com.docker.compose.project=dongtai" | grep "server" | awk '{print $NF}' | grep dongtai-server) + echo "$server_container_id" + +} + +# check if image registry is in China +function get_image_registry() { + if [ "$1" == 0 ]; then + registry_url="dongtai" + elif [ "$1" == 1 ]; then + registry_url="registry.cn-beijing.aliyuncs.com/huoxian_pub" + else + registry_url=$(docker ps --filter "label=com.docker.compose.project=$PROJECT_NAME" | grep "dongtai-web:" | awk 'i=index($2,"dongtai-web:") {print substr($2,0,i-2)}') + fi + echo "$registry_url" +} + +# checkout network +function network() { + local timeout=3 + local target=$DONGTAI_API + local ret_code=$(curl -I -s --connect-timeout ${timeout} ${target} -w %{http_code} | tail -n1) + if [ "$ret_code" == "200" ]; then + echo true + return + else + echo false + return + fi + echo false + return +} + + if [ ! $(network) == true ]; then + Error "Internet unavailable." 
+ exit 1 + fi +# start to fetch the latest dtctl +# git pull + +# get the latest docker image tag from + +get_latest_image_tag_from_dockerhub() { + image="$1" + Info "start to get latest tag of $image" + tags=`wget -q https://registry.hub.docker.com/v1/repositories/${image}/tags -O - | sed -e 's/[][]//g' -e 's/"//g' -e 's/ //g' | tr '}' '\n' | awk -F: '{print $3}'` + + if [ -n "$2" ] + then + tags=` echo "${tags}" | grep "$2" ` + fi + echo "${tags}" | grep -E '^([0-9]+\.){0,2}(\*|[0-9]+)$' | tail -n 1 +} + +declare -a EXCLUDED_TABLES=( + "dongtai_webapi.sca2_package" + "dongtai_webapi.sca2_vul" + "dongtai_webapi.sca2_vul_package" + "dongtai_webapi.sca2_vul_package_range" + "dongtai_webapi.sca2_vul_package_version" + "dongtai_webapi.mysql_version_control" + "dongtai_webapi.project_version_control" +) + +IGNORED_TABLES_STRING="" +for TABLE in "${EXCLUDED_TABLES[@]}"; do + : + IGNORED_TABLES_STRING+=" --ignore-table=${TABLE} " +done + +get_database_schema() { + _mysql_container_id=$(get_mysql_container_id) + if [ -z $_mysql_container_id ]; then + Error "get_database_schema:mysql_container_id not exist" + exit 1 + fi + + echo "dongtai-iast" | docker exec -i $_mysql_container_id /bin/bash -c "mysqldump -u root -d dongtai_webapi ${IGNORED_TABLES_STRING} -p --skip-set-charset --skip-comments --skip-opt | sed 's/ AUTO_INCREMENT=[0-9]*//g' | sed 's/\/\*!*.*//g' " +} + +function current_hash() { + retval=$(get_database_schema | $sha | awk '{print $1}') + echo "$retval" +} + +function get_database_data_dir() { + mysql_id=$(get_mysql_container_id) + if [ -z $mysql_id ]; then + Notice "Mysql instance is not running" + exit + fi + dir=$(docker inspect --format='{{range .Mounts}} {{.Source}} {{end}}' $(get_mysql_container_id)) + echo "$dir" +} + +now=$(date '+%Y-%m-%d-%H-%M-%S') +function backup_mysql() { + backup_dir=$UPGRADE_DIR + if [ ! -z $1 ]; then + backup_dir=$1 + fi + backup_filename=$backup_dir/dongtai_iast-$now.sql + mkdir -p $backup_dir + Notice "Start to backup exist data,this will take a few minutes, don’t interrupt." + docker exec -i $(get_mysql_container_id) /bin/bash -c "mysqldump -uroot -pdongtai-iast --single-transaction -R -E --default-character-set=utf8mb4 --databases 'dongtai_webapi' " >$backup_filename || exit 1 + Info "Finished backup exist data..." + Info "$backup_filename" +} + +function restore_mysql() { + if [ -z $1 ]; then + Error "Input data filepath." + exit 1 + fi + # before restore,backup current data + backup_mysql + Info "Start to restore data, data filepath: $1" + docker exec -i $(get_mysql_container_id) /bin/bash -c "mysql -uroot -pdongtai-iast --default-character-set=utf8mb4 dongtai_webapi " <$1 || exit 1 + + Notice "Database restore finished." + +} + +if [ $(is_server_running) == true ]; then + if [ ! -z "$(get_mysql_container_id)" ]; then + MOUNT_TYPE=$(docker inspect --format='{{range .Mounts}} {{.Type}} {{end}}' $(get_mysql_container_id)) + fi +fi + +# check network and modify config.ini +function check_network() { + if [ $(network) == true ]; then + Info "Dongtai Online API accessable!" + else + DONGTAI_API_ACCESSABLE=false + if [ "$machine" == "Mac" ]; then + sed -i '' 's/^base_url = https\:\/\/iast\.huoxian\.cn\/openapi\/sca\/v1/base_url = http:\/\/dongtai-server:8000\/sca\/v1/' config-tutorial.ini + else + sed -i 's/^base_url = https\:\/\/iast\.huoxian\.cn\/openapi\/sca\/v1/base_url = http:\/\/dongtai-server:8000\/sca\/v1/' config-tutorial.ini + fi + Info "Dongtai Online API unaccessable!" 
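        # When the online API is unreachable, the sed above tries to rewrite the [sca]
        # base_url line in config-tutorial.ini from the iast.huoxian.cn endpoint to
        # http://dongtai-server:8000/sca/v1 so SCA lookups are handled by the local
        # dongtai-server.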
+ fi +} + +# Info "Mount Type:$MOUNT_TYPE" + +# create docker compose file +# Usage: create_docker_compose_file $VERSION $WEB_SERVICE_PORT $MYSQL_DATA_LOCATION +function create_docker_compose_file() { + + if [ ! -z "$1" ]; then + CHANGE_THIS_VERSION=$1 + fi + + if [ ! -z "$2" ]; then + WEB_SERVICE_PORT=$2 + fi + + if [ ! -z "$4" ] && [ $MOUNT_TYPE == "bind" ]; then + _DATA_DIR=$3 + else + _DATA_DIR="mysql-vol" + fi + Info "Data dir:$_DATA_DIR" + MYSQL_STR="" + REDIS_STR="" + + if [ "$3" == "install" ]; then + if [ "$REGISRTY" == 1 ]; then + registry=$(get_image_registry 1) + else + registry=$(get_image_registry 0) + fi + else + registry=$(get_image_registry 2) + fi + + if [ "$3" == "load_file" -o "$3" == "rm" ]; then + DONGTAI_WEB_VER=$(current_image_version dongtai-web) + DONGTAI_SERVER_VER=$(current_image_version dongtai-server) + DONGTAI_REDIS_VER=$(current_image_version dongtai-redis) + DONGTAI_MYSQL_VER=$(current_image_version dongtai-mysql) + fi + + + + MYSQL_IMAGE_NAME="dongtai-mysql" + if [ $DONGTAI_API_ACCESSABLE == false ]; then + if [[ $(first_gt_second $CHANGE_THIS_VERSION 1.2.0) == true ]]; then + MYSQL_IMAGE_NAME="dongtai-mysql-sca" + fi + fi + + if [ $DONGTAI_API_ACCESSABLE == false ]; then + if [[ $(first_gt_second $CHANGE_THIS_VERSION 1.8.2) == true ]]; then + MYSQL_IMAGE_NAME="dongtai-mysql" + fi + fi + + + if [ $SKIP_MYSQL == false ]; then + export MYSQL_STR=$( + cat <$output_location || exit 1 + echo "" + Notice "Exported database schema to: $output_location" + ;; + + "dbbackup") + exit_not_running + if [ ! 0 == $# ]; then # if options provided + while getopts ":d:h" opt; do + case "$opt" in + "d") + data_location=$OPTARG + ;; + "h") + Usage "\t$_filename dbbackup -d FILEPATH Backup mysql database" + exit 1 + ;; + ?) # Invalid option + Error "Invalid option: -${OPTARG}" + usage + exit 1 # Exit with erro + ;; + esac + done + fi + if [ -z "$data_location" ]; then + backup_mysql + else + backup_mysql $data_location + fi + + ;; + + "dbrestore") + if [ ! 0 == $# ]; then # if options provided + while getopts ":f:h" opt; do + case "$opt" in + "f") + data_location=$OPTARG + ;; + "h") + Usage "\t$_filename restore -f FILEPATH Restore mysql database" + exit 1 + ;; + ?) # Invalid option + Error "Invalid option: -${OPTARG}" + usage + exit 1 # Exit with erro + ;; + esac + done + fi + if [ -z "$data_location" ]; then + Error "Data filepath required." + exit 1 + else + restore_mysql $data_location + fi + + ;; + + "file") + exit_not_running + compose_file=$(create_docker_compose_file $(current_image_version) $(get_web_service_port) load_file $(get_database_data_dir) ) + file_path="$shell_dir/docker-compose.yml" + Info "File location:$file_path" + echo "$compose_file" >$file_path | exit 1 + ;; + + "logs") + exit_not_running + shift $((OPTIND - 1)) + op=$1 + shift + PROJECT_NAME=$(get_project_name) + docker logs -f --tail=200 $(docker ps | grep $PROJECT_NAME | grep "dongtai-$op:" | awk '{print $1}') || exit 1 + ;; + "install") # install iast server + #unset OPTIND + # Check if the Docker service is turned on + check_docker() { + Info "check docker servie status." + docker ps 1>/dev/null 2>/dev/null + + if [ $? -ne 0 ]; then + Error "docker service is down. please start docker service and rerun." + exit + else + Info "docker service is up." + fi + } + + check_docker_compose() { + Info "check docker-compose servie status." 
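+        # Compose v2 typically reports itself as "Docker Compose version v2.2.3"
+        # (illustrative output); the pipeline below keeps field 4 ("v2.2.3"), and
+        # first_gt_second later compares it, without the leading "v", against 2.1.0.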
+ DOCKER_COMPOSE_V2=`docker-compose --version | grep -E 'version.*v2' | awk '{print $4}'` || true + DOCKER_COMPOSE_V3="Please download docker-compose version 2.1.0 or above: https://github.com/docker/compose" + if ! [ -x "$(command -v docker-compose)" ]; then + Error 'docker-compose not installed.' + exit 1 + else + if [ ! -n "$DOCKER_COMPOSE_V2" ]; then + Error "$DOCKER_COMPOSE_V3" + exit 1 + else + if [[ $(first_gt_second ${DOCKER_COMPOSE_V2:1} 2.1.0) == true ]]; then + Info "docker-compose service is $DOCKER_COMPOSE_V2." + else + Error "$DOCKER_COMPOSE_V3" + exit 1 + fi + fi + fi + } + + # Info "mysql persistence" + # mkdir data + start_docker_compose() { + Info "Starting docker compose ..." + + if [ ! -z "$1" ]; then + docker_compose_file=$(create_docker_compose_file $CHANGE_THIS_VERSION $WEB_SERVICE_PORT "install" $1) + else + docker_compose_file=$(create_docker_compose_file $CHANGE_THIS_VERSION $WEB_SERVICE_PORT "install") + fi + + if [ "$docker_compose_file" = "" ]; then + Error "Create docker-compose.yaml error" + exit 1 + fi + + docker-compose -p $PROJECT_NAME -f <(echo "$docker_compose_file") up -d --remove-orphans + } + + # Specify the port of Web, OpenAPI service and check whether it is available + check_port() { + Info "check port status" + + is_port_inuse=false + + read -p "[+] please input web service port, default [80]:" WEB_SERVICE_PORT + if [ -z "$WEB_SERVICE_PORT" ]; then + WEB_SERVICE_PORT=80 + fi + + if ! [ -x "$(command -v lsof)" ]; then + if [ $(netstat -tunlp | grep $WEB_SERVICE_PORT | grep "LISTEN" | wc -l || true) -gt 0 ]; then + is_port_inuse=true + fi + else + if [ $(lsof -i:$WEB_SERVICE_PORT | grep "LISTEN" | wc -l || true) -gt 0 ]; then + is_port_inuse=true + fi + fi + + if [ $is_port_inuse == true ]; then + Error "port $WEB_SERVICE_PORT is already in use. please change default port." + exit + else + Info "port $WEB_SERVICE_PORT is ok." + fi + + } + + DATA_DIR="$shell_dir/data" + if [ ! 0 == $# ]; then # if options provided + while getopts ":d:m:s:v:r:h:" optname; do + case "$optname" in + "d") + DATA_DIR=$OPTARG + ;; + "s") + SKIP=$OPTARG + array=(${SKIP//,/ }) + for var in "${array[@]}"; do + if [ "$var" == "mysql" ]; then + SKIP_MYSQL=true + elif [ "$var" == "redis" ]; then + SKIP_REDIS=true + fi + done + ;; + "v") + CHANGE_THIS_VERSION=$OPTARG + ;; + "r") + REGISRTY=$OPTARG + ;; + "h") + usage + exit 1 + ;; + ":") + Error "option -${OPTARG} requires an argument" + usage + exit 1 + ;; + "?") + Error "Invalid option: -${OPTARG}" + usage + exit 1 # Exit with erro + ;; + esac + done + fi + + if [ -z "$CHANGE_THIS_VERSION" ] || [ "$CHANGE_THIS_VERSION" == "" ]; then + # CHANGE_THIS_VERSION=$(get_latest_release_version) + Info $CHANGE_THIS_VERSION + DONGTAI_WEB_VER=$(get_latest_image_tag_from_dockerhub dongtai/dongtai-web) + DONGTAI_SERVER_VER=$(get_latest_image_tag_from_dockerhub dongtai/dongtai-server) + DONGTAI_REDIS_VER=$(get_latest_image_tag_from_dockerhub dongtai/dongtai-redis) + DONGTAI_MYSQL_VER=$(get_latest_image_tag_from_dockerhub dongtai/dongtai-mysql) + CHANGE_THIS_VERSION=$DONGTAI_MYSQL_VER + else + DONGTAI_WEB_VER=$CHANGE_THIS_VERSION + DONGTAI_SERVER_VER=$CHANGE_THIS_VERSION + DONGTAI_REDIS_VER=$CHANGE_THIS_VERSION + DONGTAI_MYSQL_VER=$CHANGE_THIS_VERSION + fi + + + + check_network + check_docker + check_docker_compose + check_port + #start_docker_compose "$DATA_DIR" + start_docker_compose + + if [ $? -ne 0 ]; then + Error "Installation failed,Something wrong!" + exit + else + Notice "Installation success!" 
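+        # Once the containers are up, the web console listens on the port chosen in
+        # check_port (default 80), i.e. http://<host-ip>:$WEB_SERVICE_PORT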
+ fi + + ;; + "remove" | "rm") + exit_not_running + if [ ! 0 == $# ]; then # if options provided + while getopts ":d:h:" opt; do + case ${1} in + "-d") + DESTORY_DATA=true + ;; + "-h") + Usage "\t$_filename rm [-d] Uninstall iast server" + exit 1 + ;; + ?) # Invalid option + Error "Invalid option: -${OPTARG}" + usage + exit 1 # Exit with erro + ;; + esac + done + fi + + docker_compose_file=$(create_docker_compose_file $(current_image_version) $(get_web_service_port) "rm") + + PROJECT_NAME=$(get_project_name) + if [ "$DESTORY_DATA" == true ]; then + data_dir=$(get_database_data_dir) + Notice "Data volume will be deleted." + docker-compose -p $PROJECT_NAME -f <(echo "$docker_compose_file") down -v + #docker volume rm dongtai_iast-vol + if [ "$MOUNT_TYPE" == "bind" ]; then + Notice "Data will be deleted in $data_dir" + rm -rf $data_dir + fi + else + docker-compose -p $PROJECT_NAME -f <(echo "$docker_compose_file") down + fi + ;; + + "upgrade") + exit_not_running + TO_VERSION="" + FROM_VERSION="" + SHA_FILE=updaterecord.txt + WEB_CONTAINER_ID=$(docker ps | grep $PROJECT_NAME | grep 'dongtai-web:' | awk '{print $1}') + + function check_update_record_file() { + if [ ! -f "$SHA_FILE" ]; then + Error "$SHA_FILE does not exists!" + exit + fi + } + + function check_schema_hash() { + Info "Check database schema ..." + + # cat updaterecord.txt | awk "{ if($1==$FROM_VERSION) print $4}" + + FROM_DB_HASH=$(cat updaterecord.txt | awk -v FROM_VERSION=$FROM_VERSION '{ if($1==FROM_VERSION) print $4}') + CURRENT_DATABASE_HASH=$(current_hash) + + Info "FROM_DB_HASH:$FROM_DB_HASH" + Info "CURRENT_DATABASE_HASH:$CURRENT_DATABASE_HASH" + + if [ ! $CURRENT_DATABASE_HASH == $FROM_DB_HASH ]; then + Error "Your current database hash value not equals to the verison $FROM_VERSION, please check." + exit + fi + + Info "Database schema correct ..." + } + + function execute_update() { + # extract sql name and reverse list + + SQL_NAMES=$(cat updaterecord.txt | awk "/${TO_VERSION//./\\.}/,/${FROM_VERSION//./\\.}/ {print \$2}" | grep -vF "$FROM_VERSION" | awk "{array[NR]=\$0} END { for(i=NR;i>0;i--) {print array[i];} }") + mysql_container_id=$(get_mysql_container_id) + server_container_id=$(get_server_container_id) + # check whether mysql is ready + while :; do + Info "start to check mysql status ..." + tables=$(docker exec -i $mysql_container_id /bin/bash -c "mysql -uroot -p'dongtai-iast' --execute \"SHOW DATABASES;\"" || true) + if [[ ${tables["mysql"]} ]]; then + Notice "mysql instance ready!" + break + fi + sleep 2 + done + + # change sql store directory from /docker-entrypoint-initdb.d to /sql + # from 1.3.0 + sql_dir="/docker-entrypoint-initdb.d" + if [[ $(first_gt_second $TO_VERSION 1.2.0) == true ]]; then + sql_dir="/sql" + fi + + Info "Sql store directory: $sql_dir" + + # sql downloaded,start to execute sql + Info $SQL_NAMES + for SQL in $SQL_NAMES; do + Info "Start to load sql:[$SQL]" + docker exec -i $mysql_container_id /bin/bash -c "mysql -uroot -p'dongtai-iast' dongtai_webapi < $sql_dir/$SQL" + done + if [ $TO_VERSION = 1.8.0 ]; then + docker exec -i $mysql_container_id /bin/bash -c "mysql -uroot -p'dongtai-iast' dongtai_webapi < $sql_dir/utf8mb4_general_ci.sql" + Info "Sql character ok!" + docker exec -i $server_container_id /bin/bash -c "python manage.py update" + else + Info "mysql character set check completed OK!!" + fi + } + + function check_after_execute() { + Info "Check result..." 
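+        # Rows in updaterecord.txt have the form "<version> <sql file> <hash> <schema hash>";
+        # the awk below picks column 4 (the expected schema hash) for TO_VERSION and compares
+        # it against the hash of the live schema computed by current_hash.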
+ TO_DB_HASH=$(cat updaterecord.txt | awk -v TO_VERSION=$TO_VERSION '{ if($1==TO_VERSION) print $4}') + CURRENT_DATABASE_HASH=$(current_hash) + + Info "TO_DB_HASH:$TO_DB_HASH" + Info "CURRENT_DATABASE_HASH:$CURRENT_DATABASE_HASH" + + if [ ! $CURRENT_DATABASE_HASH == $TO_DB_HASH ]; then + Error "Your current database hash value not equals to the target:$TO_DB_HASH." + Error "Did you change the database schema manually?" + exit + fi + + Info "Current database schema correct ..." + Info "Upgrade Success ..." + } + + function upgrade_docker_image() { + WEB_PORT=$(docker inspect --format='{{range $p, $conf := .NetworkSettings.Ports}} {{(index $conf 0).HostPort}} {{end}}' $WEB_CONTAINER_ID) + WEB_PORT=$(trim $WEB_PORT) + + # 创建 DOCKER_COMPOSE_FILE + docker_compose_file=$(create_docker_compose_file $TO_VERSION $(get_web_service_port) $(get_database_data_dir) "upgrade") + PROJECT_NAME=$(get_project_name) + Info "Start to pull new images with tag $TO_VERSION" + docker-compose -p $PROJECT_NAME -f <(echo "$docker_compose_file") pull + + Info "Start new containers with tag $TO_VERSION " + docker-compose -p $PROJECT_NAME -f <(echo "$docker_compose_file") up -d --remove-orphans + + } + + function summary() { + Notice "-----Upgrade summary start-----" + Info "Ugrade from $FROM_VERSION to $TO_VERSION. \n" + Info "Backup file : $backup_filename" + Info "Executed sql as follow: " + for SQL in $SQL_NAMES; do + Info $SQL + done + Info "Ugrade workdir is $UPGRADE_DIR, this can be delete after upgraded. " + Notice "-----Upgrade summary end-----" + } + + if [ ! 0 == $# ]; then # if options provided + while getopts ":t:h" optname; do + case "$optname" in + "t") + TO_VERSION=$OPTARG + ;; + "h") + Usage "\t$_filename upgrade -f 1.0.5 -t 1.1.2 Upgrade iast server" + exit 1 + ;; + ":") ;; + + "?") + Error "Unknown option $OPTARG" + exit 1 + ;; + esac + done + fi + + FROM_VERSION=$(current_image_version) + UPGRADE_TO_VERSION=$TO_VERSION + if [ -z "$TO_VERSION" ]; then + TO_VERSION=$(get_latest_release_version) + UPGRADE_TO_VERSION="latest" + DONGTAI_WEB_VER=$(get_latest_image_tag_from_dockerhub dongtai/dongtai-web) + DONGTAI_SERVER_VER=$(get_latest_image_tag_from_dockerhub dongtai/dongtai-server) + DONGTAI_REDIS_VER=$(get_latest_image_tag_from_dockerhub dongtai/dongtai-redis) + DONGTAI_MYSQL_VER=$(get_latest_image_tag_from_dockerhub dongtai/dongtai-mysql) + else + DONGTAI_WEB_VER=$TO_VERSION + DONGTAI_SERVER_VER=$TO_VERSION + DONGTAI_REDIS_VER=$TO_VERSION + DONGTAI_MYSQL_VER=$TO_VERSION + fi + + Info "Upgrade: $FROM_VERSION -> $UPGRADE_TO_VERSION" + + f_gt_t=$(first_gt_second $FROM_VERSION $TO_VERSION) + + if [[ $f_gt_t == true ]]; then + Error "Your current verison is $FROM_VERSION, you can upgrade to later version. $TO_VERSION is invalid." + exit 1 + fi + + check_network + backup_mysql + check_update_record_file + #check_schema_hash + upgrade_docker_image + execute_update + #check_after_execute + summary + + ;; + "index") + ES_HOST="" + ES_ALIAS="dongtai-iast" + if [ ! 
0 == $# ]; then # if options provided + while getopts ":t" optname; do + case "$optname" in + "t") + read -p "请输入您要创建的 elasticsearch 索引前缀名: 如 dongtai-iast :" ES_ALIA + if [ -z "$ES_ALIA" ]; then + Info "将使用默认前缀名: $ES_ALIAS" + else + ES_ALIAS=$(echo $ES_ALIA | tr "[A-Z]" "[a-z]" | sed 's/[^ -z]/x/g') + Info "索引前缀名已经转换为小写 请记录 $ES_ALIAS" + fi + ;; + ":") ;; + "?") + Error "Unknown option $OPTARG" + exit 1 + ;; + esac + done + fi + function dongtai_es() { + read -p "请输入您的 elasticsearch 地址: 如 http://127.0.0.1:9200 :" ES_IP + read -p "请输入您的 elasticsearch 用户: 如 elastic (无密码直接回车):" ES_USERNAME + read -p "请输入您的 elasticsearch 密码: 如 123456 (无密码直接回车):" ES_PASSWORD + if [ -z "$ES_USERNAME" ]; then + ES_HOST=$ES_IP + else + ES_APP=$(echo $ES_IP | sed "s#//# #g") + ARR=($ES_APP) + ES_HOST=${ARR[0]}//${ES_USERNAME}:${ES_PASSWORD}@${ARR[1]} + fi + Info $ES_HOST + ES_CODE=`curl -I -m 10 -o /dev/null -s -w %{http_code} ${ES_HOST}` + if [ $ES_CODE -eq 200 ]; then + Info $ES_CODE + else + Info "Please check that your elasticsearch configuration is correct !!!" + exit 1 + fi + } + + function dongtai_index() { + Notice "-----index create-----" + Info "index created $ES_HOST !!!" + #ilm + curl -H 'content-Type:application/json' -XPUT ${ES_HOST}/_ilm/policy/${ES_ALIAS}-dongtai-ilm-policy -d ' + { + "policy": { + "phases": { + "hot": { + "min_age": "0ms", + "actions": { + "rollover": { + "max_age": "90d", + "max_size": "60gb" + }, + "set_priority": { + "priority": 999 + } + } + } + } + } + }' + #template-dongtai-v1-method-pool + curl -H 'content-Type:application/json' -XPUT ${ES_HOST}/_template/${ES_ALIAS}-dongtai-v1-method-pool-template?include_type_name -d ' + { + "order": 1, + "index_patterns": [ + "'${ES_ALIAS}'-dongtai-v1-method-pool-index-*" + ], + "settings": { + "index": { + "lifecycle": { + "name": "'${ES_ALIAS}'-dongtai-ilm-policy", + "rollover_alias": "'${ES_ALIAS}'-alias-dongtai-v1-method-pool" + }, + "highlight": { + "max_analyzed_offset": "10000000" + }, + "refresh_interval": "30s", + "analysis": { + "analyzer": { + "ngram_analyzer": { + "tokenizer": "ngram_tokenizer" + } + }, + "tokenizer": { + "ngram_tokenizer": { + "token_chars": [ + "letter", + "digit", + "symbol", + "punctuation" + ], + "min_gram": "1", + "type": "ngram", + "max_gram": "2" + } + } + }, + "number_of_shards": "4", + "number_of_replicas": "1" + } + }, + "mappings": { + "_doc": { + "dynamic": "true", + "_meta": {}, + "_source": { + "includes": [], + "excludes": [] + }, + "dynamic_date_formats": [ + "strict_date_optional_time", + "yyyy/MM/dd HH:mm:ss Z||yyyy/MM/dd Z" + ], + "dynamic_templates": [], + "date_detection": true, + "numeric_detection": false, + "properties": { + "@timestamp": { + "type": "date" + }, + "agent_id": { + "type": "integer" + }, + "bind_project_id": { + "type": "long" + }, + "clent_ip": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "context_path": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "create_time": { + "type": "date", + "ignore_malformed": false, + "format": "strict_date_optional_time||epoch_second" + }, + "http_method": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "http_protocol": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "http_scheme": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "id": { + "type": "integer" + }, + "language": { + "type": "text", + "fields": { + "keyword": { + "type": 
"keyword", + "ignore_above": 256 + } + } + }, + "method_pool": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "pool_sign": { + "type": "keyword" + }, + "project_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "project_version_id": { + "type": "long" + }, + "req_data": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "req_header": { + "type": "text" + }, + "req_header_for_search": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "req_params": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "res_body": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "res_header": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "token": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "update_time": { + "type": "date", + "ignore_malformed": false, + "format": "strict_date_optional_time||epoch_second" + }, + "uri": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "uri_sha1": { + "type": "keyword" + }, + "url": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "user_id": { + "type": "long" + } + } + } + } + }' + + + #template-dongtai-v1-vulnerability + curl -H 'content-Type:application/json' -XPUT ${ES_HOST}/_template/${ES_ALIAS}-dongtai-v1-vulnerability-template?include_type_name -d ' + { + "order": 1, + "index_patterns": [ + "'${ES_ALIAS}'-dongtai-v1-vulnerability-index-*" + ], + "settings": { + "index": { + "lifecycle": { + "name": "'${ES_ALIAS}'-dongtai-ilm-policy", + "rollover_alias": "'${ES_ALIAS}'-alias-dongtai-v1-vulnerability" + }, + "highlight": { + "max_analyzed_offset": "10000000" + }, + "refresh_interval": "30s", + "analysis": { + "analyzer": { + "ngram_analyzer": { + "tokenizer": "ngram_tokenizer" + } + }, + "tokenizer": { + "ngram_tokenizer": { + "token_chars": [ + "letter", + "digit", + "symbol", + "punctuation" + ], + "min_gram": "1", + "type": "ngram", + "max_gram": "2" + } + } + }, + "number_of_shards": "4", + "number_of_replicas": "1" + } + }, + "mappings": { + "_doc": { + "dynamic": "true", + "_meta": {}, + "_source": { + "includes": [], + "excludes": [] + }, + "dynamic_date_formats": [ + "strict_date_optional_time", + "yyyy/MM/dd HH:mm:ss Z||yyyy/MM/dd Z" + ], + "dynamic_templates": [], + "date_detection": true, + "numeric_detection": false, + "properties": { + "@timestamp": { + "type": "date" + }, + "agent_id": { + "type": "integer" + }, + "bind_project_id": { + "type": "integer" + }, + "bottom_stack": { + "type": "text" + }, + "client_ip": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "context_path": { + "type": "text" + }, + "counts": { + "type": "integer" + }, + "first_time": { + "type": "date", + "ignore_malformed": false, + "format": "strict_date_optional_time||epoch_second" + }, + "full_stack": { + "type": "text" + }, + "hook_type_id": { + "type": "integer" + }, + "http_method": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "http_protocol": { + "type": "text" + }, + "http_scheme": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "id": { + 
"type": "integer" + }, + "is_del": { + "type": "integer" + }, + "language": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "latest_time": { + "type": "date", + "ignore_malformed": false, + "format": "strict_date_optional_time||epoch_second" + }, + "latest_time_desc": { + "type": "integer" + }, + "level_id": { + "type": "integer" + }, + "level_id_desc": { + "type": "integer" + }, + "method_pool_id": { + "type": "integer" + }, + "param_name": { + "type": "text" + }, + "project_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "project_version_id": { + "type": "integer" + }, + "req_data": { + "type": "text" + }, + "req_header": { + "type": "text" + }, + "req_params": { + "type": "text" + }, + "res_body": { + "type": "text" + }, + "res_header": { + "type": "text" + }, + "search_keywords": { + "type": "text" + }, + "status": { + "type": "text" + }, + "status_id": { + "type": "integer" + }, + "strategy_id": { + "type": "integer" + }, + "taint_position": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "taint_value": { + "type": "text" + }, + "token": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "top_stack": { + "type": "text" + }, + "uri": { + "type": "text" + }, + "url": { + "type": "text" + }, + "user_id": { + "type": "integer" + }, + "vul_title": { + "type": "text" + } + } + } + } + }' + + #template-dongtai-v1-asset + curl -H 'content-Type:application/json' -XPUT ${ES_HOST}/_template/${ES_ALIAS}-dongtai-v1-asset-template?include_type_name -d ' + { + "order": 1, + "index_patterns": [ + "'${ES_ALIAS}'-dongtai-v1-asset-index-*" + ], + "settings": { + "index": { + "lifecycle": { + "name": "'${ES_ALIAS}'-dongtai-ilm-policy", + "rollover_alias": "'${ES_ALIAS}'-alias-dongtai-v1-asset" + }, + "highlight": { + "max_analyzed_offset": "10000000" + }, + "refresh_interval": "30s", + "analysis": { + "analyzer": { + "ngram_analyzer": { + "tokenizer": "ngram_tokenizer" + } + }, + "tokenizer": { + "ngram_tokenizer": { + "token_chars": [ + "letter", + "digit", + "symbol", + "punctuation" + ], + "min_gram": "1", + "type": "ngram", + "max_gram": "2" + } + } + }, + "number_of_shards": "4", + "number_of_replicas": "1" + } + }, + "mappings": { + "_doc": { + "dynamic": "true", + "_meta": {}, + "_source": { + "includes": [], + "excludes": [] + }, + "dynamic_date_formats": [ + "strict_date_optional_time", + "yyyy/MM/dd HH:mm:ss Z||yyyy/MM/dd Z" + ], + "dynamic_templates": [], + "date_detection": true, + "numeric_detection": false, + "properties": { + "@timestamp": { + "type": "date" + }, + "agent_id": { + "type": "long" + }, + "department_id": { + "type": "long" + }, + "dependency_level": { + "type": "long" + }, + "dt": { + "type": "date", + "ignore_malformed": false, + "format": "strict_date_optional_time||epoch_second" + }, + "id": { + "type": "long" + }, + "is_del": { + "type": "long" + }, + "language": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "last_version": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "level_id": { + "type": "long" + }, + "license": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "package_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + 
}, + "package_path": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "parent_dependency_id": { + "type": "long" + }, + "project_id": { + "type": "long" + }, + "project_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "project_version_id": { + "type": "long" + }, + "safe_version": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "signature_algorithm": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "signature_value": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "talent_id": { + "type": "long" + }, + "user_id": { + "type": "long" + }, + "version": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "vul_count": { + "type": "long" + }, + "vul_critical_count": { + "type": "long" + }, + "vul_high_count": { + "type": "long" + }, + "vul_info_count": { + "type": "long" + }, + "vul_low_count": { + "type": "long" + }, + "vul_medium_count": { + "type": "long" + } + } + } + } + }' + + #template-dongtai-v1-asset-vul + curl -H 'content-Type:application/json' -XPUT ${ES_HOST}/_template/${ES_ALIAS}-dongtai-v1-asset-vul-template?include_type_name -d ' + { + "order": 1, + "index_patterns": [ + "'${ES_ALIAS}'-dongtai-v1-asset-vul-index-*" + ], + "settings": { + "index": { + "lifecycle": { + "name": "'${ES_ALIAS}'-dongtai-ilm-policy", + "rollover_alias": "'${ES_ALIAS}'-alias-dongtai-v1-asset-vul" + }, + "highlight": { + "max_analyzed_offset": "10000000" + }, + "refresh_interval": "30s", + "analysis": { + "analyzer": { + "ngram_analyzer": { + "tokenizer": "ngram_tokenizer" + } + }, + "tokenizer": { + "ngram_tokenizer": { + "token_chars": [ + "letter", + "digit", + "symbol", + "punctuation" + ], + "min_gram": "1", + "type": "ngram", + "max_gram": "2" + } + } + }, + "number_of_shards": "4", + "number_of_replicas": "1" + } + }, + "mappings": { + "_doc": { + "dynamic": "true", + "_meta": {}, + "_source": { + "includes": [], + "excludes": [] + }, + "dynamic_date_formats": [ + "strict_date_optional_time", + "yyyy/MM/dd HH:mm:ss Z||yyyy/MM/dd Z" + ], + "dynamic_templates": [], + "date_detection": true, + "numeric_detection": false, + "properties": { + "@timestamp": { + "type": "date" + }, + "agent_id": { + "type": "long" + }, + "aql": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "asset_agent_id": { + "type": "integer" + }, + "asset_id": { + "type": "long" + }, + "asset_project_id": { + "type": "integer" + }, + "asset_project_version_id": { + "type": "integer" + }, + "asset_user_id": { + "type": "integer" + }, + "asset_vul_id": { + "type": "integer" + }, + "asset_vul_relation_id": { + "type": "integer" + }, + "asset_vul_relation_is_del": { + "type": "integer" + }, + "bind_project_id": { + "type": "long" + }, + "clent_ip": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "context_path": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "create_time": { + "type": "date", + "ignore_malformed": false, + "format": "strict_date_optional_time||epoch_second" + }, + "cve_code": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "cve_id": { + "type": "integer", + "ignore_malformed": false, + 
"coerce": true + }, + "have_article": { + "type": "integer" + }, + "have_poc": { + "type": "integer" + }, + "http_method": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "http_protocol": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "http_scheme": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "id": { + "type": "long" + }, + "language": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "level_id": { + "type": "integer" + }, + "license": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "license_level": { + "type": "integer" + }, + "method_pool": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "package_hash": { + "type": "keyword" + }, + "package_language": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "package_latest_version": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "package_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "package_safe_version": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "package_version": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "pool_sign": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "project_id": { + "type": "long" + }, + "project_version_id": { + "type": "long" + }, + "req_data": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "req_header": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "req_header_for_search": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "req_params": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "res_body": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "res_header": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "search_title": { + "type": "text" + }, + "update_time": { + "type": "date", + "ignore_malformed": false, + "format": "strict_date_optional_time||epoch_second" + }, + "update_time_desc": { + "type": "integer", + "ignore_malformed": false, + "coerce": true + }, + "uri_sha1": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "url": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "user_id": { + "type": "long" + }, + "vul_cve_nums": { + "type": "text" + }, + "vul_detail": { + "type": "text" + }, + "vul_name": { + "type": "text" + }, + "vul_publish_time": { + "type": "date", + "ignore_malformed": false, + "format": "strict_date_optional_time||epoch_second" + }, + "vul_serial": { + "type": "text" + }, + "vul_update_time": { + "type": "date", + "ignore_malformed": false, + "format": "strict_date_optional_time||epoch_second" + } + } + } + } + }' + + + #template-dongtai-v1-asset-vul + curl -H 'content-Type:application/json' -XPUT 
${ES_HOST}/_template/${ES_ALIAS}-dongtai-v1-asset-aggr-template?include_type_name -d ' + { + "order": 1, + "index_patterns": [ + "'${ES_ALIAS}'-dongtai-v1-asset-aggr-index-*" + ], + "settings": { + "index": { + "lifecycle": { + "name": "'${ES_ALIAS}'-dongtai-ilm-policy", + "rollover_alias": "'${ES_ALIAS}'-alias-dongtai-v1-asset-aggr" + }, + "highlight": { + "max_analyzed_offset": "10000000" + }, + "refresh_interval": "30s", + "analysis": { + "analyzer": { + "ngram_analyzer": { + "tokenizer": "ngram_tokenizer" + } + }, + "tokenizer": { + "ngram_tokenizer": { + "token_chars": [ + "letter", + "digit", + "symbol", + "punctuation" + ], + "min_gram": "1", + "type": "ngram", + "max_gram": "2" + } + } + }, + "number_of_shards": "4", + "number_of_replicas": "1" + } + }, + "mappings": { + "_doc": { + "dynamic": "true", + "_meta": {}, + "_source": { + "includes": [], + "excludes": [] + }, + "dynamic_date_formats": [ + "strict_date_optional_time", + "yyyy/MM/dd HH:mm:ss Z||yyyy/MM/dd Z" + ], + "dynamic_templates": [], + "date_detection": true, + "numeric_detection": false, + "properties": { + "@timestamp": { + "type": "date" + }, + "id": { + "type": "integer" + }, + "is_del": { + "type": "integer" + }, + "language": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "last_version": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "level_id": { + "type": "integer" + }, + "license": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "package_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "project_count": { + "type": "integer" + }, + "safe_version": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "signature_value": { + "type": "keyword" + }, + "version": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "vul_count": { + "type": "integer" + }, + "vul_critical_count": { + "type": "integer" + }, + "vul_high_count": { + "type": "integer" + }, + "vul_info_count": { + "type": "integer" + }, + "vul_low_count": { + "type": "integer" + }, + "vul_medium_count": { + "type": "integer" + } + } + } + } + }' + + curl -H 'content-Type:application/json' -XPUT ${ES_HOST}/${ES_ALIAS}-dongtai-v1-method-pool-index-000001 -d ' + { + "aliases": { + "'${ES_ALIAS}'-alias-dongtai-v1-method-pool": { + "is_write_index": true + } + } + }' + + curl -H 'content-Type:application/json' -XPUT ${ES_HOST}/${ES_ALIAS}-dongtai-v1-vulnerability-index-000001 -d ' + { + "aliases": { + "'${ES_ALIAS}'-alias-dongtai-v1-vulnerability": { + "is_write_index": true + } + } + }' + + curl -H 'content-Type:application/json' -XPUT ${ES_HOST}/${ES_ALIAS}-dongtai-v1-asset-index-000001 -d ' + { + "aliases": { + "'${ES_ALIAS}'-alias-dongtai-v1-asset": { + "is_write_index": true + } + } + }' + + curl -H 'content-Type:application/json' -XPUT ${ES_HOST}/${ES_ALIAS}-dongtai-v1-asset-aggr-index-000001 -d ' + { + "aliases": { + "'${ES_ALIAS}'-alias-dongtai-v1-asset-aggr": { + "is_write_index": true + } + } + }' + + curl -H 'content-Type:application/json' -XPUT ${ES_HOST}/${ES_ALIAS}-dongtai-v1-asset-vul-index-000001 -d ' + { + "aliases": { + "'${ES_ALIAS}'-alias-dongtai-v1-asset-vul": { + "is_write_index": true + } + } + }' + Info "index created Ok !!!" + Notice "-----index created Ok !!!-----" + } + + dongtai_es + dongtai_index + + ;; + + *) # Invalid subcommand + if [ ! 
-z $subcommand ]; then # Don't show if no subcommand provided + echo "Invalid subcommand: $subcommand" + fi + usage + exit 1 # Exit with error + ;; + esac +else # else if no options provided throw error + usage + exit 1 +fi \ No newline at end of file diff --git a/deploy/docker-compose/nginx.conf b/deploy/docker-compose/nginx.conf new file mode 100644 index 000000000..3190e060d --- /dev/null +++ b/deploy/docker-compose/nginx.conf @@ -0,0 +1,50 @@ +#user nobody; + worker_processes auto; + events { + worker_connections 65535; + } + http { + include mime.types; + default_type application/octet-stream; + sendfile on; + keepalive_timeout 65; + + #gzip on; + gzip on; + gzip_min_length 5k; + gzip_buffers 4 16k; + #gzip_http_version 1.0; + gzip_comp_level 3; + gzip_types text/plain application/javascript application/x-javascript text/css application/xml text/javascript application/x-httpd-php image/jpeg image/gif image/png; + gzip_vary on; + + server { + listen 80; + server_name 0.0.0.0; + client_max_body_size 100M; + location / { + root /usr/share/nginx/html; #站点目录 + index index.html index.htm; #添加属性。 + try_files $uri $uri/ /index.html; + } + location /api/ { + proxy_read_timeout 60; + proxy_pass http://dongtai-server:8000/api/; + } + location /upload/ { + proxy_pass http://dongtai-server:8000/upload/; + } + location /openapi/ { + proxy_set_header X-real-ip $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header User-Agent $http_user_agent; + proxy_set_header X-Host $http_x_forwarded_host; + proxy_read_timeout 60; + proxy_pass http://dongtai-server:8000/; + } + location = /50x.html { + root /usr/share/nginx/html; + } + } + } + diff --git a/deploy/docker-compose/nginx.conf-1.8.0 b/deploy/docker-compose/nginx.conf-1.8.0 new file mode 100644 index 000000000..4177cace6 --- /dev/null +++ b/deploy/docker-compose/nginx.conf-1.8.0 @@ -0,0 +1,53 @@ +#user nobody; + worker_processes auto; + events { + worker_connections 65535; + } + http { + include mime.types; + default_type application/octet-stream; + sendfile on; + keepalive_timeout 65; + + #gzip on; + gzip on; + gzip_min_length 5k; + gzip_buffers 4 16k; + #gzip_http_version 1.0; + gzip_comp_level 3; + gzip_types text/plain application/javascript application/x-javascript text/css application/xml text/javascript application/x-httpd-php image/jpeg image/gif image/png; + gzip_vary on; + + server { + listen 80; + server_name 0.0.0.0; + client_max_body_size 100M; + location / { + root /usr/share/nginx/html; #站点目录 + index index.html index.htm; #添加属性。 + try_files $uri $uri/ /index.html; + } + location /api/ { + proxy_read_timeout 60; + proxy_pass http://dongtai-server:8000/api/; + } + location /upload/ { + proxy_pass http://dongtai-server:8000/upload/; + } + location /openapi/ { + proxy_set_header X-real-ip $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header User-Agent $http_user_agent; + proxy_set_header X-Host $http_x_forwarded_host; + proxy_read_timeout 60; + proxy_pass http://dongtai-server:8000/; + } + location /log/ { + proxy_pass http://dongtai-logstash:8082/; + } + location = /50x.html { + root /usr/share/nginx/html; + } + } + } + diff --git a/deploy/docker-compose/nginx.conf-legacy b/deploy/docker-compose/nginx.conf-legacy new file mode 100644 index 000000000..c7c83d14f --- /dev/null +++ b/deploy/docker-compose/nginx.conf-legacy @@ -0,0 +1,51 @@ +#user nobody; + worker_processes 1; + events { + worker_connections 1024; + } + http { + include mime.types; + default_type 
application/octet-stream; + sendfile on; + keepalive_timeout 65; + + #gzip on; + gzip on; + gzip_min_length 5k; + gzip_buffers 4 16k; + #gzip_http_version 1.0; + gzip_comp_level 3; + gzip_types text/plain application/javascript application/x-javascript text/css application/xml text/javascript application/x-httpd-php image/jpeg image/gif image/png; + gzip_vary on; + + server { + listen 80; + server_name 0.0.0.0; + client_max_body_size 100M; + location / { + root /usr/share/nginx/html; #站点目录 + index index.html index.htm; #添加属性。 + try_files $uri $uri/ /index.html; + } + + location /api/ { + proxy_read_timeout 60; + proxy_pass http://dongtai-webapi:8000/api/; + } + + location /upload/ { + proxy_pass http://dongtai-webapi:8000/upload/; + } + location /openapi/ { + proxy_set_header X-real-ip $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header User-Agent $http_user_agent; + proxy_set_header X-Host $http_x_forwarded_host; + proxy_read_timeout 60; + proxy_pass http://dongtai-openapi:8000/; + } + location = /50x.html { + root /usr/share/nginx/html; + } + } + } \ No newline at end of file diff --git a/deploy/docker-compose/test/test_dtctl.bats b/deploy/docker-compose/test/test_dtctl.bats new file mode 100644 index 000000000..6848083c5 --- /dev/null +++ b/deploy/docker-compose/test/test_dtctl.bats @@ -0,0 +1,51 @@ +#!/usr/bin/env bats + +setup_file() { + # Initializing install 1.1.2 with schema hash 2a6c08ac348ac0f7f336588587eb05d5397ec84a + echo "Start to install server..." >&3 + run ./dtctl install -v 1.3.1 -r 0 <<< 8088 +} + +teardown_file() { + echo "Start to clean test playground..." >&3 + run ./dtctl rm -d +} + +@test "Print image version by: ./dtctl version" { + run ./dtctl version + [[ "${lines[1]}" =~ "1.3.1" ]] +} + +@test "Print usage by: ./dtctl -h" { + run ./dtctl -h + [[ "${lines[1]}" == "[Info] Usage:" ]] + } + +@test "Export docker-compose.yaml by: ./dtctl file" { + run ./dtctl file + [ -f "docker-compose.yml" ] +} + +@test "Export db schema by: ./dtctl dbschema" { + run ./dtctl dbschema + [ -f "export_db_schema.txt" ] +} + +@test "Export dbhash by: ./dtctl dbhash" { + run ./dtctl dbhash + [[ "${lines[1]}" =~ "current db hash" ]] +} + +# @test "Ugrade server by: ./dtctl upgrade -t 1.3.1" { +# run ./dtctl dbhash +# if [[ ! "${output}" =~ "150e0e2fa028ced9ae9f6246dce6a765041f9fb6" ]]; then +# skip "Current db is not 1.1.2" +# fi + +# ./dtctl upgrade -t 1.3.1 +# if [ ! -f "~/dongtai_iast_upgrade/dongtai_iast*" ]; then +# echo "mysql backup error!" 
+# fi +# run ./dtctl dbhash +# [[ "${lines[0]}" =~ "4b0735025ce3bf6b4294d76e82851493a64a940a" ]] +# } \ No newline at end of file diff --git a/deploy/docker-compose/updaterecord.txt b/deploy/docker-compose/updaterecord.txt new file mode 100644 index 000000000..0e95ace43 --- /dev/null +++ b/deploy/docker-compose/updaterecord.txt @@ -0,0 +1,25 @@ + +1.8.3 update-20220805-release-1.8.3-2.sql +1.8.3 update-20220805-release-1.8.3-1.sql +1.8.2 update-20220607-release-1.8.2-2.sql +1.8.2 update-20220607-release-1.8.2-1.sql +1.8.0 update-20220607-release-1.8.0-2.sql +1.8.0 update-20220607-release-1.8.0-1.sql +1.7.0 update-20220515-release-1.7.0-2.sql +1.7.0 update-20220515-release-1.7.0-1.sql +1.7.0 update-20220515-release-1.7.0-0.sql +1.6.0 update-20220407-release-1.6.0.sql +1.5.0 update-20220407-release-1.5.0.sql +1.4.0 update-20220316-release-1.4.0.sql 380a01f68a1472ab8354134fb27959800b0a2b5d 2877b580ea1c79045c813d1541e4040412aa0ac0 +1.3.1 update-20220218-release-1.3.1.sql 5444b3278243346d3f35290a3007a7e74d2dc96b 150e0e2fa028ced9ae9f6246dce6a765041f9fb6 +1.3.0 update-20220111-release-1.3.0.sql 1eb19f080762bc377900f4735a56194f74cfeb6f 765399be2bef3232ef47f5e3e3e6839086a1dce3 +1.2.0 update-20211230-release-1.2.0.sql d0753caf7977201b0dc52617242f20e75cf1a03d f5729758572aa8316ca8c2b2dd9c7c45ef074b21 +1.1.4 update-20211218-release-1.1.4.sql 99906d11141df04ffc62a45539a7af2d337ffa80 3abd051bcd32ad4b1903370d7fe945ffd674a3c2 +1.1.3 update-20211203-release-1.1.3.sql 320c79cb7f4f5a86a340921a58b237b630239963 4b0735025ce3bf6b4294d76e82851493a64a940a +1.1.2 update-20211123-release-1.1.2.sql 2a11c9391652253a99fa278f9dcc4cd3439f107a 2a6c08ac348ac0f7f336588587eb05d5397ec84a +1.1.1 update-20211120-release-1.1.1.sql ccb6484a727ab6e8b3b8f14ce637386df8b35536 2a6c08ac348ac0f7f336588587eb05d5397ec84a +1.1.0 update-20211105-release-1.1.0.sql 21481b1750d7ea216204134eb3a3442f01a66ed4 2555d3c7a3167af3a72acec97d7eb72a4fcc5021 +1.0.6 update-20211022-release-1.0.6.sql 4d6a305cec6e45bf7b13b8c1b0d4cb9c60efda89 a678f256323f98503c155bad36a2c36449c8ceae +1.0.5 update-20211009-release-1.0.5.sql 7e24a3f9a6a4d9d796f15c25572811a9bcf94e3b 20c4822921235f53a101f9a2a453dda84fac45ab +1.0.4 update-20210918-release-1.0.4.sql a9e77de344e05c34da4ac2a7f6e3d0631ce03a2c d20be8145757f8d33f2821b371e40fdfbe49823b +1.0.3 update-20210831-release-1.0.3.sql f54bc03d216fcc0e3f7e85b3bee5774b06ab7a10 d20be8145757f8d33f2821b371e40fdfbe49823b diff --git a/deploy/docker/entrypoint.sh b/deploy/docker/entrypoint.sh new file mode 100755 index 000000000..149369b47 --- /dev/null +++ b/deploy/docker/entrypoint.sh @@ -0,0 +1,24 @@ +#!/bin/bash +echo '启动uwsgi服务' +python manage.py compilemessages +sleep 2 +python /opt/dongtai/deploy/docker/version_update.py || true +echo $1 + +if [ "$1" = "worker" ]; then + celery -A dongtai_conf worker -l info $DONGTAI_CONCURRENCY -E --pidfile= +elif [ "$1" = "worker-beat" ]; then + celery -A dongtai_conf worker -l info -Q dongtai-periodic-task $DONGTAI_CONCURRENCY -E --pidfile= +elif [ "$1" = "worker-high-freq" ]; then + celery -A dongtai_conf worker -l info -Q dongtai-method-pool-scan,dongtai-replay-vul-scan $DONGTAI_CONCURRENCY -E --pidfile= +elif [ "$1" = "worker-es" ]; then + celery -A dongtai_conf worker -l info -Q dongtai-es-save-task $DONGTAI_CONCURRENCY -E --pidfile= +elif [ "$1" = "worker-sca" ]; then + celery -A dongtai_conf worker -l info -Q dongtai-sca-task $DONGTAI_CONCURRENCY -E --pidfile= +elif [ "$1" = "worker-other" ]; then + celery -A dongtai_conf worker -l info -X 
dongtai-periodic-task,dongtai-method-pool-scan,dongtai-replay-vul-scan,dongtai-sca-task $DONGTAI_CONCURRENCY -E --pidfile= +elif [ "$1" = "beat" ]; then + celery -A dongtai_conf beat -l info $DONGTAI_CONCURRENCY --pidfile= --scheduler django_celery_beat.schedulers:DatabaseScheduler +else + /usr/local/bin/uwsgi --ini /opt/dongtai/dongtai_conf/conf/uwsgi.ini $DONGTAI_CONCURRENCY +fi diff --git a/deploy/docker/version.sql b/deploy/docker/version.sql new file mode 100644 index 000000000..67e2c2f10 --- /dev/null +++ b/deploy/docker/version.sql @@ -0,0 +1,2 @@ +CREATE TABLE IF NOT EXISTS `project_version_control` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', `version` varchar(63) DEFAULT NULL COMMENT '版本号', `component_name` varchar(255) DEFAULT NULL COMMENT 'sql名', `component_version_hash` varchar(255) DEFAULT NULL COMMENT 'sql哈希值', `additional` text COMMENT '额外注释', `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `project_version_control_UN` (`component_name`) ) ENGINE = InnoDB CHARSET = utf8mb4; +DELETE FROM project_version_control WHERE 1=1; diff --git a/deploy/docker/version.sql.example b/deploy/docker/version.sql.example new file mode 100644 index 000000000..c4cd6b372 --- /dev/null +++ b/deploy/docker/version.sql.example @@ -0,0 +1,3 @@ +CREATE TABLE `project_version_control` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', `version` varchar(63) DEFAULT NULL COMMENT '版本号', `component_name` varchar(255) DEFAULT NULL COMMENT 'sql名', `component_version_hash` varchar(255) DEFAULT NULL COMMENT 'sql哈希值', `additional` text COMMENT '额外注释', `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `project_version_control_UN` (`component_name`) ) ENGINE = InnoDB AUTO_INCREMENT = 19 CHARSET = utf8mb4; +REPLACE INTO project_version_control (version, component_name, component_version_hash) VALUES('1.3.0', 'DongTai-webapi', '12313223121331232132'); +REPLACE INTO project_version_control (version, component_name, component_version_hash) VALUES('1.3.0', 'DongTai', '12313223121331232132'); diff --git a/deploy/docker/version_update.py b/deploy/docker/version_update.py new file mode 100644 index 000000000..1d5a6572e --- /dev/null +++ b/deploy/docker/version_update.py @@ -0,0 +1,36 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : version_update +# @created : 星期四 1月 20, 2022 15:42:27 CST +# +# @description : +###################################################################### + + +from configparser import ConfigParser +import os +import MySQLdb +import sys +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +config = ConfigParser() +status = config.read(os.path.join(BASE_DIR, '../dongtai_conf/conf/config.ini')) +if len(status) == 0: + print("config file not exist. 
stop running") + sys.exit(0) +DBCONFIG = { + 'user': config.get("mysql", 'user'), + 'db': config.get("mysql", 'name'), + 'passwd': config.get("mysql", 'password'), + 'host': config.get("mysql", 'host'), + 'port': int(config.get("mysql", 'port')), +} +db = MySQLdb.connect(**DBCONFIG, use_unicode=True, charset="utf8mb4") +cursor = db.cursor() +for line in open(os.path.join(BASE_DIR, 'docker/version.sql'), + encoding='utf-8'): + if not line.strip(): + continue + cursor.execute(line) +cursor.close() +db.commit() diff --git a/deploy/kubernetes/README.MD b/deploy/kubernetes/README.MD new file mode 100644 index 000000000..446dbdf83 --- /dev/null +++ b/deploy/kubernetes/README.MD @@ -0,0 +1,144 @@ +# Setup in Kubnernetes + +[中文版本(Chinese version)](README.ZH-CN.MD) + +## Prepare in advance + +- Kubernetes version: 1.9+ +- Kubectl has been installed on the client +- You can use `kubectl auth can-i` if you have authorization for the following operations: + - create secrets + - create deployments + - create configmaps + - create namespaces + - create StatefulSet + - create Service + +## Deployment + +### Setup with shell + +Usage: +``` +./install.sh -m NodePort -s mysql -n dongtai-iast +``` + +m: access mode(mode), optional: `NodePort` `LoadBalancer`, default: NodePort + +s: skipped resources(skip), optional: `mysql` `redis` `mysql,redis`, default: don't skip + +n: specify the namespace, default: `dongtai-iast` + +#### Custom configuration + +If you want to modify the configuration of mysql and Redis, manually modify the configuration `manifest/4.deploy-iast-server.yml` The top part of `[mysql]` and `redis`. + +> Note: Assuming that `mysql` and `redis` can be configured for production, this deployment scheme can be used for production deployment. + +#### Access + +##### NodePort + +##### Obtain an available Node IP address + +```shell script +kubectl get nodes -o wide | awk {'print $1" " $2 " " $7'} | column -t +``` + +##### Obtain an available NodePort + +```shell script +kubectl get svc dongtai-web-pub-svc -n dongtai-iast -o=jsonpath='{.spec.ports[0].nodePort}' +``` + +##### Access Link: + +```shell script +http://${NodeIP}:${PORT} +``` + +##### LoadBalancer + +##### Obtain the available LoadBalancer IP address or DNS + +```shell script +kubectl get svc dongtai-web-pub-svc dongtai-engine-pub-svc -n dongtai-iast +``` + +#### Uninstall + +```shell script +kubectl delete namespace ${YourNamespace} +``` + +### Setup with helm + +Before install, you have to make sure the helm is available. Follow [this guide](https://helm.sh/docs/intro/install/) to install helm. + +1. Add and update helm chart repo for `Dongtai Iast` + + ``` + helm repo add dongtai https://charts.dongtai.io/iast + helm repo update + ``` + +2. Install + + ``` + helm install --create-namespace -n dongtai dongtai-iast dongtai/dongtai-iast + ``` + + This command will deploy dongtai servers in dongtai namespace and expose service with `ClusterIP`. 
+ +#### Custom configuration + +If you want to modify the configuration of mysql and Redis,you can overwrite the values by your own file,Suppose you have a file in `/tmp/my-values.yml` with values as follow: + +``` yaml +mysql: + host: my-dongtai-mysql + port: 3306 + name: my-dongtai_webapi + user: root + password: my-dongtai-iast + +redis: + host: my-dongtai-redis + port: 6379 + password: 123456 + db: 0 +``` + +``` shell script + helm install --create-namespace -n dongtai --values /tmp/my-values.yaml dongtai-iast dongtai/dongtai-iast +``` + +You can also pass in a single value using **`--set`**, for example, you can switch defalut access type `ClusterIP` to `NodePort` + +```shell +helm install --create-namespace -n dongtai-test --set accessType=NodePort --set imageVersion=1.3.1 dongtai-iast dongtai/dongtai-iast + +``` + +Avaliable values: + +**skipMysql**: false (default) +**skipRedis**: false (default) +**accessType**: ClusterIP(default), Options: ClusterIP,NodePort,LoadBalancer +**imageVersion**: latest (default) + +#### Uninstall +``` +helm uninstall dongtai-iast -n dongtai +``` + + + + +### Scale + +With `kubectl scale` command, you can scale `dongtai` deployment to your desire count. + +``` +kubectl scale deployments ${deployment-names} --replicas=${count} -n ${your-namespace} +``` diff --git a/deploy/kubernetes/README.ZH-CN.MD b/deploy/kubernetes/README.ZH-CN.MD new file mode 100644 index 000000000..0e470ed99 --- /dev/null +++ b/deploy/kubernetes/README.ZH-CN.MD @@ -0,0 +1,139 @@ +# Kubnernetes环境快速部署 +[English](README.MD) + +## 提前准备 + +- Kubernetes 版本:1.9+ +- 客户端已经安装kubectl +- 具备以下操作的授权,可以使用`kubectl auth can-i`验证: + - create secrets + - create deployments + - create configmaps + - create namespaces + - create StatefulSet + - create Service + +## 部署 + +### 脚本安装 + +用法: +``` +./install.sh -m NodePort -s mysql -n dongtai-iast +``` + +m: 访问模式(mode),可选:`NodePort` `LoadBalancer`,默认为:NodePort + +s: 跳过的资源(skip),可选: `mysql` `redis` `mysql,redis`,默认:不跳过 + +n: 指定namespace,默认为:`dongtai-iast` + +#### 自定义配置 + +如需修改mysql和redis的配置,需要手动修改 `manifest/4.deploy-iast-server.yml`最上部分的`[mysql]`和`redis`部分配置。 + +> 说明:假设`mysql`和`redis`的配置可用于生产环境,此部署方案即可用于生产环境部署。 + + +#### 访问 + +##### NodePort + +##### 获取可用的Node IP +```shell script +kubectl get nodes -o wide | awk {'print $1" " $2 " " $7'} | column -t +``` + +#### 获取可用的NodePort + +```shell script +kubectl get svc dongtai-web-pub-svc -n dongtai-iast -o=jsonpath='{.spec.ports[0].nodePort}' +``` + +##### 访问地址: +```shell script +http://${NodeIP}:${PORT} +``` + +##### LoadBalancer + +##### 获取可用的LoadBalancer IP或者DNS +```shell script +kubectl get svc dongtai-web-pub-svc dongtai-engine-pub-svc -n dongtai-iast +``` + +#### 卸载 + +```shell script +kubectl delete namespace ${YourNamespace} +``` + + +### 使用 helm 安装 + +安装之前请确保已经安装`helm`.通过下面链接[this guide](https://helm.sh/docs/intro/install/)安装. + +1. 添加、更新仓库 + + ``` + helm repo add dongtai https://charts.dongtai.io/iast + helm repo update + ``` + +2. 安装 + + ``` + helm install --create-namespace -n dongtai dongtai-iast dongtai/dongtai-iast + ``` + + 这个命令将会在dongtai命名空间部署 `Dongtai` 服务,并且使用 `ClusterIP` 方式暴露服务. + +#### 自定义配置 + +如果需要自定义mysql和redis的配置,你可以使用自己的文件覆盖默认的values文件. 
+假如你有一个自定义文件 `/tmp/my-values.yml` 如下: + +``` yaml +mysql: + host: my-dongtai-mysql + port: 3306 + name: my-dongtai_webapi + user: root + password: my-dongtai-iast + +redis: + host: my-dongtai-redis + port: 6379 + password: 123456 + db: 0 +``` + +``` shell script + helm install --create-namespace -n dongtai --values /tmp/my-values.yaml dongtai-iast dongtai/dongtai-iast +``` + +你也可以使用 `--set`来覆盖单个值, 例如: 你可以使用 `--set` 将 `ClusterIP` 切换成 `NodePort` + +```shell +helm install --create-namespace -n dongtai-test --set accessType=NodePort --set imageVersion=1.3.1 dongtai-iast dongtai/dongtai-iast +``` +可用值: + +**skipMysql**: false (default) +**skipRedis**: false (default) +**accessType**: ClusterIP(default), Options: ClusterIP,NodePort,LoadBalancer +**imageVersion**: latest (default) + +#### 卸载 +``` +helm uninstall dongtai-iast -n dongtai +``` + + +### 扩缩容 + +使用 `kubectl scale` 命令可以把 `dongtai` 服务扩容成你期望的数量。 + +``` +kubectl scale deployments ${deployment-names} --replicas=${count} -n ${your-namespace} +``` \ No newline at end of file diff --git a/deploy/kubernetes/helm/.helmignore b/deploy/kubernetes/helm/.helmignore new file mode 100644 index 000000000..0e8a0eb36 --- /dev/null +++ b/deploy/kubernetes/helm/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/deploy/kubernetes/helm/Chart.yaml b/deploy/kubernetes/helm/Chart.yaml new file mode 100644 index 000000000..61af4e333 --- /dev/null +++ b/deploy/kubernetes/helm/Chart.yaml @@ -0,0 +1,29 @@ +apiVersion: v2 +name: dongtai-iast +description: A Helm chart for Install Dongtai Iast in Kubernetes + +# A chart can be either an 'application' or a 'library' chart. +# +# Application charts are a collection of templates that can be packaged into versioned archives +# to be deployed. +# +# Library charts provide useful utilities or functions for the chart developer. They're included as +# a dependency of application charts to inject those utilities and functions into the rendering +# pipeline. Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.0.1 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. +appVersion: 1.0.5 + +maintainers: +- email: hardy4yooz@gmail.com + name: Hardy + +icon: https://dongtai.io/static/img/logo.png diff --git a/deploy/kubernetes/helm/templates/NOTES.txt b/deploy/kubernetes/helm/templates/NOTES.txt new file mode 100644 index 000000000..d1e6a45d5 --- /dev/null +++ b/deploy/kubernetes/helm/templates/NOTES.txt @@ -0,0 +1,32 @@ +Welcom to use Dongtai IAST ! + +AccessType is {{ .Values.accessType -}}. 
+ +{{ if eq .Values.accessType "NodePort" -}} +Available node ip: +{{- $nodes := (lookup "v1" "Node" "" "") }} +{{ if $nodes }} +{{ range $index, $val := $nodes.items }} + {{- range $key,$address := $val.status.addresses }} + {{- if eq $address.type "ExternalIP" }} + {{- print $address.address }} + {{- end }} + {{- end }} +{{ end }} +{{- end }} + +Get `dongtai-web service port`: + + $ kubectl get service dongtai-web-svc -n {{ .Release.Namespace }} -o=jsonpath='{.spec.ports[0].nodePort}' + +{{ else if eq .Values.accessType "LoadBalancer" }} + +Get EXTERNAL-IP ip or DNS: + + $ kubectl get service dongtai-web-svc -n {{ .Release.Namespace }} + +{{ else if eq .Values.accessType "ClusterIP" }} + + Your should expose your service [dongtai-web-svc] manually. + +{{ end }} \ No newline at end of file diff --git a/deploy/kubernetes/helm/templates/_helpers.tpl b/deploy/kubernetes/helm/templates/_helpers.tpl new file mode 100644 index 000000000..954bc99f2 --- /dev/null +++ b/deploy/kubernetes/helm/templates/_helpers.tpl @@ -0,0 +1,236 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Expand the name of the chart. +*/}} +{{- define "dongtai.name" -}} +{{- default .Release.Name | trunc 63 | trimSuffix "-" -}} +{{- end -}} + + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "dongtai.fullname" -}} +{{- if .Values.fullnameOverride -}} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .Chart.Name .Values.nameOverride -}} +{{- if contains $name .Release.Name -}} +{{- .Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "dongtai.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Common labels +*/}} +{{- define "dongtai.labels" -}} +{{ include "dongtai.version" . }} +helm.sh/chart: {{ include "dongtai.chart" . }} +{{ include "dongtai.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{ end -}} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end -}} + +{{- define "dongtai.pv" -}} +pv.kubernetes.io/bind-completed: "yes" +pv.kubernetes.io/bound-by-controller: "yes" +{{- end -}} + + +{{- define "dongtai.istiolabels" -}} +sidecar.istio.io/inject: "true" +{{- end -}} + +{{- define "dongtai.version" -}} +version: v1 +{{- end -}} + +{{/* +Selector labels +*/}} +{{- define "dongtai.selectorLabels" -}} +app.kubernetes.io/name: {{ include "dongtai.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end -}} + +{{/* +Create the name of the service account to use +*/}} +{{- define "dongtai.serviceAccountName" -}} +{{- if .Values.serviceAccount.create -}} +{{- default (include "dongtai.fullname" .) .Values.serviceAccount.name -}} +{{- else -}} +{{- default "default" .Values.serviceAccount.name -}} +{{- end -}} +{{- end -}} + +{{- define "deploy.config" -}} +{{ include "deploy.imagePullPolicy" . }} +{{ include "deploy.resources" . }} +volumeMounts: + - name: {{ template "dongtai.fullname" . }}-configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + - name: {{ template "dongtai.fullname" . 
}}-log-path + mountPath: /tmp/logstash +{{- end -}} +{{- define "deploy.imagePullPolicy" -}} +imagePullPolicy: Always +{{- end -}} + +{{- define "deploy.resources" -}} +resources: + limits: + cpu: "1000m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi +{{- end -}} +{{- define "deploy.config.vo" -}} +volumes: + - name: {{ template "dongtai.fullname" . }}-configfile + configMap: + name: dongtai-iast-config.ini + - name: {{ template "dongtai.fullname" . }}-log-path + persistentVolumeClaim: + {{ include "deploy.config.persistentVolumeClaim" . }} +{{- end -}} + +{{- define "deploy.config.persistentVolumeClaim" -}} +claimName: {{.Values.storage.persistentVolumeClaim}} +{{- end -}} + +{{/* +Create the name of the service account to use +*/}} + +{{- define "config.ini" -}} + [mysql] + host = {{.Values.mysql.host}} + port = {{.Values.mysql.port}} + name = {{.Values.mysql.name}} + user = {{.Values.mysql.user}} + password = {{.Values.mysql.password}} + [redis] + host = {{.Values.redis.host}} + port = {{.Values.redis.port}} + password = {{.Values.redis.password}} + db = {{.Values.redis.db}} + [engine] + url = {{.Values.enginUrl}} + [apiserver] + url = {{.Values.apiServer}} + [security] + csrf_trust_origins = {{.Values.csrfTrustOrigins}} + secret_key = {{.Values.secretKey}} + [smtp] + server = {{.Values.smtp.server}} + user = {{.Values.smtp.user}} + password = {{.Values.smtp.password}} + from_addr = {{.Values.smtp.from_addr}} + ssl = {{.Values.smtp.ssl}} + cc_addr = {{.Values.smtp.cc_addr}} + port = {{.Values.smtp.port}} + [sca] + base_url = https://sca.huoxian.cn/ + timeout = 5 + token = {{.Values.sca.sca_token}} + [task] + retryable = true + max_retries = 3 + async_send = true + async_send_delay = 5 + [log_service] + host = dongtai-logstash-svc + port = 8083 + [common_file_path] + tmp_path = /tmp/logstash + report_img = report/img + report_pdf = report/pdf + report_word = report/word + report_excel = report/excel + [elastic_search] + enable = false + host = http://user:passwd@127.0.0.1:9200 + vulnerability_index = dongtai-iast-alias-dongtai-v1-vulnerability + asset_aggr_index = dongtai-iast-alias-dongtai-v1-asset-aggr + asset_index = dongtai-iast-alias-dongtai-v1-asset + method_pool_index = dongtai-iast-alias-dongtai-v1-method-pool + asset_vul_index = dongtai-iast-alias-dongtai-v1-asset-vul + [other] + domain = http://localhost.domain/ + demo_session_cookie_domain = .huoxian.cn + logging_level = INFO + cache_preheat = True +{{- end -}} + +{{/* +Create the name of the service account to use +*/}} +{{- define "nginx.conf" -}} + worker_processes auto; + events { + worker_connections 65535; + } + http { + include mime.types; + default_type application/octet-stream; + sendfile on; + keepalive_timeout 65; + #gzip on; + gzip on; + gzip_min_length 5k; + gzip_buffers 4 16k; + #gzip_http_version 1.0; + gzip_comp_level 3; + gzip_types text/plain application/javascript application/x-javascript text/css application/xml text/javascript application/x-httpd-php image/jpeg image/gif image/png; + gzip_vary on; + server { + listen 80; + server_name 0.0.0.0; + client_max_body_size 100M; + location / { + root /usr/share/nginx/html; #站点目录 + index index.html index.htm; #添加属性。 + try_files $uri $uri/ /index.html; + } + location /api/ { + proxy_read_timeout 60; + proxy_pass http://dongtai-server-svc:80/api/; + } + location /upload/ { + proxy_pass http://dongtai-server-svc:80/upload/; + } + location /openapi/ { + proxy_set_header X-real-ip $remote_addr; + proxy_set_header X-Forwarded-For 
$proxy_add_x_forwarded_for; + proxy_set_header User-Agent $http_user_agent; + proxy_set_header X-Host $http_x_forwarded_host; + proxy_read_timeout 60; + proxy_pass http://dongtai-server-svc:80/; + } + location /log/ { + proxy_pass http://dongtai-logstash-svc:8082/; + } + location = /50x.html { + root /usr/share/nginx/html; + } + } + } +{{- end -}} + diff --git a/deploy/kubernetes/helm/templates/dongtai-cm.yml b/deploy/kubernetes/helm/templates/dongtai-cm.yml new file mode 100644 index 000000000..4bc1391ae --- /dev/null +++ b/deploy/kubernetes/helm/templates/dongtai-cm.yml @@ -0,0 +1,34 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: dongtai-iast-config.ini + namespace: {{ .Release.Namespace }} +data: + config.ini: |- + {{ include "config.ini" . }} + {{- if (.Files.Glob "config/**") }} + {{ range $path, $bytes := .Files.Glob "config/**" }} + {{- $path | replace "config/" "" | b64enc | replace "=" "-" | indent 2 }}: | + {{ $.Files.Get $path | b64enc | indent 4 }} + {{ end }} + {{ end }} +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: logstash-cm + namespace: {{ .Release.Namespace }} +data: + database: {{.Values.mysql.host}}:{{.Values.mysql.port}}/{{.Values.mysql.name}} + username: {{.Values.mysql.user}} + password: {{.Values.mysql.password}} +--- +apiVersion: v1 +data: + nginx.conf: |- + {{ include "nginx.conf" . }} +kind: ConfigMap +metadata: + name: dongtai-web-nginx-conf + namespace: {{.Release.Namespace}} +--- \ No newline at end of file diff --git a/deploy/kubernetes/helm/templates/dongtai-date.yml b/deploy/kubernetes/helm/templates/dongtai-date.yml new file mode 100644 index 000000000..1362c166c --- /dev/null +++ b/deploy/kubernetes/helm/templates/dongtai-date.yml @@ -0,0 +1,67 @@ +{{- if not .Values.skipRedis -}} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ template "dongtai.fullname" . }}-redis + namespace: {{.Release.Namespace}} +spec: + selector: + matchLabels: + app: {{ template "dongtai.fullname" . }}-redis + {{- include "dongtai.labels" . | nindent 6}} + serviceName: dongtai-redis + template: + metadata: + annotations: + build_number: "{{ template "dongtai.fullname" . }}" + {{- if not .Values.skipistio }}{{ include "dongtai.istiolabels" . }}{{ end }} + labels: + app: {{ template "dongtai.fullname" . }}-redis + release: {{ .Release.Name }} + {{- include "dongtai.labels" . | nindent 8 }} + spec: + containers: + - image: {{ .Values.images }}/dongtai-redis:{{ .Values.tag }} + imagePullPolicy: Always + name: {{ template "dongtai.fullname" . }}-redis + ports: + - containerPort: 6379 + name: redis + protocol: TCP +--- +{{- end -}} +{{- if not .Values.skipMysql -}} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "dongtai.fullname" . }}-mysql + namespace: {{.Release.Namespace}} +spec: + selector: + matchLabels: + app: {{ template "dongtai.fullname" . }}-mysql + release: {{ .Release.Name }} + {{- include "dongtai.labels" . | nindent 6 }} + strategy: + type: Recreate + template: + metadata: + annotations: + build_number: "{{ template "dongtai.fullname" . }}" + {{- if not .Values.skipistio }}{{ include "dongtai.istiolabels" . }}{{ end }} + labels: + app: {{ template "dongtai.fullname" . }}-mysql + release: {{ .Release.Name }} + {{- include "dongtai.labels" . 
| nindent 8 }} + spec: + containers: + - image: {{ .Values.images }}/dongtai-mysql:{{ .Values.tag }} + name: mysql-container + imagePullPolicy: Always + ports: + - containerPort: 3306 + name: tcp-mysql +--- +{{- end -}} \ No newline at end of file diff --git a/deploy/kubernetes/helm/templates/dongtai-deploy.yml b/deploy/kubernetes/helm/templates/dongtai-deploy.yml new file mode 100644 index 000000000..630cc77f1 --- /dev/null +++ b/deploy/kubernetes/helm/templates/dongtai-deploy.yml @@ -0,0 +1,374 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "dongtai.fullname" . }}-web + namespace: {{.Release.Namespace}} + annotations: + kubesphere.io/description: web + labels: + app: {{ template "dongtai.fullname" . }}-web + {{- include "dongtai.labels" . | nindent 4 }} +spec: + replicas: {{.Values.replicaCount}} + selector: + matchLabels: + app: {{ template "dongtai.fullname" . }}-web + {{- include "dongtai.labels" . | nindent 6 }} + template: + metadata: + annotations: + build_number: "{{ template "dongtai.fullname" . }}" + {{- if not .Values.skipistio }}{{ include "dongtai.istiolabels" . }}{{ end }} + labels: + app: {{ template "dongtai.fullname" . }}-web + {{- include "dongtai.labels" . | nindent 8 }} + spec: + containers: + - name: {{ template "dongtai.fullname" . }}-web-container + image: {{ .Values.images }}/dongtai-web:{{ .Values.tag }} + imagePullPolicy: Always + {{ include "deploy.resources" . | nindent 10 }} + volumeMounts: + - name: configfile + mountPath: /etc/nginx/nginx.conf + subPath: nginx.conf + volumes: + - name: configfile + configMap: + name: dongtai-web-nginx-conf +--- +# dongtai-server服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "dongtai.fullname" . }}-server + namespace: {{.Release.Namespace}} + annotations: + kubesphere.io/description: server + labels: + app: {{ template "dongtai.fullname" . }}-server + {{- include "dongtai.labels" . | nindent 4 }} +spec: + replicas: {{.Values.replicaCount}} + selector: + matchLabels: + app: {{ template "dongtai.fullname" . }}-server + {{- include "dongtai.labels" . | nindent 6 }} + template: + metadata: + annotations: + build_number: "{{ template "dongtai.fullname" . }}" + {{- if not .Values.skipistio }}{{ include "dongtai.istiolabels" . }}{{ end }} + labels: + app: {{ template "dongtai.fullname" . }}-server + {{- include "dongtai.labels" . | nindent 8 }} + spec: + containers: + - name: {{ template "dongtai.fullname" . }}-server-container + image: {{ .Values.images }}/dongtai-server:{{ .Values.tag }} + env: + - name: DONGTAI_CONCURRENCY + value: --processes 4 + {{- include "deploy.config" . | nindent 10 }} + {{- include "deploy.config.vo" . | nindent 6 }} +--- + +#dongtai-worker-task服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "dongtai.fullname" . }}-worker-task + namespace: {{.Release.Namespace}} + annotations: + kubesphere.io/description: {{ template "dongtai.fullname" . }}-worker-task + labels: + app: {{ template "dongtai.fullname" . }}-worker-task + {{- include "dongtai.labels" . | nindent 4 }} +spec: + replicas: {{.Values.replicaCount}} + selector: + matchLabels: + app: {{ template "dongtai.fullname" . }}-worker-task + {{- include "dongtai.labels" . | nindent 6 }} + template: + metadata: + labels: + app: {{ template "dongtai.fullname" . }}-worker-task + {{- include "dongtai.labels" . | nindent 8 }} + spec: + containers: + - name: {{ template "dongtai.fullname" . 
}}-worker-task-container + image: {{ .Values.images }}/dongtai-server:{{ .Values.tag }} + command: [ "/bin/bash","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "beat" ] + {{- include "deploy.config" . | nindent 10 }} + {{- include "deploy.config.vo" . | nindent 6 }} +--- +# dongtai-worker-high-freq服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "dongtai.fullname" . }}-worker-high-freq + namespace: {{.Release.Namespace}} + annotations: + kubesphere.io/description: {{ template "dongtai.fullname" . }}-worker-high-freq + labels: + app: {{ template "dongtai.fullname" . }}-worker-high-freq + {{- include "dongtai.labels" . | nindent 4 }} +spec: + replicas: {{.Values.replicaCount}} + selector: + matchLabels: + app: {{ template "dongtai.fullname" . }}-worker-high-freq + {{- include "dongtai.labels" . | nindent 6 }} + template: + metadata: + labels: + app: {{ template "dongtai.fullname" . }}-worker-high-freq + {{- include "dongtai.labels" . | nindent 8 }} + spec: + containers: + - name: {{ template "dongtai.fullname" . }}-worker-high-freq-container + image: {{ .Values.images }}/dongtai-server:{{ .Values.tag }} + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-high-freq" ] + env: + - name: DONGTAI_CONCURRENCY + value: -P gevent --concurrency=121 + {{- include "deploy.config" . | nindent 10 }} + {{- include "deploy.config.vo" . | nindent 6 }} +--- +# dongtai-worker-beat服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "dongtai.fullname" . }}-worker-beat + namespace: {{.Release.Namespace}} + annotations: + kubesphere.io/description: {{ template "dongtai.fullname" . }}-worker-beat + labels: + app: {{ template "dongtai.fullname" . }}-worker-beat + {{- include "dongtai.labels" . | nindent 4 }} +spec: + replicas: {{.Values.replicaCount}} + selector: + matchLabels: + app: {{ template "dongtai.fullname" . }}-worker-beat + {{- include "dongtai.labels" . | nindent 6 }} + template: + metadata: + labels: + app: {{ template "dongtai.fullname" . }}-worker-beat + {{- include "dongtai.labels" . | nindent 8 }} + spec: + containers: + - name: {{ template "dongtai.fullname" . }}-worker-beat-container + image: {{ .Values.images }}/dongtai-server:{{ .Values.tag }} + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-beat" ] + env: + - name: DONGTAI_CONCURRENCY + value: --concurrency=2 + {{- include "deploy.config" . | nindent 10 }} + {{- include "deploy.config.vo" . | nindent 6 }} + +--- +# dongtai-worker-other服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "dongtai.fullname" . }}-worker-other + namespace: {{.Release.Namespace}} + annotations: + kubesphere.io/description: {{ template "dongtai.fullname" . }}-worker-other + labels: + app: {{ template "dongtai.fullname" . }}-worker-other + {{- include "dongtai.labels" . | nindent 4 }} +spec: + replicas: {{.Values.replicaCount}} + selector: + matchLabels: + app: {{ template "dongtai.fullname" . }}-worker-other + {{- include "dongtai.labels" . | nindent 6 }} + template: + metadata: + labels: + app: {{ template "dongtai.fullname" . }}-worker-other + {{- include "dongtai.labels" . | nindent 8 }} + spec: + containers: + - name: {{ template "dongtai.fullname" . 
}}-worker-other-container
+          image: {{ .Values.images }}/dongtai-server:{{ .Values.tag }}
+          command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ]
+          args: [ "worker-other" ]
+          env:
+            - name: DONGTAI_CONCURRENCY
+              value: --concurrency=2
+          {{- include "deploy.config" . | nindent 10 }}
+      {{- include "deploy.config.vo" . | nindent 6 }}
+---
+# dongtai-worker-sca service
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: {{ template "dongtai.fullname" . }}-worker-sca
+  namespace: {{.Release.Namespace}}
+  annotations:
+    kubesphere.io/description: {{ template "dongtai.fullname" . }}-worker-sca
+  labels:
+    app: {{ template "dongtai.fullname" . }}-worker-sca
+    {{- include "dongtai.labels" . | nindent 4 }}
+spec:
+  replicas: {{.Values.replicaCount}}
+  selector:
+    matchLabels:
+      app: {{ template "dongtai.fullname" . }}-worker-sca
+      {{- include "dongtai.labels" . | nindent 6 }}
+  template:
+    metadata:
+      labels:
+        app: {{ template "dongtai.fullname" . }}-worker-sca
+        {{- include "dongtai.labels" . | nindent 8 }}
+    spec:
+      containers:
+        - name: {{ template "dongtai.fullname" . }}-worker-sca-container
+          image: {{ .Values.images }}/dongtai-server:{{ .Values.tag }}
+          command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ]
+          args: [ "worker-sca" ]
+          env:
+            - name: DONGTAI_CONCURRENCY
+              value: --concurrency=2
+          {{- include "deploy.config" . | nindent 10 }}
+      {{- include "deploy.config.vo" . | nindent 6 }}
+---
+# dongtai-worker-es service
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: {{ template "dongtai.fullname" . }}-worker-es
+  namespace: {{.Release.Namespace}}
+  annotations:
+    kubesphere.io/description: {{ template "dongtai.fullname" . }}-worker-es
+  labels:
+    app: {{ template "dongtai.fullname" . }}-worker-es
+    {{- include "dongtai.labels" . | nindent 4 }}
+spec:
+  replicas: {{.Values.replicaCount}}
+  selector:
+    matchLabels:
+      app: {{ template "dongtai.fullname" . }}-worker-es
+      {{- include "dongtai.labels" . | nindent 6 }}
+  template:
+    metadata:
+      labels:
+        app: {{ template "dongtai.fullname" . }}-worker-es
+        {{- include "dongtai.labels" . | nindent 8 }}
+    spec:
+      containers:
+        - name: {{ template "dongtai.fullname" . }}-worker-es-container
+          image: {{ .Values.images }}/dongtai-server:{{ .Values.tag }}
+          command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ]
+          args: [ "worker-es" ]
+          env:
+            - name: DONGTAI_CONCURRENCY
+              value: --concurrency=2
+          {{- include "deploy.config" . | nindent 10 }}
+      {{- include "deploy.config.vo" . | nindent 6 }}
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: {{ template "dongtai.fullname" . }}-logstash
+  namespace: {{.Release.Namespace}}
+  labels:
+    app: {{ template "dongtai.fullname" . }}-logstash
+    {{- include "dongtai.labels" . | nindent 4 }}
+spec:
+  replicas: {{.Values.replicaCount}}
+  selector:
+    matchLabels:
+      app: {{ template "dongtai.fullname" . }}-logstash
+      {{- include "dongtai.labels" . | nindent 6 }}
+  template:
+    metadata:
+      annotations:
+        build_number: "{{ template "dongtai.fullname" . }}"
+        {{- if not .Values.skipistio }}{{ include "dongtai.istiolabels" . }}{{ end }}
+      labels:
+        app: {{ template "dongtai.fullname" . }}-logstash
+        {{- include "dongtai.labels" . | nindent 8 }}
+    spec:
+      containers:
+        - image: {{ .Values.images }}/dongtai-logrotate:{{ .Values.tag }}
+          name: logrotate
+          securityContext:
+            runAsUser: 0
+          {{ include "deploy.imagePullPolicy" . }}
+          volumeMounts:
+            - name: {{ template "dongtai.fullname" . 
}}-log-path + mountPath: /tmp/logstash + - image: {{ .Values.images }}/dongtai-logstash:{{ .Values.tag }} + name: logstash + securityContext: + runAsUser: 0 + {{ include "deploy.imagePullPolicy" . }} + env: + - name: DATABASE + valueFrom: + configMapKeyRef: + key: database + name: logstash-cm + - name: USERNAME + valueFrom: + configMapKeyRef: + key: username + name: logstash-cm + - name: PASSWORD + valueFrom: + configMapKeyRef: + key: password + name: logstash-cm + ports: + - containerPort: 8082 + protocol: TCP + name: agent-http + - containerPort: 8083 + protocol: TCP + name: log-http + volumeMounts: + - name: {{ template "dongtai.fullname" . }}-log-path + mountPath: /tmp/logstash + resources: + requests: + cpu: 1000m + memory: 2000Mi + limits: + cpu: 2000m + memory: 4000Mi + livenessProbe: + failureThreshold: 1 + periodSeconds: 5 + successThreshold: 1 + tcpSocket: + port: 9600 + timeoutSeconds: 1 + readinessProbe: + failureThreshold: 3 + initialDelaySeconds: 30 + periodSeconds: 5 + successThreshold: 1 + tcpSocket: + port: 9600 + timeoutSeconds: 1 + startupProbe: + failureThreshold: 40 + periodSeconds: 5 + successThreshold: 1 + tcpSocket: + port: 9600 + timeoutSeconds: 1 + volumes: + - name: {{ template "dongtai.fullname" . }}-log-path + persistentVolumeClaim: + {{ include "deploy.config.persistentVolumeClaim" . }} +--- \ No newline at end of file diff --git a/deploy/kubernetes/helm/templates/dongtai-pv.yml b/deploy/kubernetes/helm/templates/dongtai-pv.yml new file mode 100644 index 000000000..955c739d4 --- /dev/null +++ b/deploy/kubernetes/helm/templates/dongtai-pv.yml @@ -0,0 +1,18 @@ +{{- if .Values.storage.storageClassName -}} +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: +# annotations: +#{{/* {{- include "dongtai.pv" . | nindent 4 }} */}} + name: {{.Values.storage.persistentVolumeClaim}} + namespace: {{ .Release.Namespace }} +spec: + accessModes: + - ReadWriteMany + resources: + requests: + storage: 2G + storageClassName: {{.Values.storage.storageClassName}} +--- +{{- end -}} diff --git a/deploy/kubernetes/helm/templates/dongtai-svc.yaml b/deploy/kubernetes/helm/templates/dongtai-svc.yaml new file mode 100644 index 000000000..920b26dc5 --- /dev/null +++ b/deploy/kubernetes/helm/templates/dongtai-svc.yaml @@ -0,0 +1,92 @@ +{{- if not .Values.skipMysql -}} +--- +apiVersion: v1 +kind: Service +metadata: + name: dongtai-mysql + namespace: {{.Release.Namespace}} + labels: + app: {{ template "dongtai.fullname" . }}-mysql +spec: + selector: + app: {{ template "dongtai.fullname" . }}-mysql + ports: + - protocol: TCP + port: 3306 + targetPort: 3306 +--- +{{- end -}} +{{- if not .Values.skipRedis -}} +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app: {{ template "dongtai.fullname" . }}-redis + name: dongtai-redis + namespace: {{.Release.Namespace}} +spec: + ports: + - port: 6379 + protocol: TCP + targetPort: 6379 + selector: + app: {{ template "dongtai.fullname" . }}-redis +--- +{{- end -}} +--- +apiVersion: v1 +kind: Service +metadata: + name: dongtai-server-svc + namespace: {{.Release.Namespace}} +spec: + ports: + - port: 80 + protocol: TCP + targetPort: 8000 + selector: + app: {{ template "dongtai.fullname" . }}-server + type: ClusterIP +--- +{{- if or (eq .Values.accessType "NodePort") (eq .Values.accessType "LoadBalancer") (eq .Values.accessType "ClusterIP") -}} +# Expose dongtai-web svc with {{ .Values.accessType }} +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app: {{ template "dongtai.fullname" . 
}}-web + name: dongtai-web-svc + namespace: {{ .Release.Namespace }} +spec: + ports: + - port: 80 + protocol: TCP + targetPort: 80 + selector: + app: {{ template "dongtai.fullname" . }}-web + type: {{ .Values.accessType }} +--- +{{- end -}} +--- +#dongtai-logstash服务 +apiVersion: v1 +kind: Service +metadata: + name: dongtai-logstash-svc + namespace: {{.Release.Namespace}} +spec: + type: ClusterIP + ports: + - name: agent-http + port: 8082 + targetPort: 8082 + protocol: TCP + - name: log-http + port: 8083 + targetPort: 8083 + protocol: TCP + selector: + app: {{ template "dongtai.fullname" . }}-logstash +--- diff --git a/logs/.gitignore b/deploy/kubernetes/helm/templates/tests/test-connection.yaml similarity index 100% rename from logs/.gitignore rename to deploy/kubernetes/helm/templates/tests/test-connection.yaml diff --git a/deploy/kubernetes/helm/values.yaml b/deploy/kubernetes/helm/values.yaml new file mode 100644 index 000000000..7f127f06c --- /dev/null +++ b/deploy/kubernetes/helm/values.yaml @@ -0,0 +1,46 @@ +# Default values for dongtai. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. + +replicaCount: 1 + +# dongtai config +# appNamespace: dongtai-iast +skipistio: true +skipMysql: false +skipRedis: false +accessType: ClusterIP +images: registry.cn-beijing.aliyuncs.com/huoxian_pub # or dongtai +tag: latest + +mysql: + host: dongtai-mysql + port: 3306 + name: dongtai_webapi + user: root + password: dongtai-iast + +redis: + host: dongtai-redis + port: 6379 + password: 123456 + db: 0 + +sca: + sca_token: + +storage: + storageClassName: null + # or You can choose to specify the storage class above, or create a custom multi-node read-write pvc below !!! + persistentVolumeClaim: iast-agent-pvc + +csrfTrustOrigins: .example.com +secretKey: vbjlvbxfvazjfprywuxgyclmvhtmselddsefxxlcixovmqfpgy + +smtp: + server: smtp_server + user: smtp_user + password: smtp_password + from_addr: from_addr + ssl: False + cc_addr: cc_addr \ No newline at end of file diff --git a/deploy/kubernetes/install.sh b/deploy/kubernetes/install.sh new file mode 100755 index 000000000..91a15889e --- /dev/null +++ b/deploy/kubernetes/install.sh @@ -0,0 +1,222 @@ +#!/bin/bash +CURRENT_PATH=$(cd "$(dirname "$0")" || exit;pwd) +SKIP_MYSQL=false +SKIP_REDIS=false +ACCESS_TYPE=ClusterIP +NAMESPACE=dongtai-iast +# latest_version="`wget -qO- -t1 -T2 "https://api.github.com/repos/HXSecurity/DongTai/releases/latest" | jq -r '.tag_name'`" +# REALEASE_VERSION=${latest_version:1} +Info(){ + echo -e "[Info] $1" 2>&1 +} + +Error(){ + echo -e "\033[31m[Error] $1 \033[0m" +} + +Todo(){ + echo -e "\033[36m[Todo] $1 \033[0m" +} + +Notice(){ + echo -e "\033[33m[Important] $1 \033[0m" +} + +while getopts ":m:s:n:h" optname +do + case "$optname" in + "n") + NAMESPACE=$OPTARG + ;; + "m") + ACCESS_TYPE=$OPTARG + ;; + "s") + SKIP=$OPTARG + array=(${SKIP//,/ }) + for var in "${array[@]}" + do + if [ "$var" == "mysql" ]; then + SKIP_MYSQL=true + elif [ "$var" == "redis" ]; then + SKIP_REDIS=true + fi + done + ;; + "h") + Info "Usage: ./install.sh -m ClusterIP -s mysql -n dongtai-iast" + exit 1 + ;; + ":") + Error "No argument value for option $OPTARG" + ;; + "?") + Error "Unknown option $OPTARG" + ;; + *) + Error "Unknown error while processing options" + ;; + esac +done + +OUT="$(uname -s)" + +case "${OUT}" in + Linux*) machine=Linux;; + Darwin*) machine=Mac;; + CYGWIN*) machine=Cygwin;; + MINGW*) machine=MinGw;; + *) machine="UNKNOWN:${OUT}" +esac + +cd "$CURRENT_PATH" || exit + +check_env(){ + if ! 
[ -x "$(command -v kubectl)" ]; then + Error "kubectl not installed." + exit 1 + fi + context=$(kubectl config current-context) + Info "Current context: $context" + +} + +check_permission(){ + Info "Checking kubernetes resources permission ..." + auths=("secrets" "deployments" "configmaps" "namespaces" "StatefulSet" "Service" ) + for auth in "${auths[@]}"; + do + out=$(kubectl auth can-i create "$auth") + if [ "$out" != "yes" ]; then + Error "No permission to create $auth." + exit 1 + fi + done +} + +get_latest_image_tag_from_dockerhub() { + image="$1" + # Info "start to get latest tag of $image" + tags=`wget -q https://registry.hub.docker.com/v1/repositories/dongtai/${image}/tags -O - | sed -e 's/[][]//g' -e 's/"//g' -e 's/ //g' | tr '}' '\n' | awk -F: '{print $3}'` + + if [ -n "$2" ] + then + tags=` echo "${tags}" | grep "$2" ` + fi + echo "${tags}" | grep -E '^([0-9]+\.){0,2}(\*|[0-9]+)$' | tail -n 1 +} + + +deploy(){ + cd "$CURRENT_PATH" || exit + ORIG=$1 + FILENAME="$CURRENT_PATH/manifest/$1" + NEW_FILENAME="$FILENAME.temp" + NEW_NAMESPACE=$2 + cp "$FILENAME" "$NEW_FILENAME" + Info "Copying temporary file $NEW_FILENAME ..." + + if [ "${machine}" == "Mac" ]; then + case $ORIG in + "2.deploy-redis.yml") + TAG=$(get_latest_image_tag_from_dockerhub dongtai-redis) + sed -i "" "s/CHANGE_THIS_NAMESPACE/$NEW_NAMESPACE/g" "$NEW_FILENAME" >/dev/null + sed -i "" "s/CHANGE_THIS_VERSION/$TAG/g" "$NEW_FILENAME" >/dev/null + ;; + "3.deploy-mysql.yml") + MYSQL_TAG=$(get_latest_image_tag_from_dockerhub dongtai-mysql) + sed -i "" "s/CHANGE_THIS_NAMESPACE/$NEW_NAMESPACE/g" "$NEW_FILENAME" >/dev/null + sed -i "" "s/CHANGE_THIS_VERSION/$MYSQL_TAG/g" "$NEW_FILENAME" >/dev/null + ;; + "4.deploy-iast-server.yml") + WEB_TAG=$(get_latest_image_tag_from_dockerhub dongtai-web) + SERVER_TAG=$(get_latest_image_tag_from_dockerhub dongtai-server) + LOGSTASH_TAG=$(get_latest_image_tag_from_dockerhub dongtai-logstash) + LOGROTATE_TAG=$(get_latest_image_tag_from_dockerhub dongtai-logrotate) + sed -i "" "s/CHANGE_THIS_NAMESPACE/$NEW_NAMESPACE/g" "$NEW_FILENAME" >/dev/null + sed -i "" "s/dongtai-web:CHANGE_THIS_VERSION/dongtai-web:$WEB_TAG/g" "$NEW_FILENAME" >/dev/null + sed -i "" "s/dongtai-server:CHANGE_THIS_VERSION/dongtai-server:$SERVER_TAG/g" "$NEW_FILENAME" >/dev/null + sed -i "" "s/dongtai-logstash:CHANGE_THIS_VERSION/dongtai-logstash:$LOGSTASH_TAG/g" "$NEW_FILENAME" >/dev/null + sed -i "" "s/dongtai-logrotate:CHANGE_THIS_VERSION/dongtai-logrotate:$LOGROTATE_TAG/g" "$NEW_FILENAME" >/dev/null + ;; + *) + sed -i "" "s/CHANGE_THIS_NAMESPACE/$NEW_NAMESPACE/g" "$NEW_FILENAME" >/dev/null + esac + elif [ "${machine}" == "Linux" ]; then + case $ORIG in + "2.deploy-redis.yml") + sed -i "s/CHANGE_THIS_NAMESPACE/$NEW_NAMESPACE/g" "$NEW_FILENAME" >/dev/null + sed -i "s/CHANGE_THIS_VERSION/$(get_latest_image_tag_from_dockerhub dongtai-redis)/g" "$NEW_FILENAME" >/dev/null + ;; + "3.deploy-mysql.yml") + sed -i "s/CHANGE_THIS_NAMESPACE/$NEW_NAMESPACE/g" "$NEW_FILENAME" >/dev/null + sed -i "s/CHANGE_THIS_VERSION/$(get_latest_image_tag_from_dockerhub dongtai-mysql)/g" "$NEW_FILENAME" >/dev/null + ;; + "4.deploy-iast-server.yml") + WEB_TAG=$(get_latest_image_tag_from_dockerhub dongtai-web) + SERVER_TAG=$(get_latest_image_tag_from_dockerhub dongtai-server) + LOGSTASH_TAG=$(get_latest_image_tag_from_dockerhub dongtai-logstash) + LOGROTATE_TAG=$(get_latest_image_tag_from_dockerhub dongtai-logrotate) + sed -i "s/CHANGE_THIS_NAMESPACE/$NEW_NAMESPACE/g" "$NEW_FILENAME" >/dev/null + sed -i 
"s/dongtai-web:CHANGE_THIS_VERSION/dongtai-web:$WEB_TAG/g" "$NEW_FILENAME" >/dev/null + sed -i "s/dongtai-server:CHANGE_THIS_VERSION/dongtai-server:$SERVER_TAG/g" "$NEW_FILENAME" >/dev/null + sed -i "" "s/dongtai-logstash:CHANGE_THIS_VERSION/dongtai-logstash:$LOGSTASH_TAG/g" "$NEW_FILENAME" >/dev/null + sed -i "" "s/dongtai-logrotate:CHANGE_THIS_VERSION/dongtai-logrotate:$LOGROTATE_TAG/g" "$NEW_FILENAME" >/dev/null + ;; + *) + sed -i "s/CHANGE_THIS_NAMESPACE/$NEW_NAMESPACE/g" "$NEW_FILENAME" >/dev/null + esac + else + Error "Unsupported shell version." + rm "$NEW_FILENAME" + exit 1 + fi + + kubectl apply -f "$NEW_FILENAME" + + Info "Cleaning temporary file $NEW_FILENAME ..." + rm "$NEW_FILENAME" +} + +start_deploy(){ + Notice "NAMESPACE: $NAMESPACE, ACCESS_TYPE:$ACCESS_TYPE, SKIP_MYSQL:$SKIP_MYSQL, SKIP_REDIS:$SKIP_REDIS" + Info "Starting deploy to kubernetes ..." + deploy "1.create-namespace.yml" "$NAMESPACE" + if [ $SKIP_REDIS == false ]; then + deploy "2.deploy-redis.yml" "$NAMESPACE" + fi + if [ $SKIP_MYSQL == false ]; then + deploy "3.deploy-mysql.yml" "$NAMESPACE" + fi + deploy "4.deploy-iast-server.yml" "$NAMESPACE" +} +SERVICE_TYPES="NodePort LoadBalancer ClusterIP" +expose_services(){ + if [[ "$SERVICE_TYPES" =~ "$ACCESS_TYPE" ]] + then + kubectl expose deployments/dongtai-web --name=dongtai-web-pub-svc --port=8000 --target-port=80 -n "$NAMESPACE" --type="$ACCESS_TYPE" + # kubectl expose deployments/dongtai-openapi --name=dongtai-openapi-pub-svc --port=8000 --target-port=8000 -n "$NAMESPACE" --type="$ACCESS_TYPE" + else + Error "-m option: $SERVICE_TYPES" + fi +} + +check_env +check_permission +start_deploy +expose_services + +Info "Installation success!" +Todo "Check services status few minutes later: kubectl get po -n $NAMESPACE" + +if [ "$ACCESS_TYPE" == "NodePort" ]; then + Info "Available node ip:" + kubectl get nodes -o wide | awk {'print $7'} | column -t + Info "dongtai-web service port:] $(kubectl get svc dongtai-web-pub-svc -n "$NAMESPACE" -o=jsonpath='{.spec.ports[0].nodePort}')" + # Info "dongtai-openapi service port:] $(kubectl get svc dongtai-openapi-pub-svc -n "$NAMESPACE" -o=jsonpath='{.spec.ports[0].nodePort}')" + +elif [ "$ACCESS_TYPE" == "LoadBalancer" ]; then + Todo "Get EXTERNAL-IP ip or dns by: kubectl get svc dongtai-web-pub-svc -n $NAMESPACE" +else + Todo "Your should expose your service [dongtai-web-pub-svc] manually." 
+fi diff --git a/deploy/kubernetes/manifest/1.create-namespace.yml b/deploy/kubernetes/manifest/1.create-namespace.yml new file mode 100644 index 000000000..c9b743b5f --- /dev/null +++ b/deploy/kubernetes/manifest/1.create-namespace.yml @@ -0,0 +1,9 @@ +--- +# 创建命名空间 +apiVersion: v1 +kind: Namespace +metadata: + name: CHANGE_THIS_NAMESPACE +spec: + finalizers: + - kubernetes diff --git a/deploy/kubernetes/manifest/2.deploy-redis.yml b/deploy/kubernetes/manifest/2.deploy-redis.yml new file mode 100644 index 000000000..8a95a0461 --- /dev/null +++ b/deploy/kubernetes/manifest/2.deploy-redis.yml @@ -0,0 +1,86 @@ +--- +apiVersion: v1 +data: + redis.conf: | + bind 0.0.0.0 + port 6379 + requirepass 123456 + pidfile .pid + appendonly yes + cluster-config-file nodes-6379.conf + pidfile /data/middleware-data/redis/log/redis-6379.pid + cluster-config-file /data/middleware-data/redis/conf/redis.conf + dir /data/middleware-data/redis/data/ + logfile "/data/middleware-data/redis/log/redis-6379.log" + cluster-node-timeout 5000 + protected-mode no +kind: ConfigMap +metadata: + name: redis-conf + namespace: CHANGE_THIS_NAMESPACE +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: redis + namespace: CHANGE_THIS_NAMESPACE +spec: + selector: + matchLabels: + app: redis + serviceName: redis + template: + metadata: + labels: + app: redis + spec: + containers: + - command: [ "/bin/sh","-c","exec redis-server /data/middleware-data/redis/conf/redis.conf" ] + image: dongtai/dongtai-redis:CHANGE_THIS_VERSION + imagePullPolicy: Always + name: redis + ports: + - containerPort: 6379 + name: redis + protocol: TCP + volumeMounts: + - mountPath: /data/middleware-data/redis/conf/ + name: redis-config + - mountPath: /data/middleware-data/redis/ + name: data + dnsPolicy: ClusterFirst + initContainers: + - command: [ "/bin/sh","-c","mkdir -p /data/middleware-data/redis/log/ /data/middleware-data/redis/conf/ /data/middleware-data/redis/data/" ] + image: busybox:latest + imagePullPolicy: Always + name: init-redis + volumeMounts: + - mountPath: /data/middleware-data/redis/ + name: data + volumes: + - configMap: + defaultMode: 420 + name: redis-conf + name: redis-config + - hostPath: + path: /data/middleware-data/redis/ + type: "" + name: data + +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app: redis + statefulset.kubernetes.io/pod-name: redis-0 + name: dongtai-redis + namespace: CHANGE_THIS_NAMESPACE +spec: + ports: + - port: 6379 + protocol: TCP + targetPort: 6379 + selector: + app: redis + statefulset.kubernetes.io/pod-name: redis-0 diff --git a/deploy/kubernetes/manifest/3.deploy-mysql.yml b/deploy/kubernetes/manifest/3.deploy-mysql.yml new file mode 100644 index 000000000..0522eebf7 --- /dev/null +++ b/deploy/kubernetes/manifest/3.deploy-mysql.yml @@ -0,0 +1,39 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mysql + namespace: CHANGE_THIS_NAMESPACE +spec: + selector: + matchLabels: + app: mysql + strategy: + type: Recreate + template: + metadata: + labels: + app: mysql + spec: + containers: + - image: dongtai/dongtai-mysql:CHANGE_THIS_VERSION + name: mysql-container + imagePullPolicy: Always + ports: + - containerPort: 3306 + name: mysql +--- +apiVersion: v1 +kind: Service +metadata: + name: dongtai-mysql + namespace: CHANGE_THIS_NAMESPACE + labels: + app: mysql +spec: + selector: + app: mysql + ports: + - protocol: TCP + port: 3306 + targetPort: 3306 \ No newline at end of file diff --git a/deploy/kubernetes/manifest/4.deploy-iast-server.yml 
b/deploy/kubernetes/manifest/4.deploy-iast-server.yml new file mode 100644 index 000000000..e60766e83 --- /dev/null +++ b/deploy/kubernetes/manifest/4.deploy-iast-server.yml @@ -0,0 +1,653 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + annotations: + pv.kubernetes.io/bind-completed: "yes" + pv.kubernetes.io/bound-by-controller: "yes" + name: app-agent-pvc + namespace: CHANGE_THIS_NAMESPACE +spec: + accessModes: + - ReadWriteMany + resources: + requests: + storage: 2G + storageClassName: #您的存储类名字 + #volumeMode: Filesystem + #volumeName: 您的pv名字 +--- +# 创建配置文件 +apiVersion: v1 +data: + # 您的数据库连接地址/您的数据库名字 + database: dongtai-mysql:3306/dongtai_webapi + username: root + password: dongtai-iast +kind: ConfigMap +metadata: + name: logstash-cm + namespace: CHANGE_THIS_NAMESPACE +--- +apiVersion: v1 +data: + config.ini: |- + [mysql] + host = dongtai-mysql + port = 3306 + name = dongtai_webapi + user = root + password = dongtai-iast + + [redis] + host = dongtai-redis + port = 6379 + password = 123456 + db = 0 + + [engine] + url = http://dongtai-engine-svc:80 + + [apiserver] + #url = http://dongtai-openapi-svc:80 + url = http://dongtai-web-pub-svc:80 + + [security] + csrf_trust_origins = .huoxian.cn:1024,.huoxian.cn,.example.com + + [smtp] + server = smtp_server + user = smtp_user + password = smtp_password + from_addr = from_addr + ssl = False + cc_addr = cc_addr + port = 2525 + + [sca] + #https://iast.huoxian.cn/openapi/sca/v1 + base_url = https://sca.huoxian.cn/ + timeout = 5 + token = + + + [task] + retryable = true + max_retries = 3 + async_send = true + async_send_delay = 5 + + [log_service] + host = dongtai-logstash + port = 8083 + + [common_file_path] + tmp_path = /tmp/logstash + report_img = report/img + report_pdf = report/pdf + report_word = report/word + report_excel = report/excel + + [other] + domain = http://localhost.domain/ + demo_session_cookie_domain = .huoxian.cn + logging_level = INFO + cache_preheat = True + + [elastic_search] + enable = false + host = http://dongtai:dongtaies@dongtaies:9200 + vulnerability_index = alias-dongtai-v1-vulnerability-dev + asset_aggr_index = alias-dongtai-v1-asset-aggr-dev + asset_index = alias-dongtai-v1-asset-dev + method_pool_index = alias-dongtai-v1-method-pool-dev + asset_vul_index = alias-dongtai-v1-asset-vul-dev + +kind: ConfigMap +metadata: + name: dongtai-iast-config.ini + namespace: CHANGE_THIS_NAMESPACE +--- +# dongtai-server服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-server + namespace: CHANGE_THIS_NAMESPACE + annotations: + kubesphere.io/description: dongtai-server + labels: + app: dongtai-server +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-server + template: + metadata: + labels: + app: dongtai-server + spec: + containers: + - name: dongtai-server-container + image: dongtai/dongtai-server:CHANGE_THIS_VERSION + imagePullPolicy: Always + env: + - name: DONGTAI_CONCURRENCY + value: --processes 4 + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + - name: log-path + mountPath: /tmp/logstash + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + - name: log-path + persistentVolumeClaim: + claimName: app-agent-pvc +--- +# dongtai-web服务 +apiVersion: v1 +kind: Service +metadata: + name: dongtai-server-svc + namespace: CHANGE_THIS_NAMESPACE +spec: + ports: + - port: 80 + protocol: TCP + targetPort: 8000 + 
selector: + app: dongtai-server + type: ClusterIP +--- +# 创建nginx.conf配置 +apiVersion: v1 +data: + nginx.conf: |- + #user nobody; + worker_processes auto; + events { + worker_connections 65535; + } + http { + include mime.types; + default_type application/octet-stream; + sendfile on; + keepalive_timeout 65; + + #gzip on; + gzip on; + gzip_min_length 5k; + gzip_buffers 4 16k; + #gzip_http_version 1.0; + gzip_comp_level 3; + gzip_types text/plain application/javascript application/x-javascript text/css application/xml text/javascript application/x-httpd-php image/jpeg image/gif image/png; + gzip_vary on; + + server { + listen 80; + server_name 0.0.0.0; + client_max_body_size 100M; + location / { + root /usr/share/nginx/html; #站点目录 + index index.html index.htm; #添加属性。 + try_files $uri $uri/ /index.html; + } + location /api/ { + proxy_read_timeout 60; + proxy_pass http://dongtai-server-svc:80/api/; + } + location /upload/ { + proxy_pass http://dongtai-server-svc:80/upload/; + } + location /openapi/ { + proxy_set_header X-real-ip $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header User-Agent $http_user_agent; + proxy_set_header X-Host $http_x_forwarded_host; + proxy_read_timeout 60; + proxy_pass http://dongtai-server-svc:80/; + } + location /log/ { + proxy_pass http://dongtai-logstash-svc:8082/; + } + location = /50x.html { + root /usr/share/nginx/html; + } + } + } +kind: ConfigMap +metadata: + name: dongtai-web-nginx-conf + namespace: CHANGE_THIS_NAMESPACE +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-web + namespace: CHANGE_THIS_NAMESPACE + annotations: + kubesphere.io/description: dongtai前端项目 + labels: + app: dongtai-web +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-web + template: + metadata: + labels: + app: dongtai-web + spec: + containers: + - name: dongtai-web-container + image: dongtai/dongtai-web:CHANGE_THIS_VERSION + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /etc/nginx/nginx.conf + subPath: nginx.conf + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-web-nginx-conf +--- +#dongtai-worker-task服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-task + namespace: CHANGE_THIS_NAMESPACE + annotations: + kubesphere.io/description: dongtai-worker-task + labels: + app: dongtai-worker-task +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-task + template: + metadata: + labels: + app: dongtai-worker-task + spec: + containers: + - name: dongtai-worker-task-container + image: dongtai/dongtai-server:CHANGE_THIS_VERSION + command: [ "/bin/bash","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "beat" ] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini +--- +# dongtai-worker-high-freq服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-high-freq + namespace: CHANGE_THIS_NAMESPACE + annotations: + kubesphere.io/description: dongtai-worker-high-freq + labels: + app: dongtai-worker-high-freq +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-high-freq + template: + metadata: + labels: + app: dongtai-worker-high-freq + spec: + 
containers: + - name: dongtai-worker-high-freq-container + image: dongtai/dongtai-server:CHANGE_THIS_VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-high-freq" ] + imagePullPolicy: Always + env: + - name: DONGTAI_CONCURRENCY + value: -P gevent --concurrency=121 + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "1000m" + memory: 2000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini +--- +# dongtai-worker-beat服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-beat + namespace: CHANGE_THIS_NAMESPACE + annotations: + kubesphere.io/description: dongtai-worker-beat + labels: + app: dongtai-worker-beat +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-beat + template: + metadata: + labels: + app: dongtai-worker-beat + spec: + containers: + - name: dongtai-worker-beat-container + image: dongtai/dongtai-server:CHANGE_THIS_VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-beat" ] + imagePullPolicy: Always + env: + - name: DONGTAI_CONCURRENCY + value: --concurrency=12 + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini + +--- +# dongtai-worker-other服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-other + namespace: CHANGE_THIS_NAMESPACE + annotations: + kubesphere.io/description: dongtai-worker-other + labels: + app: dongtai-worker-other +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-other + template: + metadata: + labels: + app: dongtai-worker-other + spec: + containers: + - name: dongtai-worker-other-container + image: dongtai/dongtai-server:CHANGE_THIS_VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-other" ] + env: + - name: DONGTAI_CONCURRENCY + value: --concurrency=12 + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini +--- +# dongtai-worker-sca服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-worker-sca + namespace: CHANGE_THIS_NAMESPACE + annotations: + kubesphere.io/description: dongtai-worker-sca + labels: + app: dongtai-worker-sca +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-sca + template: + metadata: + labels: + app: dongtai-worker-sca + spec: + containers: + - name: dongtai-worker-sca-container + image: dongtai/dongtai-server:CHANGE_THIS_VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-sca" ] + imagePullPolicy: Always + env: + - name: DONGTAI_CONCURRENCY + value: --concurrency=12 + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini +--- +# dongtai-worker-es服务 +apiVersion: apps/v1 +kind: 
Deployment +metadata: + name: dongtai-worker-es + namespace: CHANGE_THIS_NAMESPACE + annotations: + kubesphere.io/description: dongtai-worker-es + labels: + app: dongtai-worker-es +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-worker-es + template: + metadata: + labels: + app: dongtai-worker-es + spec: + containers: + - name: dongtai-worker-es-container + image: dongtai/dongtai-server:CHANGE_THIS_VERSION + command: [ "/bin/sh","/opt/dongtai/deploy/docker/entrypoint.sh" ] + args: [ "worker-es" ] + imagePullPolicy: Always + env: + - name: DONGTAI_CONCURRENCY + value: --concurrency=12 + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/dongtai_conf/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini +--- +#dongtai-logstash服务 +apiVersion: v1 +kind: Service +metadata: + name: dongtai-logstash-svc + namespace: CHANGE_THIS_NAMESPACE +spec: + type: ClusterIP + ports: + - name: agent-http + port: 8082 + targetPort: 8082 + protocol: TCP + - name: log-http + port: 8083 + targetPort: 8083 + protocol: TCP + selector: + app: dongtai-logstash +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-logstash + namespace: CHANGE_THIS_NAMESPACE + labels: + app: dongtai-logstash +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-logstash + template: + metadata: + annotations: + build_number: "dongtai-logstash" + labels: + app: dongtai-logstash + spec: + containers: + - image: dongtai/dongtai-logstash:CHANGE_THIS_VERSION + name: logstash + securityContext: + runAsUser: 0 + imagePullPolicy: Always + env: + - name: DATABASE + valueFrom: + configMapKeyRef: + key: database + name: logstash-cm + - name: USERNAME + valueFrom: + configMapKeyRef: + key: username + name: logstash-cm + - name: PASSWORD + valueFrom: + configMapKeyRef: + key: password + name: logstash-cm + ports: + - containerPort: 8082 + protocol: TCP + name: agent-http + - containerPort: 8083 + protocol: TCP + name: log-http + volumeMounts: + - name: log-path + mountPath: /tmp/logstash + resources: + requests: + cpu: 1500m + memory: 2200Mi + limits: + cpu: 2000m + memory: 3200Mi + livenessProbe: + failureThreshold: 1 + periodSeconds: 5 + successThreshold: 1 + tcpSocket: + port: 9600 + timeoutSeconds: 1 + readinessProbe: + failureThreshold: 3 + initialDelaySeconds: 30 + periodSeconds: 5 + successThreshold: 1 + tcpSocket: + port: 9600 + timeoutSeconds: 1 + startupProbe: + failureThreshold: 40 + periodSeconds: 5 + successThreshold: 1 + tcpSocket: + port: 9600 + timeoutSeconds: 1 + - image: dongtai/dongtai-logrotate:CHANGE_THIS_VERSION + name: logrotate + securityContext: + runAsUser: 0 + imagePullPolicy: Always + volumeMounts: + - name: log-path + mountPath: /tmp/logstash + volumes: + - name: log-path + persistentVolumeClaim: + claimName: app-agent-pvc +--- \ No newline at end of file diff --git a/deploy/kubernetes/upgrade-legacy.yaml b/deploy/kubernetes/upgrade-legacy.yaml new file mode 100644 index 000000000..62f49a45e --- /dev/null +++ b/deploy/kubernetes/upgrade-legacy.yaml @@ -0,0 +1,298 @@ +--- +# 创建配置文件 +apiVersion: v1 +data: + config.ini: |- + [mysql] + host = {{dongtai-mysql}} + port = {{3306}} + name = {{dongtai_webapi}} + user = {{root}} + password = {{dongtai-iast}} + + [redis] + host = {{dongtai-redis}} + port = {{6379}} + password = {{123456}} + db = 0 + + [engine] + url = http://dongtai-engine-svc:80 + + [apiserver] + url = 
http://dongtai-server-svc:80 + + [security] + csrf_trust_origins = .example.com + secret_key = vbjlvbxfvazjfprywuxgyclmvhtmselddsefxxlcixovmqfpgy + + [smtp] + server = smtp_server + user = smtp_user + password = smtp_password + from_addr = from_addr + ssl = False + cc_addr = cc_addr + port = 25 + + [sca] + base_url = https://iast.huoxian.cn/openapi/sca/v1 + +kind: ConfigMap +metadata: + name: dongtai-iast-config.ini + namespace: {{CHANGE_THIS_NAMESPACE}} +--- +# dongtai-server服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-server + namespace: {{CHANGE_THIS_NAMESPACE}} + annotations: + kubesphere.io/description: dongtai-server + labels: + app: dongtai-server +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-server + template: + metadata: + labels: + app: dongtai-server + spec: + containers: + - name: dongtai-server-container + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{{CHANGE_THIS_VERSION}} + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/webapi/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini +--- +# dongtai-web服务 +apiVersion: v1 +kind: Service +metadata: + name: dongtai-server-svc + namespace: {{CHANGE_THIS_NAMESPACE}} +spec: + ports: + - port: 80 + protocol: TCP + targetPort: 8000 + selector: + app: dongtai-server + type: ClusterIP +--- +# 创建nginx.conf配置 +apiVersion: v1 +data: + nginx.conf: |- + #user nobody; + worker_processes 1; + events { + worker_connections 1024; + } + http { + include mime.types; + default_type application/octet-stream; + sendfile on; + keepalive_timeout 65; + + #gzip on; + gzip on; + gzip_min_length 5k; + gzip_buffers 4 16k; + #gzip_http_version 1.0; + gzip_comp_level 3; + gzip_types text/plain application/javascript application/x-javascript text/css application/xml text/javascript application/x-httpd-php image/jpeg image/gif image/png; + gzip_vary on; + + server { + listen 80; + server_name 127.0.0.1; + client_max_body_size 100M; + location / { + root /usr/share/nginx/html; #站点目录 + index index.html index.htm; #添加属性。 + try_files $uri $uri/ /index.html; + } + + location /api/ { + proxy_read_timeout 60; + proxy_pass http://dongtai-server-svc:80/api/; + } + + location /upload/ { + proxy_pass http://dongtai-server-svc:80/upload/; + } + + location /openapi/ { + proxy_set_header X-real-ip $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header User-Agent $http_user_agent; + proxy_set_header X-Host $http_x_forwarded_host; + proxy_read_timeout 60; + proxy_pass http://dongtai-server-svc:80/; + } + + location = /50x.html { + root /usr/share/nginx/html; + } + } + } +kind: ConfigMap +metadata: + name: dongtai-web-nginx-conf + namespace: {{CHANGE_THIS_NAMESPACE}} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-web + namespace: {{CHANGE_THIS_NAMESPACE}} + annotations: + kubesphere.io/description: dongtai前端项目 + labels: + app: dongtai-web +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-web + template: + metadata: + labels: + app: dongtai-web + spec: + containers: + - name: dongtai-web-container + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-web:{{CHANGE_THIS_VERSION}} + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /etc/nginx/nginx.conf + subPath: nginx.conf + resources: + limits: + cpu: "500m" + 
memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-web-nginx-conf +--- +# dongtai-engine服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-engine + namespace: {{CHANGE_THIS_NAMESPACE}} + annotations: + kubesphere.io/description: dongtai-engine + labels: + app: dongtai-engine +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-engine + template: + metadata: + labels: + app: dongtai-engine + spec: + containers: + - name: dongtai-engine-container + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{{CHANGE_THIS_VERSION}} + command: ["/bin/sh", "/opt/dongtai/webapi/docker/entrypoint.sh"] + args: ["worker"] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/webapi/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini +--- +apiVersion: v1 +kind: Service +metadata: + name: dongtai-engine-svc + namespace: {{CHANGE_THIS_NAMESPACE}} +spec: + ports: + - port: 80 + protocol: TCP + targetPort: 8000 + selector: + app: dongtai-engine + type: ClusterIP +--- +#dongtai-engine-task服务 +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dongtai-engine-task + namespace: {{CHANGE_THIS_NAMESPACE}} + annotations: + kubesphere.io/description: dongtai-engine-task + labels: + app: dongtai-engine-task +spec: + replicas: 1 + selector: + matchLabels: + app: dongtai-engine-task + template: + metadata: + labels: + app: dongtai-engine-task + spec: + containers: + - name: dongtai-engine-task-container + image: registry.cn-beijing.aliyuncs.com/huoxian_pub/dongtai-server:{{CHANGE_THIS_VERSION}} + command: ["/bin/bash", "/opt/dongtai/webapi/docker/entrypoint.sh"] + args: ["beat"] + imagePullPolicy: Always + volumeMounts: + - name: configfile + mountPath: /opt/dongtai/webapi/conf/config.ini + subPath: config.ini + resources: + limits: + cpu: "500m" + memory: 1000Mi + requests: + cpu: "500m" + memory: 1000Mi + volumes: + - name: configfile + configMap: + name: dongtai-iast-config.ini diff --git a/deploy/latest_image.sh b/deploy/latest_image.sh new file mode 100755 index 000000000..282e0fcd2 --- /dev/null +++ b/deploy/latest_image.sh @@ -0,0 +1,8 @@ +#!/bin/bash +images=("dongtai-server" "dongtai-web" "dongtai-mysql" "dongtai-redis" "dongtai-logrotate" "dongtai-logstash") +for image in ${images[*]}; do + tags=`wget -q https://registry.hub.docker.com/v1/repositories/dongtai/${image}/tags -O - | sed -e 's/[][]//g' -e 's/"//g' -e 's/ //g' | tr '}' '\n' | awk -F: '{print $3}' | grep -E '^([0-9]+\.){0,2}(\*|[0-9]+)$' | tail -n 1 ` + echo "$image" + echo -e "\t dongtai/$image:$tags" + echo -e "\t registry.cn-beijing.aliyuncs.com/huoxian_pub/$image:$tags" +done \ No newline at end of file diff --git a/dongtai_common/__init__.py b/dongtai_common/__init__.py new file mode 100644 index 000000000..e6110813f --- /dev/null +++ b/dongtai_common/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/12 下午5:49 + +default_app_config = 'dongtai_common.apps.DongTaiConfig' diff --git a/dongtai_common/apps.py b/dongtai_common/apps.py new file mode 100644 index 000000000..784ab19dc --- /dev/null +++ b/dongtai_common/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class DongTaiConfig(AppConfig): + name = 'dongtai_common' diff --git 
a/dongtai_common/common/__init__.py b/dongtai_common/common/__init__.py new file mode 100644 index 000000000..a3a4eb0f4 --- /dev/null +++ b/dongtai_common/common/__init__.py @@ -0,0 +1,9 @@ + +# loop base data return +def baseLoopResult(result_query): + result = {} + for item in result_query: + ind = str(item['id']) + del item['id'] + result[ind] = item + return result \ No newline at end of file diff --git a/dongtai_common/common/utils/__init__.py b/dongtai_common/common/utils/__init__.py new file mode 100644 index 000000000..3d835c366 --- /dev/null +++ b/dongtai_common/common/utils/__init__.py @@ -0,0 +1,90 @@ +class DongTaiAppConfigPatch(): + def ready(self): + try: + from dongtai_conf.plugin import monkey_patch + monkey_patch(self.name) + except ImportError as e: + print(e) + pass + + + +class CSPMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + response = self.get_response(request) + response[ + 'Content-Security-Policy'] = "default-src * ; img-src *;media-src *;script-src 'self' cdn.jsdelivr.net 'unsafe-inline'" + return response + +from django.core.cache import cache +from functools import wraps + +import copy + +def make_hash(obj): + """Make a hash from an arbitrary nested dictionary, list, tuple or + set. + + """ + if isinstance(obj, set) or isinstance(obj, tuple) or isinstance(obj, list): + return hash(tuple([make_hash(e) for e in obj])) + elif isinstance(obj, str): + return hash(tuple(ord(i) for i in obj)) + elif not isinstance(obj, dict): + return hash(obj) + + new_obj = copy.deepcopy(obj) + for k, v in new_obj.items(): + new_obj[k] = make_hash(v) + + return hash(tuple(frozenset(new_obj.items()))) + + +def cached(function, + random_range: tuple = (50, 100), + use_celery_update: bool = False): + """Return a version of this function that caches its results for + the time specified. 
+ + >>> def foo(x): print "called"; return 1 + >>> cached(foo)('whatever') + called + 1 + >>> cached(foo)('whatever') + 1 + + """ + import random + from dongtai_engine.preheat import function_flush + + @wraps(function) + def get_cache_or_call(*args, **kwargs): + # known bug: if the function returns None, we never save it in + # the cache + cache_key = make_hash( + (function.__module__ + function.__name__, args, kwargs)) + cached_result = cache.get(cache_key) + if random_range: + cache_time = random.randint(*random_range) + if use_celery_update: + function_flush.apply_async(args=(function.__module__, + function.__name__, cache_time, + tuple(args), kwargs)) + if cached_result is None: + result = function(*args, **kwargs) + cache.set(cache_key, result, cache_time) + return result + else: + return cached_result + + get_cache_or_call.__origin__name__ = 'cached' + get_cache_or_call.__random_range__ = random_range + return get_cache_or_call + + +def cached_decorator(random_range, use_celery_update=False): + return lambda x: cached( + x, random_range, use_celery_update=use_celery_update) diff --git a/dongtai_common/endpoint/__init__.py b/dongtai_common/endpoint/__init__.py new file mode 100644 index 000000000..398b9c665 --- /dev/null +++ b/dongtai_common/endpoint/__init__.py @@ -0,0 +1,345 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/16 下午4:45 +# project: dongtai +import json +import logging + +from django.contrib.admin.models import LogEntryManager, LogEntry, CHANGE +from django.contrib.contenttypes.models import ContentType +from django.core.paginator import Paginator +from django.db.models import QuerySet +from django.http import JsonResponse +from django.views.decorators.csrf import csrf_exempt +from dongtai_common.models import User +from dongtai_common.models.agent import IastAgent +from rest_framework.authentication import SessionAuthentication, TokenAuthentication +from rest_framework.views import APIView +from rest_framework import status, exceptions +from django.core.paginator import PageNotAnInteger, EmptyPage + +from dongtai_common.models.asset import Asset +from dongtai_common.models.asset_aggr import AssetAggr +from dongtai_common.models.asset_vul import IastVulAssetRelation, IastAssetVul +from dongtai_common.permissions import UserPermission, ScopedPermission, SystemAdminPermission, TalentAdminPermission +from dongtai_common.utils import const +from django.utils.translation import gettext_lazy as _ +from django.db.models import Q, Count + +logger = logging.getLogger('dongtai-core') + + +class EndPoint(APIView): + """ + 基于APIView封装的API入口处理类,需要针对请求进行统一处理的都通过该类实现 + """ + name = "api-v1" + description = "ApiServer接口" + + def __init__(self, **kwargs): + + """ + Constructor. Called in the URLconf; can contain helpful extra + keyword arguments, and other things. + """ + # Go through keyword arguments, and either save their values to our + # instance, or raise an error. + super().__init__(**kwargs) + self.log_manager = LogEntryManager() + self.log_manager.model = LogEntry + + def load_json_body(self, request): + """ + Attempts to load the request body when it's JSON. + + The end result is ``request.json_body`` having a value. When it can't + load the body as JSON, for any reason, ``request.json_body`` is None. + + The request flow is unaffected and no exceptions are ever raised. 
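+
+        Typical usage is to call this at the top of a handler and then fall
+        back to form data (or reject the request) when ``request.json_body``
+        is ``None``.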
+ """ + + request.json_body = None + + if not request.META.get("CONTENT_TYPE", "").startswith("application/json"): + return + + if not len(request.body): + return + + try: + request.json_body = json.loads(request.body.decode('utf-8')) + except json.JSONDecodeError: + return + + @csrf_exempt + def dispatch(self, request, *args, **kwargs): + """ + 处理HTTP请求的入口方法 + :param request: HTTP请求 + :param args: 请求参数 + :param kwargs: + :return: HTTP响应体 + """ + self.args = args + self.kwargs = kwargs + request = self.initialize_request(request, *args, **kwargs) + self.request = request + self.headers = self.default_response_headers # deprecate? + + try: + self.initial(request, *args, **kwargs) + + # Get the appropriate handler method + if request.method.lower() in self.http_method_names: + handler = getattr(self, request.method.lower(), + self.http_method_not_allowed) + else: + handler = self.http_method_not_allowed + response = handler(request, *args, **kwargs) + except Exception as exc: + logger.error(f'url: {self.request.path},exc:{exc}', exc_info=True) + response = self.handle_exception(exc) + return self.finalize_response(request, response, *args, **kwargs) + + self.response = self.finalize_response(request, response, *args, **kwargs) + if self.request.user is not None and self.request.user.is_active and handler.__module__.startswith('dongtai_web') and self.description is not None: + self.log_manager.log_action( + user_id=self.request.user.id, + content_type_id=ContentType.objects.get_or_create(app_label=self.request.content_type)[0].id, + object_id='', + object_repr='', + action_flag=CHANGE, + change_message=f'访问{self.description}接口' + ) + return self.response + + def handle_exception(self, exc): + """ + Handle any exception that occurs, by returning an appropriate response, + or re-raising the error. 
+ """ + if isinstance(exc, exceptions.Throttled): + exc.status_code = status.HTTP_429_TOO_MANY_REQUESTS + elif isinstance(exc, (exceptions.NotAuthenticated, + exceptions.AuthenticationFailed)): + # WWW-Authenticate header for 401 responses, else coerce to 403 + auth_header = self.get_authenticate_header(self.request) + + if auth_header: + exc.auth_header = auth_header + else: + exc.status_code = status.HTTP_403_FORBIDDEN + + exception_handler = self.get_exception_handler() + + context = self.get_exception_handler_context() + response = exception_handler(exc, context) + + if response is None: + self.raise_uncaught_exception(exc) + + response.exception = True + return response + + def parse_args(self, request): + pass + + @staticmethod + def get_paginator(queryset, page: int = 1, page_size: int = 20): + """ + 根据模型集合、页号、每页大小获取分页数据 + :param queryset: + :param page: + It is recommended to set the pagesize below 50, + if it exceeds 50, it will be changed to 50 + :param page_size: + :return: + """ + page_size = min(50, int(page_size)) + page = int(page) + try: + page_info = Paginator(queryset, per_page=page_size) + page_summary = { + "alltotal": page_info.count, + "num_pages": page_info.num_pages, + "page_size": page_size + } + except BaseException: + page_summary = { + "alltotal": 0, + "num_pages": 0, + "page_size": page_size + } + try: + page_info.validate_number(page) + page_list = page_info.get_page(page).object_list + except BaseException: + return page_summary, [] + return page_summary, page_list + + @staticmethod + def get_auth_users(user): + """ + 通过用户查询有访问权限的用户列表 + :param user: + :return: + """ + if user.is_anonymous: + users = User.objects.filter(username=const.USER_BUGENV) + elif user.is_system_admin(): + users = User.objects.all() + elif user.is_talent_admin(): + talent = user.get_talent() + departments = talent.departments.all() + users = User.objects.filter(department__in=departments) + else: + users = User.objects.filter(id=user.id).all() + + return users + + @staticmethod + def get_auth_agents_with_user(user): + """ + 通过用户查询有访问权限的agent列表 + :param user: + :return: + """ + return EndPoint.get_auth_agents(EndPoint.get_auth_users(user)) + + @staticmethod + def get_auth_agents(users): + """ + 通过用户列表查询有访问权限的agent列表 + :param users: + :return: + """ + return IastAgent.objects.filter(user__in=users) + # if isinstance(users, QuerySet): + # return IastAgent.objects.filter(user__in=users) + # else: + # return IastAgent.objects.filter(user=users) + + @staticmethod + def get_auth_assets(users): + """ + 通过用户列表查询有访问权限的asset列表 + :param users: + :return: + """ + return Asset.objects.filter(user__in=users, is_del=0) + + @staticmethod + def get_auth_asset_aggrs(auth_assets): + """ + 通过用户列表查询有访问权限的asset aggr列表 + :param users: + :return: + """ + auth_assets = auth_assets.values('signature_value').annotate(total=Count('signature_value')) + auth_hash = [] + for asset in auth_assets: + auth_hash.append(asset['signature_value']) + auth_hash = list(set(auth_hash)) + queryset = AssetAggr.objects.filter(signature_value__in=auth_hash, is_del=0) + return queryset + + @staticmethod + def get_auth_asset_vuls(assets): + """ + 通过用户列表查询有访问权限的asset vul列表 + :param users: + :return: + """ + permission_assets = assets.filter(dependency_level__gt=0).values('id').all() + auth_assets = [_i['id'] for _i in permission_assets] + + vul_asset_ids = IastVulAssetRelation.objects.filter(asset_id__in=auth_assets, is_del=0).values( + 'asset_vul_id').all() + perm_vul_ids = [] + if vul_asset_ids: + perm_vul_ids = [_i['asset_vul_id'] 
for _i in vul_asset_ids] + + return perm_vul_ids + + @staticmethod + def get_auth_and_anonymous_agents(user): + query_user = [] + if user.is_active: + query_user = user + + if query_user == []: + dt_range_user = User.objects.filter(username=const.USER_BUGENV).first() + if dt_range_user: + query_user = dt_range_user + return EndPoint.get_auth_agents_with_user(query_user) + + +class MixinAuthEndPoint(EndPoint): + """ + 通过Token和Sessin验证的API入口 + """ + authentication_classes = (SessionAuthentication, TokenAuthentication,) + + +class AnonymousAuthEndPoint(EndPoint): + """ + 具有匿名用户权限验证的API入口 + """ + authentication_classes = [] + + +class AnonymousAndUserEndPoint(MixinAuthEndPoint): + permission_classes = [] + + +class UserEndPoint(MixinAuthEndPoint): + permission_classes = (UserPermission,) + + +class OpenApiEndPoint(EndPoint): + authentication_classes = (TokenAuthentication,) + permission_classes = (UserPermission,) + + +class EngineApiEndPoint(EndPoint): + authentication_classes = (SessionAuthentication, TokenAuthentication) + permission_classes = (UserPermission,) + + +class SystemAdminEndPoint(EndPoint): + authentication_classes = (SessionAuthentication, TokenAuthentication) + # authentication_classes = (TokenAuthentication,) + permission_classes = (SystemAdminPermission,) + + +class TalentAdminEndPoint(EndPoint): + authentication_classes = (SessionAuthentication, TokenAuthentication) + permission_classes = (TalentAdminPermission,) + + +class R: + """ + Anyway, to prevent information exposure through an exception ,don't directly return exception message in response . + ref: https://cwe.mitre.org/data/definitions/497.html + ref: https://cwe.mitre.org/data/definitions/209.html + """ + @staticmethod + def success(status=201, data=None, msg=_("success"), page=None, **kwargs): + resp_data = {"status": status, "msg": msg} + if data is not None: + resp_data['data'] = data + if page: + resp_data['page'] = page + + for key, value in kwargs.items(): + resp_data[key] = value + + return JsonResponse(resp_data) + + @staticmethod + def failure(status=202, data=None, msg=_("failure")): + resp_data = {"status": status, "msg": msg} + if data: + resp_data['data'] = data + return JsonResponse(resp_data) diff --git a/dongtai_common/engine/__init__.py b/dongtai_common/engine/__init__.py new file mode 100644 index 000000000..f98a1fb25 --- /dev/null +++ b/dongtai_common/engine/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/21 下午7:06 +# project: dongtai-engine diff --git a/dongtai_common/engine/vul_engine.py b/dongtai_common/engine/vul_engine.py new file mode 100644 index 000000000..325cb1f12 --- /dev/null +++ b/dongtai_common/engine/vul_engine.py @@ -0,0 +1,278 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/21 下午7:07 +# project: dongtai-engine +import logging +import copy + +from django.utils.functional import cached_property + +logger = logging.getLogger('dongtai-dongtai_conf') + + +class VulEngine(object): + """ + 根据策略和方法池查找是否存在漏洞,此类不进行策略和方法池的权限验证 + """ + + def __init__(self): + """ + 构造函数,初始化相关数据 + """ + self._method_pool = None + self.method_pool_asc = None + self._vul_method_signature = None + self.hit_vul = False + self.vul_stack = None + self.pool_value = None + self.vul_source_signature = None + self.graph_data = { + 'nodes': [], + 'edges': [] + } + self.method_counts = 0 + self.taint_link_size = 0 + self.edge_code = 1 + self.taint_value = '' + self.vul_type = None + + @property 
+ def method_pool(self): + """ + 方法池数据 + :return: + """ + return self._method_pool + + @method_pool.setter + def method_pool(self, method_pool): + """ + 设置方法池数据,根据方法调用ID对数据进行倒序排列,便于后续检索漏洞 + :param method_pool: + :return: + """ + self._method_pool = sorted(method_pool, key=lambda e: e.__getitem__('invokeId'), reverse=True) + + @property + def vul_method_signature(self): + return self._vul_method_signature + + @vul_method_signature.setter + def vul_method_signature(self, vul_method_signature): + self._vul_method_signature = vul_method_signature + + def prepare(self, method_pool, vul_method_signature): + """ + 对方法池、漏洞方法签名及其他数据进行预处理 + :param method_pool: 方法池,list + :param vul_method_signature: 漏洞方法签名,str + :return: + """ + self.method_pool = method_pool + self.vul_method_signature = vul_method_signature + self.hit_vul = False + self.vul_stack = list() + self.pool_value = -1 + self.vul_source_signature = '' + self.method_counts = len(self.method_pool) + + def hit_vul_method(self, method): + # print(self.vul_method_signature) + if f"{method.get('className')}.{method.get('methodName')}" == self.vul_method_signature: + self.hit_vul = True + return True + + def do_propagator(self, method, current_link): + is_source = method.get('source') + target_hash = method.get('targetHash') + + for hash in target_hash: + if hash in self.pool_value: + if is_source: + current_link.append(method) + self.vul_source_signature = f"{method.get('className')}.{method.get('methodName')}" + return True + else: + current_link.append(method) + self.pool_value = method.get('sourceHash') + break + + @cached_property + def method_pool_signatures(self): + signatures = set() + + for method in self.method_pool: + signatures.add(f"{method.get('className').replace('/', '.')}.{method.get('methodName')}") + return signatures + + def search(self, method_pool, vul_method_signature, vul_type=None): + self.vul_type = vul_type + self.prepare(method_pool, vul_method_signature) + size = len(self.method_pool) + for index in range(size): + method = self.method_pool[index] + if self.hit_vul_method(method) is None: + continue + + if 'sourceValues' in method: + self.taint_value = method['sourceValues'] + # 找到sink点所在索引后,开始向后递归 + current_link = list() + + vul_method_detail = self.copy_method(method_detail=method, sink=True) + current_link.append(vul_method_detail) + self.pool_value = set(method.get('sourceHash')) + self.vul_source_signature = None + logger.info(f'==> current taint hash: {self.pool_value}') + if self.loop(index, size, current_link): + break + self.vul_filter() + + + def vul_filter(self): + # 分析是否存在过滤条件,排除误报 + # 根据漏洞类型,查询filter方法 + # 检查vul_ + if self.vul_source_signature: + # mark there has a vul + # if vul_type has filter, do escape + stack_count = len(self.vul_stack) + for index in range(0, stack_count): + stack = self.vul_stack[index] + for item in stack: + if 'java.net.URL.' 
== item["signature"]: + url = item['sourceValues'] + origin_source = stack[0]['targetValues'] + from urllib.parse import urlparse + o = urlparse(url) + if origin_source not in f'{o.scheme}://{o.netloc}{o.path}': + print(origin_source, url) + self.vul_stack[index] = [] + break + vul_source_signature = self.vul_source_signature + self.vul_source_signature = None + for index in range(0, stack_count): + if self.vul_stack[index]: + self.vul_source_signature = vul_source_signature + else: + continue + + @staticmethod + def copy_method(method_detail, sink=False, source=False, propagator=False, filter=False): + vul_method_detail = copy.deepcopy(method_detail) + vul_method_detail['originClassName'] = vul_method_detail['originClassName'].split('.')[-1] + # todo 根据类型进行拼接 + if source: + vul_method_detail['tag'] = 'source' + vul_method_detail[ + 'code'] = f'{vul_method_detail["targetValues"]} = {vul_method_detail["signature"]}(...)' + elif propagator: + vul_method_detail['tag'] = 'propagator' + vul_method_detail[ + 'code'] = f'{vul_method_detail["targetValues"]} = {vul_method_detail["signature"]}(..., {vul_method_detail["sourceValues"]}, ...)' + elif filter: + vul_method_detail['tag'] = 'filter' + vul_method_detail[ + 'code'] = f'{vul_method_detail["targetValues"]} = {vul_method_detail["signature"]}(..., {vul_method_detail["sourceValues"]}, ...)' + elif sink: + vul_method_detail['tag'] = 'sink' + vul_method_detail[ + 'code'] = f'{vul_method_detail["signature"]}(..., {vul_method_detail["sourceValues"]}, ...)' + else: + vul_method_detail['code'] = vul_method_detail["signature"] + return vul_method_detail + + def loop(self, index, size, current_link): + for sub_index in range(index + 1, size): + sub_method = self.method_pool[sub_index] + sub_target_hash = set(sub_method.get('targetHash')) + sub_target_rpc_hash = set(sub_method.get('targetHashForRpc',[])) + if ((sub_target_hash and sub_target_hash & self.pool_value) or + (sub_target_rpc_hash and sub_target_rpc_hash & self.pool_value) + ) and check_service_propagate_method_state(sub_method): + logger.info("stisfied {sub_method}") + if sub_method.get('source'): + current_link.append(self.copy_method(sub_method, source=True)) + self.vul_source_signature = f"{sub_method.get('className')}.{sub_method.get('methodName')}" + self.vul_stack.append(current_link[::-1]) + self.taint_value = sub_method['targetValues'] + current_link.pop() + return True + else: + current_link.append(self.copy_method(sub_method, propagator=True)) + old_pool_value = self.pool_value + self.pool_value = set(sub_method.get('sourceHash')) + if self.loop(sub_index, size, current_link): + return True + self.pool_value = old_pool_value + current_link.pop() + else: + logger.debug("not stisfied {sub_method}") + + def search_sink(self, method_pool, vul_method_signature): + self.prepare(method_pool, vul_method_signature) + if vul_method_signature in self.method_pool_signatures: + return True + + def dfs(self, current_hash, left_node, left_index): + """ + 深度优先搜索,搜索污点流图中的边 + :param current_hash: 当前污点数据,set() + :param left_node: 上层节点方法的调用ID + :param left_index: 上层节点方法在方法队列中的编号 + :return: + """ + not_found = True + for index in range(left_index + 1, self.method_counts): + data = self.method_pool_asc[index] + if current_hash & set(data['sourceHash']): + not_found = False + right_node = str(data['invokeId']) + self.graph_data['edges'].append({ + 'id': str(self.edge_code), + 'source': left_node, + 'target': right_node, + }) + self.edge_code = self.edge_code + 1 + data['sourceHash'] = 
list(set(data['sourceHash']) - current_hash) + self.dfs(set(data['targetHash']), right_node, index) + + if not_found: + self.taint_link_size = self.taint_link_size + 1 + + def create_node(self): + """ + 创建污点流图中使用的节点数据 + :return: + """ + for data in self.method_pool_asc: + source = ','.join([str(_) for _ in data['sourceHash']]) + target = ','.join([str(_) for _ in data['targetHash']]) + node = { + 'id': str(data['invokeId']), + 'name': f"{data['className'].replace('/', '.').split('.')[-1]}.{data['methodName']}({source}) => {target}", + 'dataType': 'source' if data['source'] else 'sql', + 'conf': [ + {'label': 'source', 'value': source}, + {'label': 'target', 'value': target}, + {'label': 'caller', 'value': f"{data['callerClass']}.{data['callerMethod']}()"} + ] + } + self.graph_data['nodes'].append(node) + + def result(self): + if self.vul_source_signature: + return True, self.vul_stack, self.vul_source_signature, self.vul_method_signature, self.taint_value + return False, None, None, None, None + + def get_taint_links(self): + return self.graph_data, self.taint_link_size, self.method_counts + + +def check_service_propagate_method_state(method): + if method.get("traceId", "") and not method.get( + "servicePropagateMethodState", False) and not method.get( + 'source', False): + return False + return True diff --git a/dongtai_common/engine/vul_engine_v2.py b/dongtai_common/engine/vul_engine_v2.py new file mode 100644 index 000000000..35e062394 --- /dev/null +++ b/dongtai_common/engine/vul_engine_v2.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/21 下午7:07 +# project: dongtai-engine +from copy import deepcopy + +from django.utils.functional import cached_property + + +class VulEngineV2(object): + """ + 根据策略和方法池查找是否存在漏洞,此类不进行策略和方法池的权限验证 + """ + + def __init__(self): + """ + 构造函数,初始化相关数据 + """ + self._method_pool = None + self.method_pool_asc = None + self._vul_method_signature = None + self.hit_vul = False + self.vul_stack = None + self.pool_value = None + self.vul_source_signature = None + self.node_data = {} + self.nodes = dict() + self.raw_graph_data = {} + self.raw_node_data = {} + self.graphy_data = { + 'nodes': [], + 'edges': [] + } + self.method_counts = 0 + self.taint_link_size = 0 + self.edge_code = 1 + + @property + def method_pool(self): + """ + 方法池数据 + :return: + """ + return self._method_pool + + @method_pool.setter + def method_pool(self, method_pool): + """ + 设置方法池数据,根据方法调用ID对数据进行倒序排列,便于后续检索漏洞 + :param method_pool: + :return: + """ + self._method_pool = sorted(filter( + lambda x: not ('type' in x.keys() and 'stack' in x.keys()), + method_pool), + key=lambda e: e.__getitem__('invokeId'), + reverse=True) + + @property + def vul_method_signature(self): + return self._vul_method_signature + + @vul_method_signature.setter + def vul_method_signature(self, vul_method_signature): + self._vul_method_signature = vul_method_signature + + def prepare(self, method_pool, vul_method_signature): + """ + 对方法池、漏洞方法签名及其他数据进行预处理 + :param method_pool: 方法池,list + :param vul_method_signature: 漏洞方法签名,str + :return: + """ + self.method_pool = method_pool + self.vul_method_signature = vul_method_signature + self.hit_vul = False + self.vul_stack = list() + self.pool_value = -1 + self.vul_source_signature = '' + self.method_counts = len(self.method_pool) + + def hit_vul_method(self, method): + if f"{method.get('className')}.{method.get('methodName')}" == self.vul_method_signature: + self.hit_vul = True + self.pool_value = 
method.get('sourceHash') + return True + + def do_propagator(self, method, current_link): + is_source = method.get('source') + target_hash = method.get('targetHash') + + if is_source: + for hash in target_hash: + if hash in self.pool_value: + current_link.append(method) + self.vul_source_signature = f"{method.get('className')}.{method.get('methodName')}" + return True + else: + for hash in target_hash: + if hash in self.pool_value: + current_link.append(method) + self.pool_value = method.get('sourceHash') + break + + @cached_property + def method_pool_signatures(self): + signatures = list() + for method in self.method_pool: + signatures.append(f"{method.get('className').replace('/', '.')}.{method.get('methodName')}") + return signatures + + def search_sink(self, method_pool, vul_method_signature): + self.prepare(method_pool, vul_method_signature) + if vul_method_signature in self.method_pool_signatures: + return True + + def search_all_link(self): + """ + 从方法池中搜索所有的污点传播链 + :return: + """ + self.edge_code = 1 + self.method_pool_asc = self.method_pool[::-1] + self.create_graph() + # self.filter_invalid_data() + self.create_edge() + + def create_edge(self): + """ + 创建污点链的边 + :return: + """ + edges = list() + node_ids = set() + for head, subs in self.raw_graph_data.items(): + node_ids.add(head) + for sub_node in subs: + node_ids.add(sub_node) + edges.append({ + 'id': str(self.edge_code), + 'source': str(head), + 'target': sub_node + }) + self.edge_code = self.edge_code + 1 + + nodes = [self.raw_node_data[int(node_id)] for node_id in node_ids] + self.graphy_data['nodes'] = nodes + self.graphy_data['edges'] = edges + + @staticmethod + def create_node(data): + """ + 创建污点流图中使用的节点数据 + :return: + """ + source = ','.join([str(_) for _ in data['sourceHash']]) + target = ','.join([str(_) for _ in data['targetHash']]) + classname = data['className'].replace('/', '.').split('.')[-1] + invoke_id = str(data['invokeId']) + node = { + 'id': invoke_id, + 'name': f"{classname}.{data['methodName']}({source}) => {target}", + 'dataType': 'source' if data['source'] else 'sql', + 'nodeType': classname, + 'conf': [ + {'label': '调用方法', 'value': f"{data['callerClass']}.{data['callerMethod']}()"}, + {'label': '行号', 'value': data['callerLineNumber']}, + {'label': '污点来源为', 'value': source}, + {'label': '污点转换为', 'value': target}, + {'label': '初始污点', 'value': data['sourceValues']} if 'sourceValues' in data else {}, + {'label': '传播后污点', 'value': data['targetValues']} if 'targetValues' in data else {}, + ] + } + return node + + def create_graph(self): + node_count = len(self.method_pool_asc) + for index in range(node_count): + node = self.method_pool_asc[index] + invoke_id = node['invokeId'] + self.raw_node_data[invoke_id] = self.create_node(node) + if invoke_id not in self.raw_graph_data: + self.raw_graph_data[invoke_id] = list() + for _index in range(index + 1, node_count): + _node = self.method_pool_asc[_index] + if set(node['targetHash']) & set(_node['sourceHash']): + self.raw_graph_data[invoke_id].append(str(_node['invokeId'])) + + def filter_invalid_data(self): + raw_node_data_copy = deepcopy(self.raw_node_data) + + while True: + status, self.raw_graph_data, raw_node_data = self.remove_invalid(self.raw_graph_data, raw_node_data_copy) + if status is False: + break + + def remove_invalid(self, raw_graph_data, raw_node_data): + has_invalid = False + invalid_node = list() + for head, subs in raw_graph_data.items(): + if not subs: + invalid_node.append(head) + + for head in invalid_node: + del raw_graph_data[head] + + 
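# --- Hedged usage sketch (not part of this patch): it illustrates how VulEngineV2 builds its
# taint graph by linking an entry whose targetHash overlaps a later entry's sourceHash. The
# two-entry method pool and its hash values below are invented for illustration only; run it
# in a shell with this project on the PYTHONPATH.
from dongtai_common.engine.vul_engine_v2 import VulEngineV2

sample_pool = [
    {"invokeId": 1, "className": "javax/servlet/http/HttpServletRequest",
     "methodName": "getParameter", "source": True,
     "sourceHash": [101], "targetHash": [202],
     "callerClass": "com.example.UserController", "callerMethod": "search",
     "callerLineNumber": 42, "targetValues": "name=admin"},
    {"invokeId": 2, "className": "java/lang/Runtime",
     "methodName": "exec", "source": False,
     "sourceHash": [202], "targetHash": [303],
     "callerClass": "com.example.UserController", "callerMethod": "search",
     "callerLineNumber": 57, "sourceValues": "name=admin"},
]

engine = VulEngineV2()
engine.prepare(sample_pool, "java.lang.Runtime.exec")
engine.search_all_link()
graph, link_size, method_count = engine.get_taint_links()
# graph["edges"] now holds a single edge from invokeId 1 to invokeId 2, because
# targetHash 202 of the source call feeds sourceHash 202 of the sink call.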
sorted_graph_data = sorted(raw_graph_data.keys(), reverse=True) + for key in sorted_graph_data: + sub_nodes = raw_graph_data[key] + leaf_nodes = list(filter(lambda x: int(x) not in raw_graph_data, sub_nodes)) + if leaf_nodes: + filtered_leaf_nodes = set(filter(self.filter_invalid_node, leaf_nodes)) + raw_graph_data[key] = filtered_leaf_nodes | (set(sub_nodes) - set(leaf_nodes)) + filtered_node_count = len(filtered_leaf_nodes) + sub_node_count = len(leaf_nodes) + if sub_node_count != filtered_node_count: + has_invalid = True + return has_invalid, raw_graph_data, raw_node_data + + @staticmethod + def is_invalid_node(classname): + return classname in ('List', 'String', 'StringBuilder', 'StringReader', 'Enumeration', 'Map',) + + def filter_invalid_node(self, node_id): + node = self.raw_node_data[int(node_id)] + if self.is_invalid_node(node['nodeType']): + return False + return True + + def result(self): + if self.vul_source_signature: + return True, self.vul_stack, self.vul_source_signature, self.vul_method_signature + return False, None, None, None + + def get_taint_links(self): + return self.graphy_data, self.taint_link_size, self.method_counts diff --git a/dongtai_common/models/__init__.py b/dongtai_common/models/__init__.py new file mode 100644 index 000000000..13d8b5c65 --- /dev/null +++ b/dongtai_common/models/__init__.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/25 下午6:43 +# software: PyCharm +# project: dongtai-models + +from .user import User +from . import api_route +LANGUAGE_DICT = {'JAVA': 1, 'PYTHON': 2, 'PHP': 3, 'GO': 4} +### aggregation +LANGUAGE_ID_DICT = {"1":"JAVA", "2": "PYTHON", "3": "PHP", "4":"GO"} +AVAILABILITY_DICT = { + "1": "存在利用代码", + "2": "存在分析文章", + "3": "无利用信息", +} +SOURCE_TYPE_DICT = { + "1": "应用漏洞", + "2": "组件漏洞" +} +AGGREGATION_ORDER = { + "1":"vul.level_id", + "2":"rel.create_time", + "3":"vul.update_time", +} + +APP_VUL_ORDER = { + "1":"level_id", + "2":"first_time", + "3":"latest_time", + "4":"status_id", +} +#license 风险等级 +LICENSE_RISK = { + "1": "高", + "2": "中", + "3": "低", + "0": "无风险", + "4": "无风险", +} +LICENSE_RISK_DESC = { + "1": "禁止商业闭源集成", + "2": "限制性商业闭源集成", + "3": "部分商业闭源集成", + "0": "无商业闭源集成", + "4": "无商业闭源集成", +} +# 漏洞等级 +APP_LEVEL_RISK = { + "1": "高危", + "2": "中危", + "3": "低危", + "4": "无风险", + "5": "提示", + "0": "无风险" +} +# 图片生成 +PNG_TREND_LEVEL = { + "1":"高危漏洞", + "2":"中危漏洞", + "3":"低危漏洞", + "4":"提示信息" +} +# 组件漏洞可利用性 +SCA_AVAILABILITY_DICT = { + "1":"存在利用代码", + "2":"存在分析文章", + "3":"无利用信息" +} +# default share config key +SHARE_CONFIG_DICT={ + "jira_url":"", + "jira_id":"", + "gitlab_url":"", + "gitlab_id":"", + "zendao_url":"", + "zendao_id":"" +} + +NOTIFY_TYPE_DICT = { + "1": "webHook", + "2": "GitLab", + "3": "Jira", + "4": "ZenDao", + "5": "FeiShu", + "6": "WeiXin", + "7": "DingDing", +} +### end +WHITE_DOMAIN_NOTIFY = [ + "open.feishu.cn", + "qyapi.weixin.qq.com", + "oapi.dingtalk.com" +] + +VUL_TYPE_CSS = { + "1":"sca-height", + "2":"sca-middle", + "3":"sca-low", + "4":"sca-info", +} + +VUL_DEP_CSS = { + "1":"height", + "2":"middle", + "3":"low", + "4":"info", +} + +# export report default info +DEFAULT_EXPORT_REPORT_DICT = { + "description": { + "user_id":"user_id", + "report_name":"report_name", + "project_name": "", + "version_name": "version_name", + "api_vount": "", + "vul_level_count":{}, + "license_level_count":{}, + "project_create_time": "", + "report_create_time": "", + }, + "risk_analysis": { + "content": "", + "level_png": "", + "trend_png": "", + "app_vul_type": { + 
"1":{}, + "2":{}, + "3":{}, + "4":{}, + "5":{}, + }, + "sca_vul_type": { + "1": {}, + "2": {}, + "3": {}, + "4": {}, + "5": {}, + }, + "license_type": {} + }, + "risk_details": { + "app_vul_detail": [], + "sca_vul_detail": [], + "license_vul_detail": [] + }, + "sca_list": {}, + "api_site_map": {} +} diff --git a/dongtai_common/models/agent.py b/dongtai_common/models/agent.py new file mode 100644 index 000000000..a06934836 --- /dev/null +++ b/dongtai_common/models/agent.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/11/30 下午5:29 +# software: PyCharm +# project: dongtai-models +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from dongtai_common.models import User + +from dongtai_common.models.server import IastServer +from dongtai_common.utils.settings import get_managed +from dongtai_common.models.project import IastProject +from dongtai_common.models.project_version import IastProjectVersion + +class IastAgent(models.Model): + token = models.CharField(max_length=255, blank=True, null=True) + version = models.CharField(max_length=255, blank=True, null=True) + latest_time = models.IntegerField(blank=True, null=True) + user = models.ForeignKey(User, models.DO_NOTHING) + server = models.ForeignKey( + to=IastServer, + on_delete=models.DO_NOTHING, + related_name='agents', + null=True, + related_query_name='agent', + verbose_name=_('server'), + ) + is_audit = models.IntegerField(blank=True, null=True) + is_running = models.IntegerField(blank=True, null=True) + is_core_running = models.IntegerField(blank=True, null=True) + control = models.IntegerField(blank=True, null=True) + is_control = models.IntegerField(blank=True, null=True) + bind_project = models.ForeignKey(IastProject, + on_delete=models.DO_NOTHING, + blank=True, + null=True, + default=-1) + project_version = models.ForeignKey(IastProjectVersion, + on_delete=models.DO_NOTHING, + blank=True, + null=True, + default=-1) + project_name = models.CharField(max_length=255, blank=True, null=True) + online = models.PositiveSmallIntegerField(blank=True, default=0) + language = models.CharField(max_length=10, blank=True, null=True) + filepathsimhash = models.CharField(max_length=255, + default='', + blank=True, + null=True) + servicetype = models.CharField(max_length=255, + default='', + blank=True, + null=True) + alias = models.CharField(default='', max_length=255, blank=True, null=True) + startup_time = models.IntegerField(default=0, null=False) + register_time = models.IntegerField(default=0, null=False) + actual_running_status = models.IntegerField(default=1, null=False) + except_running_status = models.IntegerField(default=1, null=False) + state_status = models.IntegerField(default=1, null=False) + + + class Meta: + managed = get_managed() + db_table = 'iast_agent' diff --git a/dongtai_common/models/agent_config.py b/dongtai_common/models/agent_config.py new file mode 100644 index 000000000..fbd64eaad --- /dev/null +++ b/dongtai_common/models/agent_config.py @@ -0,0 +1,151 @@ +from django.db import models +from dongtai_common.models import User +from dongtai_common.utils.settings import get_managed +from time import time +from django.db.models import IntegerChoices +from django.utils.translation import gettext_lazy as _ +# agent 阀值监控配置 +class IastAgentConfig(models.Model): + user = models.ForeignKey(User, models.DO_NOTHING) + details = models.JSONField() + hostname = models.CharField(max_length=255, blank=True, null=True) + ip = 
models.CharField(max_length=100, blank=True, null=True) + port = models.IntegerField(blank=True, null=True) + cluster_name = models.CharField(max_length=255, blank=True, null=True) + cluster_version = models.CharField(max_length=100, blank=True, null=True) + priority = models.IntegerField(blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_agent_config' + # agent 阀值监控配置 + +class TargetOperator(IntegerChoices): + EQUAL = 1, _("等于") + NOT_EQUAL = 2, _("不等于") + CONTAIN = 3, _("包含") + NOT_CONTAIN = 4, _("不包含") + +class MetricOperator(IntegerChoices): + GREATER = 5, _("大于") + +class MetricGroup(IntegerChoices): + SYSTEM = 1, _("性能指标") + JVM = 2, _("JVM指标") + APPLICATION = 3, _("应用指标") + +class DealType(IntegerChoices): + UNLOAD = 1, _("完全卸载") + RELIVE = 2, _("恢复后启动") + + +class TargetType(IntegerChoices): + ACCOUNT_NAME = 1, _("帐号") + PROJECT_NAME = 2, _("项目名") + PROTOCOL = 3, _("协议") + AGENT_NAME = 4, _("Agent名称") + AGENT_IP = 5, _("Agent IP") + AGENT_PATH = 6, _("Agent 启动路径") + PORT = 7, _("端口") + AGENT_LANGUAGE = 8, _("语言") + + +#keep match with agent ,ignore its naming style +class MetricType(IntegerChoices): + cpuUsagePercentage = 1, _("系统CPU使用率阈值") + sysMemUsagePercentage = 2, _("系统内存使用率阈值") + sysMemUsageUsed = 3, _("系统内存使用值阈值") + jvmMemUsagePercentage = 4, _("JVM内存使用率阈值") + jvmMemUsageUsed = 5, _("JVM内存使用值阈值") + threadCount = 6, _("总线程数阈值") + daemonThreadCount = 7, _("守护线程数阈值") + dongTaiThreadCount = 8, _("洞态IAST线程数阈值") + hookLimitTokenPerSecond = 9, _("单请求HOOK限流") + heavyTrafficLimitTokenPerSecond = 10, _("每秒限制处理请求数量(QPS)") + apiResponseTime = 11, _("请求响应时间阈值") + + +UNIT_DICT = { + 1: "%", + 2: "%", + 3: "kb", + 4: "%", + 5: "kb", + 6: "个", + 6: "个", + 7: "个", + 8: "个", + 9: "次", + 10: '次', + 11: 'ms', +} + +class SystemMetricType(IntegerChoices): + cpuUsagePercentage = 1, _("系统CPU使用率阈值") + sysMemUsagePercentage = 2, _("系统内存使用率阈值") + sysMemUsageUsed = 3, _("系统内存使用值阈值") + + +class JVMMetricType(IntegerChoices): + jvmMemUsagePercentage = 4, _("JVM内存使用率阈值") + jvmMemUsageUsed = 5, _("JVM内存使用值阈值") + threadCount = 6, _("总线程数阈值") + daemonThreadCount = 7, _("守护线程数阈值") + dongTaiThreadCount = 8, _("洞态IAST线程数阈值") + + +class ApplicationMetricType(IntegerChoices): + hookLimitTokenPerSecond = 9, _("单请求HOOK限流") + heavyTrafficLimitTokenPerSecond = 10, _("每秒限制处理请求数量(QPS)") + apiResponseTime = 11, _("请求响应时间阈值") + +class IastCircuitConfig(models.Model): + user = models.ForeignKey(User, models.DO_NOTHING) + name = models.CharField(max_length=200, blank=True, null=True) + metric_types = models.CharField(max_length=2000, blank=True, null=True) + target_types = models.CharField(max_length=2000, + blank=True, + null=True, + db_column='targets') + system_type = models.IntegerField(blank=True, null=True) + is_enable = models.IntegerField(blank=True, null=True) + is_deleted = models.IntegerField(default=0, blank=True, null=True) + deal = models.IntegerField(blank=True, null=True) + interval = models.IntegerField(blank=True, null=True) + metric_group = models.IntegerField(blank=True, null=True) + priority = models.IntegerField(blank=True, null=True) + create_time = models.IntegerField(blank=True, + null=True, + default=int(time())) + update_time = models.IntegerField(blank=True, + null=True, + default=int(time())) + + class Meta: + managed = get_managed() + db_table = 'iast_circuit_configs' + + +class IastCircuitTarget(models.Model): + circuit_config = models.ForeignKey(IastCircuitConfig, + on_delete=models.CASCADE) + 
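# --- Hedged usage sketch (not part of this patch): how the IntegerChoices classes and
# UNIT_DICT above combine to render a human-readable circuit-breaker rule such as
# "系统CPU使用率阈值 大于 80%". The threshold value 80 is a placeholder; run inside a
# configured Django shell so the lazy gettext labels can be evaluated.
from dongtai_common.models.agent_config import MetricType, MetricOperator, UNIT_DICT

metric = MetricType.cpuUsagePercentage
threshold = 80
rule_text = f"{metric.label} {MetricOperator.GREATER.label} {threshold}{UNIT_DICT[metric.value]}"
print(rule_text)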
target_type = models.IntegerField(blank=True, null=True) + opt = models.IntegerField(blank=True, null=True) + value = models.CharField(max_length=200, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_circuit_targets' + + +class IastCircuitMetric(models.Model): + circuit_config = models.ForeignKey(IastCircuitConfig, + on_delete=models.CASCADE) + metric_type = models.IntegerField(blank=True, null=True) + opt = models.IntegerField(blank=True, null=True) + value = models.CharField(max_length=200, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_circuit_metrics' diff --git a/dongtai_common/models/agent_method_pool.py b/dongtai_common/models/agent_method_pool.py new file mode 100644 index 000000000..191f99c8a --- /dev/null +++ b/dongtai_common/models/agent_method_pool.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/14 下午3:35 +# software: PyCharm +# project: dongtai-models +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.hook_strategy import HookStrategy +from dongtai_common.utils.settings import get_managed + +# 'id', 'agent', 'uri', 'http_method', 'http_scheme', 'req_header', 'req_params', 'req_data', 'taint_value','param_name' +class MethodPool(models.Model): + agent = models.ForeignKey(IastAgent, + models.DO_NOTHING, + blank=True, + null=True, + db_constraint=False) + url = models.CharField(max_length=2000, blank=True, null=True) + uri = models.CharField(max_length=2000, blank=True, null=True) + http_method = models.CharField(max_length=10, blank=True, default='') + http_scheme = models.CharField(max_length=20, blank=True, null=True) + http_protocol = models.CharField(max_length=255, blank=True, null=True) + req_header = models.CharField(max_length=2000, blank=True, null=True) + req_params = models.CharField(max_length=2000, blank=True, null=True) + req_data = models.CharField(max_length=4000, blank=True, null=True) + res_header = models.CharField(max_length=1000, blank=True, null=True) + res_body = models.TextField(blank=True, null=True) + req_header_fs = models.TextField(blank=True, + null=True, + db_column='req_header_for_search') + context_path = models.CharField(max_length=255, blank=True, null=True) + method_pool = models.TextField(blank=True, + null=True) # This field type is a guess. + pool_sign = models.CharField(unique=True, + max_length=40, + blank=True, + null=True) # This field type is a guess. 
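# --- Hedged usage sketch (not part of this patch): because pool_sign is declared unique,
# get_or_create can be used to deduplicate uploaded method pools. The sha1-based signing
# helper below is illustrative only; the real agent report pipeline may compute the
# signature differently.
from hashlib import sha1
from dongtai_common.models.agent_method_pool import MethodPool

def save_method_pool_once(agent, uri, http_method, raw_method_pool: str):
    sign = sha1(raw_method_pool.encode("utf-8")).hexdigest()  # 40 hex chars, fits pool_sign
    obj, created = MethodPool.objects.get_or_create(
        pool_sign=sign,
        defaults={
            "agent": agent,
            "uri": uri,
            "http_method": http_method,
            "method_pool": raw_method_pool,
        },
    )
    return obj, created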
+ clent_ip = models.CharField(max_length=255, blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + uri_sha1 = models.CharField(max_length=40, + blank=True, + default='', + db_index=True) + sinks = models.ManyToManyField( + HookStrategy, + verbose_name=_('sinks'), + blank=True, + related_name="method_pools", + related_query_name="method_pool", + ) + + class Meta: + managed = get_managed() + db_table = 'iast_agent_method_pool' + indexes = [models.Index(fields=['uri_sha1', 'http_method', 'agent'])] + + +from django_elasticsearch_dsl import Document +from django_elasticsearch_dsl.registries import registry +from django_elasticsearch_dsl import Document, fields +from dongtai_conf.settings import METHOD_POOL_INDEX + + +@registry.register_document +class MethodPoolDocument(Document): + user_id = fields.IntegerField(attr="agent.user_id") + bind_project_id = fields.IntegerField(attr="agent.bind_project_id") + project_version_id = fields.IntegerField(attr="agent.project_version_id") + req_header_for_search = fields.TextField(attr="req_header_fs") + language = fields.TextField(attr="agent.language") + agent_id = fields.TextField(attr="agent_id") + + def generate_id(self, object_instance): + return '-'.join( + [str(object_instance.agent_id), + str(object_instance.pool_sign)]) + + class Index: + name = METHOD_POOL_INDEX + + class Django: + model = MethodPool + + fields = [ + 'res_header', + 'uri_sha1', + 'url', + 'update_time', + 'res_body', + 'req_params', + 'req_header', + 'req_data', + 'pool_sign', + 'method_pool', + 'id', + 'http_scheme', + 'http_protocol', + 'http_method', + 'create_time', + 'context_path', + 'clent_ip', + ] + + ignore_signals = False diff --git a/dongtai_common/models/agent_properties.py b/dongtai_common/models/agent_properties.py new file mode 100644 index 000000000..d48810c8f --- /dev/null +++ b/dongtai_common/models/agent_properties.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/14 下午2:54 +# software: PyCharm +# project: dongtai-models +from django.db import models +from dongtai_common.utils.settings import get_managed + +from dongtai_common.models.agent import IastAgent + + +class IastAgentProperties(models.Model): + hook_type = models.IntegerField(blank=True, null=True) + dump_class = models.IntegerField(blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + updated_by = models.IntegerField(blank=True, null=True) + agent = models.ForeignKey(IastAgent, models.DO_NOTHING, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_agent_properties' diff --git a/dongtai_common/models/agent_thirdservice.py b/dongtai_common/models/agent_thirdservice.py new file mode 100644 index 000000000..1f5f9fc06 --- /dev/null +++ b/dongtai_common/models/agent_thirdservice.py @@ -0,0 +1,23 @@ +from django.db import models +from dongtai_common.utils.settings import get_managed +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project import IastProject + + +class IastThirdPartyService(models.Model): + agent = models.ForeignKey(IastAgent, + on_delete=models.CASCADE, + db_constraint=False, + db_index=True, + db_column='agent_id') + project = models.ForeignKey(IastProject, + models.DO_NOTHING, + blank=True, + default=-1, + db_constraint=False) + address = models.CharField(max_length=255, blank=True, null=True) + 
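# --- Hedged usage sketch (not part of this patch): querying the Elasticsearch index that
# MethodPoolDocument registers. It assumes Elasticsearch is reachable and the index has
# been built (for example via `python manage.py search_index --rebuild`); the project id
# and header keyword are placeholders.
from dongtai_common.models.agent_method_pool import MethodPoolDocument

search = (MethodPoolDocument.search()
          .filter("term", bind_project_id=1)
          .query("match", req_header_for_search="authorization"))
for hit in search[:10].execute():
    print(hit.http_method, hit.uri, hit.create_time)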
service_type = models.CharField(max_length=255, blank=True, null=True) + port = models.CharField(max_length=255, blank=True, null=True) + class Meta: + managed = get_managed() + db_table = 'iast_third_party_service' diff --git a/dongtai_common/models/agent_webhook_setting.py b/dongtai_common/models/agent_webhook_setting.py new file mode 100644 index 000000000..d17e22c99 --- /dev/null +++ b/dongtai_common/models/agent_webhook_setting.py @@ -0,0 +1,18 @@ +from django.db import models +from dongtai_common.models import User +from dongtai_common.utils.settings import get_managed + + +# agent report static forward by type +class IastAgentUploadTypeUrl(models.Model): + user = models.ForeignKey(User, models.DO_NOTHING) + type_id = models.IntegerField(blank=True, null=True) + send_num = models.IntegerField(blank=True, null=True,default=0) + url = models.CharField(max_length=255, blank=True, null=True) + headers = models.JSONField() + + create_time = models.IntegerField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_agent_upload_type_url' diff --git a/dongtai_common/models/api_route.py b/dongtai_common/models/api_route.py new file mode 100644 index 000000000..1f74f5d99 --- /dev/null +++ b/dongtai_common/models/api_route.py @@ -0,0 +1,109 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : api_route +# @created : Tuesday Aug 17, 2021 17:43:27 CST +# +# @description : +###################################################################### + +from django.db import models +from dongtai_common.utils.settings import get_managed +from dongtai_common.models.agent import IastAgent + + +class HttpMethod(models.Model): + method = models.CharField(max_length=100, blank=True) + + class Meta: + managed = get_managed() + db_table = 'iast_http_method' + + +class IastApiMethod(models.Model): + method = models.CharField(max_length=100, blank=True) + http_method = models.ManyToManyField( + HttpMethod, blank=True, through='IastApiMethodHttpMethodRelation') + + class Meta: + managed = get_managed() + db_table = 'iast_api_methods' + + +class IastApiMethodHttpMethodRelation(models.Model): + api_method = models.ForeignKey(IastApiMethod, + on_delete=models.CASCADE, + db_constraint=False, + db_column='api_method_id') + http_method = models.ForeignKey(HttpMethod, + on_delete=models.CASCADE, + db_constraint=False, + db_column='http_method_id') + + class Meta: + managed = get_managed() + db_table = 'iast_http_method_relation' + unique_together = ['api_method_id', 'http_method_id'] +class FromWhereChoices(models.IntegerChoices): + FROM_AGENT = 1 + FROM_METHOD_POOL = 2 + + +class IastApiRoute(models.Model): + path = models.CharField(max_length=255, blank=True) + code_class = models.CharField(max_length=255, + blank=True, + db_column='code_class') + description = models.CharField(max_length=500, blank=True) + method = models.ForeignKey(IastApiMethod, + on_delete=models.DO_NOTHING, + db_constraint=False, + db_index=True, + db_column='method_id') + code_file = models.CharField(max_length=500, + blank=True, + db_column='code_file') + controller = models.CharField(max_length=100, blank=True) + agent = models.ForeignKey(IastAgent, + on_delete=models.CASCADE, + db_constraint=False, + db_index=True, + db_column='agent_id') + from_where = models.IntegerField(default=FromWhereChoices.FROM_AGENT, + choices=FromWhereChoices.choices) + class Meta: + managed = get_managed() + db_table = 'iast_api_route' + unique_together = ['path', 
'method'] + + +class IastApiParameter(models.Model): + name = models.CharField(max_length=100, blank=True) + parameter_type = models.CharField(max_length=100, + blank=True, + default='', + db_column='type') + annotation = models.CharField(max_length=500, blank=True) + route = models.ForeignKey(IastApiRoute, + on_delete=models.CASCADE, + db_constraint=False, + db_index=True, + db_column='route_id') + + class Meta: + managed = get_managed() + db_table = 'iast_api_parameter' + unique_together = ['name', 'route_id'] + + +class IastApiResponse(models.Model): + return_type = models.CharField(max_length=100, blank=True) + route = models.ForeignKey(IastApiRoute, + on_delete=models.CASCADE, + db_constraint=False, + db_index=True, + db_column='route_id') + + class Meta: + managed = get_managed() + db_table = 'iast_api_response' + unique_together = ['return_type', 'route_id'] diff --git a/dongtai_common/models/application.py b/dongtai_common/models/application.py new file mode 100644 index 000000000..555796f2d --- /dev/null +++ b/dongtai_common/models/application.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/25 14:47 +# software: PyCharm +# project: dongtai-models +from django.db import models +from dongtai_common.utils.settings import get_managed + +from dongtai_common.models import User + + +class IastApplicationModel(models.Model): + user = models.ForeignKey(User, models.DO_NOTHING, blank=True, null=True) + name = models.CharField(max_length=255, blank=True, null=True) + path = models.CharField(max_length=255, blank=True, null=True) + status = models.CharField(max_length=255, blank=True, null=True) + server_id = models.IntegerField(blank=True, null=True) + vul_count = models.IntegerField(blank=True, null=True) + dt = models.IntegerField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_application' + unique_together = (('name', 'path'),) diff --git a/dongtai_common/models/aql_info.py b/dongtai_common/models/aql_info.py new file mode 100644 index 000000000..2fac9fd91 --- /dev/null +++ b/dongtai_common/models/aql_info.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/8/20 15:10 +# software: PyCharm +# project: dongtai-models + +from django.db import models +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.utils.settings import get_managed +from dongtai_web.dongtai_sca.models import VulPackage + + +class AqlInfo(models.Model): + + vul_title = models.CharField(max_length=255, blank=True, null=True) + safe_version = models.CharField(max_length=255, blank=True, null=True) + latest_version = models.CharField(max_length=255, blank=True, null=True) + source_license = models.CharField(max_length=255, blank=True, null=True) + aql = models.CharField(max_length=255, blank=True, null=True) + + availability = models.SmallIntegerField(blank=True, null=True) + license_risk = models.SmallIntegerField(blank=True, null=True) + vul_type_name = models.CharField(max_length=255, blank=True, null=True) + level = models.ForeignKey(IastVulLevel, models.DO_NOTHING, blank=True, null=True) + cve_relation_id = models.IntegerField(blank=True, null=True) + + + class Meta: + managed = get_managed() + db_table = 'iast_aql_info' diff --git a/dongtai_common/models/asset.py b/dongtai_common/models/asset.py new file mode 100644 index 000000000..3e9d0facf --- 
/dev/null +++ b/dongtai_common/models/asset.py @@ -0,0 +1,152 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/8/20 15:10 +# software: PyCharm +# project: dongtai-models + +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from dongtai_common.models import User +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project import IastProject +from dongtai_common.models.project_version import IastProjectVersion +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.utils.settings import get_managed +from dongtai_common.models.department import Department +from dongtai_common.models.talent import Talent + + +class Asset(models.Model): + package_name = models.CharField(max_length=255, blank=True, null=True) + package_path = models.CharField(max_length=255, blank=True, null=True) + signature_algorithm = models.CharField(max_length=255, blank=True, null=True) + signature_value = models.CharField(max_length=255, blank=True, null=True) + dt = models.IntegerField(blank=True, null=True) + version = models.CharField(max_length=255, blank=True, null=True) + safe_version = models.CharField(max_length=255, blank=True, null=False, default='') + last_version = models.CharField(max_length=255, blank=True, null=False, default='') + level = models.ForeignKey(IastVulLevel, models.DO_NOTHING, blank=True, null=True, default=4) + vul_count = models.IntegerField(blank=True, null=True) + vul_critical_count = models.IntegerField(default=0, blank=True, null=False) + vul_high_count = models.IntegerField(default=0, blank=True, null=False) + vul_medium_count = models.IntegerField(default=0, blank=True, null=False) + vul_low_count = models.IntegerField(default=0, blank=True, null=False) + vul_info_count = models.IntegerField(default=0, blank=True, null=False) + agent = models.ForeignKey( + to=IastAgent, + on_delete=models.DO_NOTHING, + related_name='assets', + related_query_name='asset', + verbose_name=_('agent'), + blank=True, + null=True, + default=-1 + ) + project = models.ForeignKey(IastProject, on_delete=models.DO_NOTHING, blank=True, null=False, default=-1) + project_version = models.ForeignKey(IastProjectVersion, on_delete=models.DO_NOTHING, blank=True, null=False, + default=-1) + user = models.ForeignKey(User, models.DO_NOTHING, null=False, default=-1) + project_name = models.CharField(max_length=255, blank=True, null=False, default='') + language = models.CharField(max_length=32, blank=True, null=False, default='') + license = models.CharField(max_length=64, blank=True, null=False, default='') + dependency_level = models.IntegerField(null=False, default=0) + parent_dependency_id = models.IntegerField(blank=True, null=False, default=0) + is_del = models.SmallIntegerField(blank=True, null=False, default=0) + + # 部门id + department = models.ForeignKey(Department, models.DO_NOTHING, blank=True, null=True, default=-1) + # 租户id + talent = models.ForeignKey(Talent, models.DO_NOTHING, blank=True, null=True, default=-1) + safe_version_list = models.JSONField(blank=True, null=True, default=list) + nearest_safe_version = models.JSONField(blank=True, + null=True, + default=str) + latest_safe_version = models.JSONField(blank=True, null=True, default=str) + license_list = models.JSONField(blank=True, null=True, default=list) + highest_license = models.JSONField(blank=True, null=True, default=dict) + + class Meta: + managed = get_managed() + db_table = 'iast_asset' + + +from django_elasticsearch_dsl import 
Document +from django_elasticsearch_dsl.registries import registry +from dongtai_web.utils import get_model_field +from django.db.models.fields.related import ForeignKey +from django_elasticsearch_dsl import Document, fields +from dongtai_conf.settings import ASSET_INDEX +from django_elasticsearch_dsl.search import Search +from django.core.cache import cache +import uuid + +@registry.register_document +class IastAssetDocument(Document): + user_id = fields.IntegerField(attr="user_id") + agent_id = fields.IntegerField(attr="agent_id") + level_id = fields.IntegerField(attr="level_id") + project_id = fields.IntegerField(attr="project_id") + project_version_id = fields.IntegerField( + attr="project_version_id") + department_id = fields.IntegerField(attr="department_id") + talent_id = fields.IntegerField(attr="talent_id") + safe_version_list = fields.ObjectField() + nearest_safe_version = fields.ObjectField() + latest_safe_version = fields.ObjectField() + license_list = fields.ObjectField() + highest_license = fields.ObjectField() + + def generate_id(self, object_instance): + return object_instance.id + + def prepare_safe_version_list(self, object_instance): + return object_instance.safe_version_list + + def prepare_nearest_safe_version(self, object_instance): + return object_instance.nearest_safe_version + + def prepare_latest_safe_version(self, object_instance): + return object_instance.latest_safe_version + + def prepare_license_list(self, object_instance): + return object_instance.license_list + + def prepare_highest_license(self, object_instance): + return object_instance.highest_license + + @classmethod + def search(cls, using=None, index=None): + uuid_key = uuid.uuid4().hex + cache_uuid_key = cache.get_or_set( + f'es-documents-shards-{cls.__name__}', uuid_key, 60 * 1) + return Search(using=cls._get_using(using), + index=cls._default_index(index), + doc_type=[cls], + model=cls.django.model).params(preference=cache_uuid_key) + + def get_instances_from_related(self, related_instance): + """If related_models is set, define how to retrieve the Car instance(s) from the related model. + The related_models option should be used with caution because it can lead in the index + to the updating of a lot of items. 
+ """ + if isinstance(related_instance, IastAgent): + if related_instance.bind_project_id < 0: + return Asset.objects.filter(agent_id=related_instance.pk).all() + + class Index: + name = ASSET_INDEX + + class Django: + model = Asset + fields = [ + 'id', 'package_name', 'package_path', 'signature_algorithm', + 'signature_value', 'dt', 'version', 'safe_version', 'last_version', + 'vul_count', 'vul_critical_count', 'vul_high_count', + 'vul_medium_count', 'vul_low_count', 'vul_info_count', + 'project_name', 'language', 'license', 'dependency_level', + 'parent_dependency_id', 'is_del' + ] + + ignore_signals = False diff --git a/dongtai_common/models/asset_aggr.py b/dongtai_common/models/asset_aggr.py new file mode 100644 index 000000000..d3ff6f733 --- /dev/null +++ b/dongtai_common/models/asset_aggr.py @@ -0,0 +1,56 @@ +# !usr/bin/env python +# coding:utf-8 +# @author:zhaoyanwei +# @file: asset_aggr.py +# @time: 2022/5/15 上午12:05 + +from django.db import models +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.utils.settings import get_managed + + +class AssetAggr(models.Model): + package_name = models.CharField(max_length=255, blank=True, null=True) + signature_value = models.CharField(max_length=255, blank=True, null=True) + version = models.CharField(max_length=255, blank=True, null=True) + safe_version = models.CharField(max_length=255, blank=True, null=False, default='') + last_version = models.CharField(max_length=255, blank=True, null=False, default='') + level = models.ForeignKey(IastVulLevel, models.DO_NOTHING, blank=True, null=True) + vul_count = models.IntegerField(blank=True, null=True) + vul_critical_count = models.IntegerField(default=0, blank=True, null=False) + vul_high_count = models.IntegerField(default=0, blank=True, null=False) + vul_medium_count = models.IntegerField(default=0, blank=True, null=False) + vul_low_count = models.IntegerField(default=0, blank=True, null=False) + vul_info_count = models.IntegerField(default=0, blank=True, null=False) + project_count = models.IntegerField(blank=True, null=False, default=0) + language = models.CharField(max_length=32, blank=True, null=False, default='') + license = models.CharField(max_length=64, blank=True, null=False, default='') + is_del = models.SmallIntegerField(blank=True, null=False, default=0) + + class Meta: + managed = get_managed() + db_table = 'iast_asset_aggr' + + +from django_elasticsearch_dsl import Document +from django_elasticsearch_dsl.registries import registry +from django_elasticsearch_dsl import Document, fields +from dongtai_conf.settings import ASSET_AGGR_INDEX + + +@registry.register_document +class AssetAggrDocument(Document): + level_id = fields.IntegerField(attr="level_id") + + class Index: + name = ASSET_AGGR_INDEX + + class Django: + model = AssetAggr + + fields = [ + 'id', 'package_name', 'signature_value', 'version', 'safe_version', + 'last_version', 'vul_count', 'vul_critical_count', + 'vul_high_count', 'vul_medium_count', 'vul_low_count', + 'vul_info_count', 'project_count', 'language', 'license', 'is_del', + ] diff --git a/dongtai_common/models/asset_vul.py b/dongtai_common/models/asset_vul.py new file mode 100644 index 000000000..86bdf6962 --- /dev/null +++ b/dongtai_common/models/asset_vul.py @@ -0,0 +1,186 @@ +from dongtai_common.models.asset import Asset +from django.db import models +from dongtai_common.utils.settings import get_managed +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_common.models.vul_level import 
IastVulLevel + + +class IastAssetVul(models.Model): + vul_name = models.CharField(max_length=255, blank=True, null=True) + vul_detail = models.TextField(blank=True, null=True) + # 漏洞类型等级 + level = models.ForeignKey(IastVulLevel, + models.DO_NOTHING, + blank=True, + null=True) + license = models.CharField(max_length=50, blank=True, null=True) + # 开源许可证 风险等级 # 1 高 2中 3低 0无风险 + license_level = models.SmallIntegerField(blank=True, null=True) + aql = models.CharField(max_length=100, blank=True, null=True) + package_name = models.CharField(max_length=100, blank=True, null=True) + package_hash = models.CharField(max_length=100, blank=True, null=True) + package_version = models.CharField(max_length=50, blank=True, null=True) + package_safe_version = models.CharField(max_length=50, blank=True, null=True) + package_latest_version = models.CharField(max_length=50, blank=True, null=True) + package_language = models.CharField(max_length=10, blank=True, null=True) + vul_cve_nums = models.JSONField(blank=True, null=True) + vul_serial = models.CharField(max_length=100, blank=True, null=True) # 漏洞编号 CWE|CVE等数据 + have_article = models.SmallIntegerField(blank=True, null=True) + have_poc = models.SmallIntegerField(blank=True, null=True) + cve_code = models.CharField(max_length=64, blank=True, null=True) + sid = models.CharField(max_length=64, blank=True, null=True) + cve_id = models.IntegerField(blank=True, null=True) + vul_publish_time = models.DateTimeField(blank=True, null=True) + vul_update_time = models.DateTimeField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + update_time_desc = models.IntegerField(blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + fix_plan = models.JSONField(blank=True, null=True, default=dict) + poc = models.JSONField(blank=True, null=True, default=dict) + descriptions = models.JSONField(blank=True, null=True, default=dict) + references = models.JSONField(blank=True, null=True, default=dict) + + class Meta: + managed = True + db_table = 'iast_asset_vul' + + +class IastVulAssetRelation(models.Model): + asset_vul = models.ForeignKey(IastAssetVul, + on_delete=models.CASCADE, + db_constraint=False, + db_column='asset_vul_id') + asset = models.ForeignKey(Asset, + on_delete=models.CASCADE, + db_constraint=False, + db_column='asset_id') + status = models.ForeignKey(IastVulnerabilityStatus, + on_delete=models.DO_NOTHING, + db_constraint=False, + db_column='status_id') + is_del = models.SmallIntegerField(blank=True, null=False, default=0) + create_time = models.IntegerField(blank=True, null=True) + vul_dependency_path = models.JSONField(blank=True, + null=True, + default=list) + effected_version_list = models.JSONField(blank=True, + null=True, + default=list) + fixed_version_list = models.JSONField(blank=True, + null=True, + default=list) + nearest_fixed_version = models.JSONField(blank=True, + null=True, + default=dict) + + class Meta: + managed = get_managed() + db_table = 'iast_asset_vul_relation' + unique_together = ['asset_vul_id', 'asset_id'] + + +class IastAssetVulType(models.Model): + cwe_id = models.CharField(max_length=20, blank=True, null=True, default='') + name = models.CharField(max_length=100, blank=True, null=True, default='') + + class Meta: + managed = get_managed() + db_table = 'iast_asset_vul_type' + + +class IastAssetVulTypeRelation(models.Model): + asset_vul = models.ForeignKey(IastAssetVul, + on_delete=models.CASCADE, + db_constraint=False, + db_column='asset_vul_id') + asset_vul_type = 
models.ForeignKey(IastAssetVulType, + on_delete=models.CASCADE, + db_constraint=False, + db_column='asset_vul_type_id') + + class Meta: + managed = get_managed() + db_table = 'iast_asset_vul_type_relation' + + + # "iast_asset_vul_type_relation iast_asset_vul_relation iast_asset_vul iast_asset " +from django_elasticsearch_dsl.registries import registry +from django_elasticsearch_dsl import Document, fields +from dongtai_conf.settings import ASSET_VUL_INDEX +from django_elasticsearch_dsl.search import Search +from django.core.cache import cache +from dongtai_common.models.agent import IastAgent +import uuid + + +@registry.register_document +class IastAssetVulnerabilityDocument(Document): + # from asset_vul table + vul_name = fields.TextField(attr="asset_vul.vul_name") + vul_detail = fields.TextField(attr="asset_vul.vul_detail") + license = fields.TextField(attr="asset_vul.license") + license_level = fields.IntegerField(attr="asset_vul.license_level") + aql = fields.TextField(attr="asset_vul.aql") + package_hash = fields.TextField(attr="asset_vul.package_hash") + package_version = fields.TextField(attr="asset_vul.package_version") + package_safe_version = fields.TextField(attr="asset_vul.package_safe_version") + package_latest_version = fields.TextField(attr="asset_vul.package_latest_version") + package_language = fields.TextField(attr="asset_vul.package_language") + vul_cve_nums = fields.TextField(attr="asset_vul.vul_cve_nums") + vul_serial = fields.TextField(attr="asset_vul.vul_serial") + cve_code = fields.TextField(attr="asset_vul.cve_code") + have_article = fields.IntegerField(attr="asset_vul.have_article") + have_poc = fields.IntegerField(attr="asset_vul.have_poc") + cve_id = fields.IntegerField(attr="asset_vul.cve_id") + update_time = fields.IntegerField(attr="asset_vul.update_time") + create_time = fields.IntegerField(attr="asset_vul.create_time") + update_time_desc = fields.IntegerField(attr="asset_vul.update_time_desc") + vul_publish_time = fields.DateField(attr="asset_vul.vul_publish_time") + vul_update_time = fields.DateField(attr="asset_vul.vul_update_time") + level_id = fields.IntegerField(attr="asset_vul.level_id") + + # from asset_vul_relation + asset_vul_relation_id = fields.IntegerField(attr="id") + asset_vul_id = fields.IntegerField(attr="asset_vul_id") + asset_vul_relation_is_del = fields.IntegerField(attr="is_del") + + # from asset + asset_user_id = fields.IntegerField(attr="asset.user_id") + asset_agent_id = fields.IntegerField(attr="asset.agent_id") + asset_project_id = fields.IntegerField(attr="asset.project_id") + asset_project_version_id = fields.IntegerField(attr="asset.project_version_id") + + def prepare_vul_cve_nums(self, instance): + import json + return json.dumps(instance.asset_vul.vul_cve_nums) + + def generate_id(self, object_instance): + return object_instance.id + + def get_instances_from_related(self, related_instance): + """If related_models is set, define how to retrieve the Car instance(s) from the related model. + The related_models option should be used with caution because it can lead in the index + to the updating of a lot of items. 
+ """ + if isinstance(related_instance, IastAgent): + if related_instance.bind_project_id < 0: + return IastVulAssetRelation.objects.filter( + asset__agent__id=related_instance.pk).all() + + @classmethod + def search(cls, using=None, index=None): + uuid_key = uuid.uuid4().hex + cache_uuid_key = cache.get_or_set( + f'es-documents-shards-{cls.__name__}', uuid_key, 60 * 1) + return Search(using=cls._get_using(using), + index=cls._default_index(index), + doc_type=[cls], + model=cls.django.model).params(preference=cache_uuid_key) + + class Index: + name = ASSET_VUL_INDEX + + class Django: + model = IastVulAssetRelation + ignore_signals = False + auto_refresh = False diff --git a/dongtai_common/models/asset_vul_relation.py b/dongtai_common/models/asset_vul_relation.py new file mode 100644 index 000000000..550e02bf5 --- /dev/null +++ b/dongtai_common/models/asset_vul_relation.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/8/20 15:10 +# software: PyCharm +# project: dongtai-models + +from django.db import models +from dongtai_common.models.project import IastProject +from dongtai_common.models.project_version import IastProjectVersion +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.user import User +from dongtai_common.models.talent import Talent +from dongtai_common.models.department import Department +from dongtai_common.utils.settings import get_managed +from dongtai_web.dongtai_sca.models import VulPackage +from dongtai_common.models.aql_info import AqlInfo + + +class AssetVulRelation(models.Model): + hash = models.CharField(max_length=255, blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + is_del = models.SmallIntegerField(blank=True, null=True) + talent = models.ForeignKey( + to=Talent, + on_delete=models.DO_NOTHING, + blank=True, + null=True + ) + + department = models.ForeignKey( + to=Department, + on_delete=models.DO_NOTHING, + blank=True, + null=True + ) + + + user = models.ForeignKey( + to=User, + on_delete=models.DO_NOTHING, + blank=True, + null=True + ) + + project_version = models.ForeignKey( + to=IastProjectVersion, + on_delete=models.DO_NOTHING, + blank=True, + null=True + ) + + project = models.ForeignKey( + to=IastProject, + on_delete=models.DO_NOTHING, + blank=True, + null=True + ) + + agent = models.ForeignKey( + to=IastAgent, + on_delete=models.DO_NOTHING, + blank=True, + null=True + ) + vul_package = models.ForeignKey( + to=VulPackage, + on_delete=models.DO_NOTHING, + blank=True, + null=True + ) + aql_info = models.ForeignKey( + to=AqlInfo, + on_delete=models.DO_NOTHING, + blank=True, + null=True + ) + + class Meta: + managed = get_managed() + db_table = 'iast_asset_vul_relation' diff --git a/dongtai_common/models/authorization.py b/dongtai_common/models/authorization.py new file mode 100644 index 000000000..01449053e --- /dev/null +++ b/dongtai_common/models/authorization.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +#-*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/25 14:48 +# software: PyCharm +# project: dongtai-models +from django.db import models +from dongtai_common.utils.settings import get_managed + + +class IastAuthorization(models.Model): + user_id = models.IntegerField(blank=True, null=True) + token = models.CharField(max_length=50, blank=True, null=True) + view_name = models.CharField(max_length=255, blank=True, null=True) + state = models.CharField(max_length=20, blank=True, null=True) + dt = models.IntegerField(blank=True, null=True) + + 
class Meta: + managed = get_managed() + db_table = 'iast_authorization' diff --git a/dongtai_common/models/department.py b/dongtai_common/models/department.py new file mode 100644 index 000000000..85f7c0249 --- /dev/null +++ b/dongtai_common/models/department.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/11/27 下午4:31 +# software: PyCharm +# project: dongtai-models +import time + +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from dongtai_common.models.talent import Talent +from dongtai_common.utils.settings import get_managed +from dongtai_common.utils.customfields import trans_char_field +from typing import Any + +class IastDepartment(models.Model): + name = models.CharField(max_length=255, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_department' + + +class PermissionsMixin(models.Model): + talent = models.ManyToManyField( + Talent, + verbose_name=_('talent'), + blank=True, + help_text=_( + 'The talent this department belongs to. A department will get all permissions ' + 'granted to each of their talent.' + ), + related_name="departments", + related_query_name="talent", + ) + + class Meta: + abstract = True + + +class Department(PermissionsMixin): + name = models.CharField( + _('name'), + unique=True, + max_length=100, + error_messages={ + 'unique': _("A department with that department name already exists."), + }, + ) + create_time = models.IntegerField(_('create time'), default=int(time.time()), blank=True) + update_time = models.IntegerField(_('update time'), default=int(time.time()), blank=True) + created_by = models.IntegerField(_('created by'), blank=True) + parent_id = models.IntegerField(_('parent id'), blank=True) + principal_id = models.IntegerField(default=0, blank=True) + + class Meta: + managed = get_managed() + db_table = 'auth_department' + + def get_department_name(self): + return self.name + + + @trans_char_field('name', { + 'zh': { + '默认部门': '默认部门' + }, + 'en': { + '默认部门': 'default department' + } + }) + def __getattribute__(self, name) -> Any: + return super().__getattribute__(name) diff --git a/dongtai_common/models/deploy.py b/dongtai_common/models/deploy.py new file mode 100644 index 000000000..8a1de8ca7 --- /dev/null +++ b/dongtai_common/models/deploy.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/6/3 11:36 +# software: PyCharm +# project: dongtai-models + +from django.db import models +from dongtai_common.utils.settings import get_managed + +class IastDeployDesc(models.Model): + desc = models.TextField(blank=True, null=True) + middleware = models.CharField(max_length=255, blank=True, null=True) + language = models.CharField(max_length=255, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_deploy' diff --git a/dongtai_common/models/document.py b/dongtai_common/models/document.py new file mode 100644 index 000000000..523f11531 --- /dev/null +++ b/dongtai_common/models/document.py @@ -0,0 +1,12 @@ +from django.db import models +from dongtai_common.utils.settings import get_managed + +class IastDocument(models.Model): + title = models.CharField(max_length=100, blank=True, null=True) + url = models.CharField(max_length=2000, blank=True, null=True) + language = models.CharField(max_length=100, blank=True, null=True) + weight = models.IntegerField(default=0) + + class Meta: + managed = get_managed() + db_table = 'iast_document' diff --git 
a/dongtai_common/models/engine_heartbeat.py b/dongtai_common/models/engine_heartbeat.py new file mode 100644 index 000000000..abac64541 --- /dev/null +++ b/dongtai_common/models/engine_heartbeat.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/5/6 下午5:56 +# project: dongtai-models +from django.db import models +from dongtai_common.utils.settings import get_managed + + +class IastEngineHeartbeat(models.Model): + client_ip = models.CharField(max_length=255, blank=True, null=True) + status = models.CharField(max_length=255, blank=True, null=True) + msg = models.CharField(max_length=255, blank=True, null=True) + agentcount = models.IntegerField(db_column='agentCount', blank=True, null=True) # Field name made lowercase. + reqcount = models.BigIntegerField(db_column='reqCount', blank=True, null=True) # Field name made lowercase. + agentenablecount = models.IntegerField(db_column='agentEnableCount', blank=True, + null=True) # Field name made lowercase. + projectcount = models.IntegerField(db_column='projectCount', blank=True, null=True) # Field name made lowercase. + usercount = models.IntegerField(db_column='userCount', blank=True, null=True) # Field name made lowercase. + vulcount = models.IntegerField(db_column='vulCount', blank=True, null=True) # Field name made lowercase. + methodpoolcount = models.IntegerField(db_column='methodPoolCount', blank=True, + null=True) # Field name made lowercase. + timestamp = models.IntegerField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_engine_heartbeat' diff --git a/dongtai_common/models/engine_monitoring_indicators.py b/dongtai_common/models/engine_monitoring_indicators.py new file mode 100644 index 000000000..c33b26174 --- /dev/null +++ b/dongtai_common/models/engine_monitoring_indicators.py @@ -0,0 +1,23 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : engine_monitoring_indicators +# @created : Wednesday Aug 25, 2021 14:51:16 CST +# +# @description : +###################################################################### + +from django.db import models +from dongtai_common.utils.settings import get_managed + + +class IastEnginMonitoringIndicators(models.Model): + key = models.CharField(max_length=100, + blank=True, + default='', + null=False, + unique=True) + name = models.CharField(max_length=100, blank=True, default='', null=False) + + class Meta: + managed = get_managed() + db_table = 'engine_monitoring_indicators' diff --git a/dongtai_common/models/errorlog.py b/dongtai_common/models/errorlog.py new file mode 100644 index 000000000..06a394652 --- /dev/null +++ b/dongtai_common/models/errorlog.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/22 18:21 +# software: PyCharm +# project: dongtai-models +from django.db import models + +from dongtai_common.models.agent import IastAgent +from dongtai_common.utils.settings import get_managed + + +class IastErrorlog(models.Model): + errorlog = models.TextField(blank=True, null=True) + state = models.CharField(max_length=50, blank=True, null=True) + dt = models.IntegerField(blank=True, null=True) + agent = models.ForeignKey(IastAgent, models.DO_NOTHING, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_errorlog' diff --git a/dongtai_common/models/group_routes.py b/dongtai_common/models/group_routes.py new file mode 100644 index 
000000000..71b5b3ec3 --- /dev/null +++ b/dongtai_common/models/group_routes.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/5/10 下午2:24 +# project: dongtai-models +from django.contrib.auth.models import Group +from django.db import models + +from dongtai_common.models import User +from dongtai_common.utils.settings import get_managed + + +class AuthGroupRoutes(models.Model): + is_active = models.IntegerField(blank=True, null=True) + routes = models.JSONField(blank=True, null=True) + group = models.ForeignKey(Group, models.DO_NOTHING, blank=True, null=True) + created_by = models.ForeignKey(User, models.DO_NOTHING, blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'auth_group_routes' diff --git a/dongtai_common/models/heartbeat.py b/dongtai_common/models/heartbeat.py new file mode 100644 index 000000000..ff3c9a098 --- /dev/null +++ b/dongtai_common/models/heartbeat.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/22 18:21 +# software: PyCharm +# project: dongtai-models + +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from dongtai_common.models.agent import IastAgent +from dongtai_common.utils.settings import get_managed + + +class IastHeartbeat(models.Model): + memory = models.CharField(max_length=1000, blank=True, null=True) + cpu = models.CharField(max_length=1000, blank=True, null=True) + disk = models.CharField(max_length=1000, blank=True, null=True) + req_count = models.IntegerField(blank=True, null=True) + dt = models.IntegerField(blank=True, null=True) + report_queue = models.PositiveIntegerField(default=0, + null=False, + blank=False) + method_queue = models.PositiveIntegerField(default=0, + null=False, + blank=False) + replay_queue = models.PositiveIntegerField(default=0, + null=False, + blank=False) + + agent = models.ForeignKey( + to=IastAgent, + on_delete=models.DO_NOTHING, + related_name='heartbeats', + related_query_name='heartbeat', + verbose_name=_('agent'), + blank=True, + null=True + ) + + class Meta: + managed = get_managed() + db_table = 'iast_heartbeat' diff --git a/dongtai_common/models/hook_strategy.py b/dongtai_common/models/hook_strategy.py new file mode 100644 index 000000000..77ad8249e --- /dev/null +++ b/dongtai_common/models/hook_strategy.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/13 下午7:14 +# software: PyCharm +# project: dongtai-models +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from dongtai_common.models.hook_type import HookType +from dongtai_common.utils.settings import get_managed + + +class PermissionsMixin(models.Model): + type = models.ManyToManyField( + HookType, + verbose_name=_('type'), + blank=True, + help_text=_( + 'The department this user belongs to. A user will get all permissions ' + 'granted to each of their department.' 
+ ), + related_name="strategies", + related_query_name="strategy", + ) + + class Meta: + abstract = True + + +class HookStrategy(PermissionsMixin): + value = models.CharField(max_length=255, blank=True, null=True) + source = models.CharField(max_length=255, blank=True, null=True) + target = models.CharField(max_length=255, blank=True, null=True) + inherit = models.CharField(max_length=255, blank=True, null=True) + track = models.CharField(max_length=5, blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + created_by = models.IntegerField(blank=True, null=True, ) + enable = models.IntegerField(blank=True, null=False, default=1) + + class Meta: + managed = get_managed() + db_table = 'iast_hook_strategy' diff --git a/dongtai_common/models/hook_type.py b/dongtai_common/models/hook_type.py new file mode 100644 index 000000000..adf2aab97 --- /dev/null +++ b/dongtai_common/models/hook_type.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/13 下午6:38 +# software: PyCharm +# project: dongtai-models +from django.db import models +from dongtai_common.utils.settings import get_managed +from dongtai_common.models.program_language import IastProgramLanguage + +class HookType(models.Model): + type = models.IntegerField(blank=True, null=True) + name = models.CharField(max_length=255, blank=True, null=True) + value = models.CharField(max_length=255, blank=True, null=True) + enable = models.IntegerField(blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + created_by = models.IntegerField(blank=True, null=True) + language = models.ForeignKey(IastProgramLanguage, + blank=True, + default='', + on_delete=models.DO_NOTHING, + db_constraint=False) + vul_strategy = models.ForeignKey('dongtai_common.IastStrategyModel', + blank=True, + default='', + on_delete=models.DO_NOTHING, + db_column='strategy_id', + db_constraint=False,) + class Meta: + managed = get_managed() + db_table = 'iast_hook_type' diff --git a/dongtai_common/models/iast_overpower_user.py b/dongtai_common/models/iast_overpower_user.py new file mode 100644 index 000000000..550bcbeed --- /dev/null +++ b/dongtai_common/models/iast_overpower_user.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/10/30 15:46 +# software: PyCharm +# project: dongtai-models +from django.db import models +from dongtai_common.utils.settings import get_managed + + + +class IastOverpowerUserAuth(models.Model): + server_name = models.CharField(max_length=255, blank=True, null=True) + server_port = models.CharField(max_length=5, blank=True, null=True) + app_name = models.CharField(max_length=50, blank=True, null=True) + http_url = models.CharField(max_length=255, blank=True, null=True) + http_query_string = models.CharField(max_length=2000, blank=True, null=True) + auth_sql = models.CharField(max_length=255, blank=True, null=True) + auth_value = models.CharField(max_length=1000, blank=True, null=True) + jdbc_class = models.CharField(max_length=255, blank=True, null=True) + created_time = models.DateTimeField(blank=True, null=True) + updated_time = models.DateTimeField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_user_auth' diff --git a/dongtai_common/models/iast_vul_log.py b/dongtai_common/models/iast_vul_log.py new file mode 100644 index 000000000..621578dc2 --- 
/dev/null +++ b/dongtai_common/models/iast_vul_log.py @@ -0,0 +1,37 @@ +from django.db import models + +from dongtai_common.models.agent import IastAgent +from dongtai_common.utils.settings import get_managed +from django.db.models import IntegerChoices +from dongtai_common.models.user import User +from dongtai_common.models.asset import Asset +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from time import time +from dongtai_common.models.asset_vul import IastAssetVul, IastVulAssetRelation + + +class MessageTypeChoices(IntegerChoices): + CHANGE_STATUS = 1 + VUL_RECHECK = 2 + PUSH_TO_INTEGRATION = 3 + VUL_FOUND = 4 + + +class IastVulLog(models.Model): + msg_type = models.IntegerField(blank=True, null=True) + msg = models.TextField(blank=True, null=True) + meta_data = models.JSONField(blank=True, null=True) + datetime = models.IntegerField(blank=True, null=True, default=time()) + vul = models.ForeignKey(IastVulnerabilityModel, + models.DO_NOTHING, + default=-1, + db_constraint=False) + asset_vul = models.ForeignKey(IastAssetVul, + models.DO_NOTHING, + default=-1, + db_constraint=False) + user = models.ForeignKey(User, models.DO_NOTHING, db_constraint=False) + + class Meta: + managed = get_managed() + db_table = 'iast_vul_log' diff --git a/dongtai_common/models/iast_vul_overpower.py b/dongtai_common/models/iast_vul_overpower.py new file mode 100644 index 000000000..5928deb63 --- /dev/null +++ b/dongtai_common/models/iast_vul_overpower.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/10/31 11:38 +# software: PyCharm +# project: dongtai-models +from django.db import models + +from dongtai_common.models.agent import IastAgent + +from dongtai_common.utils.settings import get_managed + + +class IastVulOverpower(models.Model): + agent = models.ForeignKey(IastAgent, models.DO_NOTHING, blank=True, null=True) + http_url = models.CharField(max_length=2000, blank=True, null=True) + http_uri = models.CharField(max_length=2000, blank=True, null=True) + http_query_string = models.CharField(max_length=2000, blank=True, null=True) + http_method = models.CharField(max_length=10, blank=True, null=True) + http_scheme = models.CharField(max_length=255, blank=True, null=True) + http_protocol = models.CharField(max_length=255, blank=True, null=True) + http_header = models.CharField(max_length=2000, blank=True, null=True) + x_trace_id = models.CharField(max_length=255, blank=True, null=True) + cookie = models.CharField(max_length=2000, blank=True, null=True) + sql = models.CharField(max_length=2000, blank=True, null=True) + created_time = models.DateTimeField(blank=True, null=True) + updated_time = models.DateTimeField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_vul_overpower' diff --git a/dongtai_common/models/message.py b/dongtai_common/models/message.py new file mode 100644 index 000000000..1f7430c4d --- /dev/null +++ b/dongtai_common/models/message.py @@ -0,0 +1,42 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : message +# @created : 星期三 10月 13, 2021 12:09:11 CST +# +# @description : +###################################################################### + + +from django.db import models +from dongtai_common.utils.settings import get_managed +from time import time + +class IastMessageType(models.Model): + name = models.CharField(max_length=100, blank=True, null=False, default='') + + class Meta: + managed = 
get_managed() + db_table = 'iast_message_type' + + +class IastMessage(models.Model): + message = models.CharField(max_length=512, + blank=True, + null=False, + default='') + relative_url = models.CharField(max_length=512, + blank=True, + null=False, + default='') + create_time = models.IntegerField(blank=True, default=time()) + read_time = models.IntegerField(blank=True, default=0) + is_read = models.IntegerField(blank=True, null=True, default=0) + message_type = models.ForeignKey(IastMessageType, + on_delete=models.DO_NOTHING, + db_constraint=False, + db_column='message_type_id') + to_user_id = models.IntegerField(default=0) + + class Meta: + managed = get_managed() + db_table = 'iast_message' diff --git a/dongtai_common/models/notify_config.py b/dongtai_common/models/notify_config.py new file mode 100644 index 000000000..f6ddb90b0 --- /dev/null +++ b/dongtai_common/models/notify_config.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/5/6 下午2:34 +# project: dongtai-models +from django.db import models + +from dongtai_common.models import User +from dongtai_common.utils.settings import get_managed + +WEB_HOOK = 1 +GITLAB = 2 +JIRA = 3 +ZENDAO = 4 +FEISHU = 5 +WEIXIN = 6 +DING_DING = 7 + + + +NOTIFY_TYPE_CHOICES = ( + (WEB_HOOK, WEB_HOOK), + (GITLAB, GITLAB), + (JIRA, JIRA), + (ZENDAO, ZENDAO), + (FEISHU, FEISHU), + (WEIXIN, WEIXIN), + (DING_DING, DING_DING), +) + + +class IastNotifyConfig(models.Model): + WEB_HOOK = WEB_HOOK + DING_DING = DING_DING + FEISHU = FEISHU + WEIXIN = WEIXIN + NOTIFY_TYPE_CHOICES = NOTIFY_TYPE_CHOICES + + notify_type = models.SmallIntegerField(blank=True, null=True, choices=NOTIFY_TYPE_CHOICES) + notify_meta_data = models.TextField(blank=True, null=True) # This field type is a guess. 
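# "This field type is a guess." is the marker that `python manage.py inspectdb` leaves when it
# cannot map a database column onto a Django field type exactly (the "Field name made
# lowercase." comments elsewhere in this patch come from the same tool). The TextField here
# presumably stores the per-channel settings for the notify_type chosen above (for example a
# webhook URL and secret) as a serialized blob, which is why no stricter type is enforced.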
+ user = models.ForeignKey(to=User, on_delete=models.DO_NOTHING, blank=True, null=True) + test_result = models.SmallIntegerField(blank=True, null=True,default=0) + create_time = models.IntegerField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_notify_config' diff --git a/dongtai_common/models/profile.py b/dongtai_common/models/profile.py new file mode 100644 index 000000000..c7dbed7db --- /dev/null +++ b/dongtai_common/models/profile.py @@ -0,0 +1,9 @@ +from django.db import models +from dongtai_common.utils.settings import get_managed + +class IastProfile(models.Model): + key = models.CharField(max_length=100) + value = models.CharField(max_length=100, blank=True, null=True) + class Meta: + managed = get_managed() + db_table = 'iast_profile' diff --git a/dongtai_common/models/program_language.py b/dongtai_common/models/program_language.py new file mode 100644 index 000000000..638086f65 --- /dev/null +++ b/dongtai_common/models/program_language.py @@ -0,0 +1,19 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : program_language +# @created : Tuesday Sep 07, 2021 10:21:33 CST +# +# @description : +###################################################################### + + + +from django.db import models +from dongtai_common.utils.settings import get_managed + +class IastProgramLanguage(models.Model): + name = models.CharField(max_length=255, blank=True) + + class Meta: + managed = get_managed() + db_table = 'iast_program_language' diff --git a/dongtai_common/models/project.py b/dongtai_common/models/project.py new file mode 100644 index 000000000..f59fd2718 --- /dev/null +++ b/dongtai_common/models/project.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/11/30 下午5:32 +# software: PyCharm +# project: dongtai-models +from django.db import models + +from dongtai_common.models import User +from dongtai_common.models.strategy_user import IastStrategyUser +from dongtai_common.utils.settings import get_managed +import time + + +class VulValidation(models.IntegerChoices): + FOLLOW_GLOBAL = 0 + ENABLE = 1 + DISABLE = 2 + __empty__ = 0 + + +class IastProject(models.Model): + name = models.CharField(max_length=255, blank=True, null=True) + mode = models.CharField(max_length=255, blank=True, null=True) + vul_count = models.PositiveIntegerField(blank=True, null=True) + agent_count = models.IntegerField(blank=True, null=True) + latest_time = models.IntegerField(blank=True, null=True) + user = models.ForeignKey(User, models.DO_NOTHING, blank=True, null=True) + # openapi服务不必使用该字段 + scan = models.ForeignKey(IastStrategyUser, + models.DO_NOTHING, + blank=True, + null=True) + + + vul_validation = models.IntegerField(default=0, + blank=True, + null=False, + choices=VulValidation.choices) + base_url = models.CharField(max_length=255, blank=True, default='') + test_req_header_key = models.CharField(max_length=511, + blank=True, + default='') + test_req_header_value = models.CharField(max_length=511, + blank=True, + default='') + + class Meta: + managed = get_managed() + db_table = 'iast_project' + + def update_latest(self): + self.latest_time = int(time.time()) + self.save(update_fields=['latest_time']) diff --git a/dongtai_common/models/project_report.py b/dongtai_common/models/project_report.py new file mode 100644 index 000000000..45cc32eb0 --- /dev/null +++ b/dongtai_common/models/project_report.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python +# -*- 
coding:utf-8 -*- +# author:luzhongyang +# datetime:2021/10/29 下午5:29 +# software: PyCharm +# project: dongtai-models +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from dongtai_common.models import User +from dongtai_common.models.project import IastProject +from dongtai_common.models.server import IastServer +from dongtai_common.utils.settings import get_managed + +ORDER_TYPE_REPORT = { + "1":"create_time", + "2":"status" +} + +class ProjectReport(models.Model): + user = models.ForeignKey(User, models.DO_NOTHING) + project = models.ForeignKey(IastProject, models.DO_NOTHING, blank=True, null=True) + type = models.CharField(max_length=10, blank=True, null=True) + language = models.CharField(max_length=10, blank=True, null=True) + status = models.IntegerField(default=0, null=False) + path = models.CharField(default='', max_length=255, blank=True, null=True) + file = models.BinaryField(blank=True, null=True) + create_time = models.IntegerField(default=0, null=False) + is_del = models.SmallIntegerField(default=0, null=False) + level_png = models.CharField(default='', max_length=255, blank=True, null=True) + trend_png = models.CharField(default='', max_length=255, blank=True, null=True) + version_str = models.CharField(default='', max_length=255, blank=True, null=True) + vul_type_str = models.CharField(default='', max_length=255, blank=True, null=True) + sca_type_str = models.TextField(default='', blank=True, null=True) + vul_id = models.IntegerField(blank=True, null=True, default=0) + report_name = models.CharField(default='', max_length=255, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_project_report' diff --git a/dongtai_common/models/project_version.py b/dongtai_common/models/project_version.py new file mode 100644 index 000000000..ca813b8b7 --- /dev/null +++ b/dongtai_common/models/project_version.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# datetime:2021/06/08 下午5:32 +# software: PyCharm +# project: dongtai-models +import time +from django.db import models +from dongtai_common.models import User +from dongtai_common.models.project import IastProject +from django.utils.translation import gettext_lazy as _ +from dongtai_common.utils.settings import get_managed + + +class IastProjectVersion(models.Model): + version_name = models.CharField(max_length=255, blank=True, null=True) + description = models.TextField(blank=True, null=True) + current_version = models.PositiveSmallIntegerField(blank=True, default=0) + status = models.PositiveSmallIntegerField(blank=True, null=True) + create_time = models.IntegerField(_('create time'), default=int(time.time()), blank=True) + update_time = models.IntegerField(_('update time'), default=int(time.time()), blank=True) + user = models.ForeignKey(User, models.DO_NOTHING, blank=True, null=True) + project = models.ForeignKey(IastProject, models.DO_NOTHING, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_project_version' diff --git a/dongtai_common/models/replay_method_pool.py b/dongtai_common/models/replay_method_pool.py new file mode 100644 index 000000000..c78968ec7 --- /dev/null +++ b/dongtai_common/models/replay_method_pool.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/3 下午2:34 +# project: dongtai-engine + +from django.db import models + +from dongtai_common.models.agent import IastAgent +from dongtai_common.utils.settings import 
get_managed + + +class IastAgentMethodPoolReplay(models.Model): + agent = models.ForeignKey(IastAgent, models.DO_NOTHING, blank=True, null=True) + url = models.CharField(max_length=2000, blank=True, null=True) + uri = models.CharField(max_length=2000, blank=True, null=True) + http_method = models.CharField(max_length=10, blank=True, null=True) + http_scheme = models.CharField(max_length=20, blank=True, null=True) + http_protocol = models.CharField(max_length=255, blank=True, null=True) + req_header = models.CharField(max_length=2000, blank=True, null=True) + req_params = models.CharField(max_length=2000, blank=True, null=True) + req_data = models.CharField(max_length=4000, blank=True, null=True) + res_header = models.CharField(max_length=1000, blank=True, null=True) + res_body = models.CharField(max_length=1000, blank=True, null=True) + context_path = models.CharField(max_length=255, blank=True, null=True) + method_pool = models.TextField(blank=True, null=True) # This field type is a guess. + clent_ip = models.CharField(max_length=255, blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + replay_id = models.IntegerField(blank=True, null=True) + replay_type = models.IntegerField(blank=True, null=True) + relation_id = models.IntegerField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_agent_method_pool_replay' diff --git a/dongtai_common/models/replay_queue.py b/dongtai_common/models/replay_queue.py new file mode 100644 index 000000000..161afe52b --- /dev/null +++ b/dongtai_common/models/replay_queue.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/1 下午3:02 +# project: dongtai-engine +from django.db import models + +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.utils.settings import get_managed +from dongtai_common.models.vul_recheck_payload import IastVulRecheckPayload + +class IastReplayQueue(models.Model): + agent = models.ForeignKey(IastAgent, models.DO_NOTHING, blank=True, null=True) + relation_id = models.IntegerField(blank=True, null=True) + state = models.IntegerField(blank=True, null=True) + count = models.IntegerField(blank=True, null=True) + result = models.IntegerField(blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + verify_time = models.IntegerField(blank=True, null=True) + uri = models.CharField(max_length=2000, blank=True, null=True) + method = models.CharField(max_length=10, blank=True, null=True) + scheme = models.CharField(max_length=10, blank=True, null=True) + header = models.CharField(max_length=4000, blank=True, null=True) + params = models.CharField(max_length=2000, blank=True, null=True) + body = models.CharField(max_length=4000, blank=True, null=True) + replay_type = models.IntegerField(blank=True, null=True) + payload = models.ForeignKey(IastVulRecheckPayload, + models.DO_NOTHING, + blank=True, + null=True, + default=-1) + + class Meta: + managed = get_managed() + db_table = 'iast_replay_queue' + ordering = ('-replay_type',) diff --git a/dongtai_common/models/res_header.py b/dongtai_common/models/res_header.py new file mode 100644 index 000000000..5e33e3fe2 --- /dev/null +++ b/dongtai_common/models/res_header.py @@ -0,0 +1,31 @@ 
+###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : res_header +# @created : 星期三 1月 12, 2022 16:49:40 CST +# +# @description : +###################################################################### + + +from django.db import models +from dongtai_common.utils.settings import get_managed +from dongtai_common.models.agent import IastAgent + + +class HeaderType(models.IntegerChoices): + REQUEST = 1 + RESPONSE = 2 + + +class ProjectSaasMethodPoolHeader(models.Model): + key = models.CharField(max_length=255, blank=True, null=False) + agent = models.ForeignKey(IastAgent, + models.DO_NOTHING, + blank=True, + null=True, + db_constraint=False) + header_type = models.IntegerField(choices=HeaderType.choices,default=0) + + class Meta: + managed = get_managed() + db_table = 'iast_project_header' diff --git a/dongtai_common/models/sca_maven_db.py b/dongtai_common/models/sca_maven_db.py new file mode 100644 index 000000000..0feb15904 --- /dev/null +++ b/dongtai_common/models/sca_maven_db.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/8/26 16:01 +# software: PyCharm +# project: dongtai-models +from django.db import models +from dongtai_common.utils.settings import get_managed + + +class ImportFrom(models.IntegerChoices): + SYSTEM = 1 + USER = 2 + __empty__ = 2 + + +class ScaMavenDb(models.Model): + group_id = models.CharField(max_length=255, blank=True, null=True) + atrifact_id = models.CharField(max_length=255, blank=True, null=True) + version = models.CharField(max_length=255, blank=True, null=True) + sha_1 = models.CharField(unique=True, + max_length=255, + blank=True, + null=True) + package_name = models.CharField(max_length=255, blank=True, null=True) + aql = models.CharField(max_length=255, blank=True, null=True) + license = models.CharField(max_length=255, blank=True, null=True) + import_from = models.IntegerField(choices=ImportFrom.choices, + default=ImportFrom.USER) + + class Meta: + managed = get_managed() + db_table = 'sca_maven_db' diff --git a/dongtai_common/models/sensitive_info.py b/dongtai_common/models/sensitive_info.py new file mode 100644 index 000000000..37d429949 --- /dev/null +++ b/dongtai_common/models/sensitive_info.py @@ -0,0 +1,31 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : sensitive_info +# @created : 星期五 11月 19, 2021 11:02:19 CST +# +# @description : +###################################################################### + +from django.db import models +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.user import User +import time + + +class IastPatternType(models.Model): + name = models.CharField(blank=True,default=None,max_length=255) + id = models.IntegerField(default=0,db_column='value') + logi_id = models.BigAutoField(primary_key=True,db_column='id') + class Meta: + db_table = 'iast_pattern_type' + +class IastSensitiveInfoRule(models.Model): + user = models.ForeignKey(User, models.DO_NOTHING, blank=True, null=True) + strategy = models.ForeignKey(IastStrategyModel, models.DO_NOTHING, blank=True, null=True) + pattern_type = models.ForeignKey(IastPatternType,models.DO_NOTHING,blank=True,default=None) + pattern = models.CharField(blank=True,default=None,max_length=255) + status = models.IntegerField(blank=True,default=None) + latest_time = models.IntegerField(default=time.time(),blank=True, null=True) + + class Meta: + db_table = 
'iast_sensitive_info_rule' diff --git a/dongtai_common/models/server.py b/dongtai_common/models/server.py new file mode 100644 index 000000000..69eeaeb6a --- /dev/null +++ b/dongtai_common/models/server.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/25 14:47 +# software: PyCharm +# project: dongtai-models +from django.db import models +from dongtai_common.utils.settings import get_managed + + +class IastServer(models.Model): + hostname = models.CharField(max_length=255, blank=True, null=True) + ip = models.CharField(max_length=255, blank=True, null=True) + port = models.IntegerField(blank=True, null=True) + environment = models.TextField(blank=True, null=True) + path = models.CharField(max_length=255, blank=True, null=True) + status = models.CharField(max_length=255, blank=True, null=True) + container = models.CharField(max_length=255, blank=True, null=True) + container_path = models.CharField(max_length=255, blank=True, null=True) + cluster_name = models.CharField(max_length=255, blank=True, null=True) + cluster_version = models.CharField(max_length=100, blank=True, null=True) + command = models.TextField(blank=True, null=True) + env = models.CharField(max_length=255, blank=True, null=True) + runtime = models.CharField(max_length=255, blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + network = models.CharField(max_length=255, blank=True, null=True) + protocol = models.CharField(max_length=255, + blank=True, + null=True, + default='') + pid = models.IntegerField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_server' diff --git a/dongtai_common/models/strategy.py b/dongtai_common/models/strategy.py new file mode 100644 index 000000000..54cf21363 --- /dev/null +++ b/dongtai_common/models/strategy.py @@ -0,0 +1,22 @@ +from django.db import models + +from dongtai_common.models import User +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.utils.settings import get_managed + + +class IastStrategyModel(models.Model): + user = models.ForeignKey(User, models.DO_NOTHING, blank=True, null=True) + vul_type = models.CharField(max_length=255, blank=True, null=True) + level = models.ForeignKey(IastVulLevel, models.DO_NOTHING, blank=True, null=True) + state = models.CharField(max_length=255, blank=True, null=True) + dt = models.IntegerField(blank=True, null=True) + vul_name = models.CharField(max_length=255, blank=True, null=True) + vul_desc = models.TextField(blank=True, null=True) + vul_fix = models.TextField(blank=True, null=True) + hook_type = models.ForeignKey(HookType, models.DO_NOTHING, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_strategy' diff --git a/dongtai_common/models/strategy_user.py b/dongtai_common/models/strategy_user.py new file mode 100644 index 000000000..9fbe90111 --- /dev/null +++ b/dongtai_common/models/strategy_user.py @@ -0,0 +1,17 @@ +from django.db import models + +from dongtai_common.models import User +from dongtai_common.utils.settings import get_managed + + +class IastStrategyUser(models.Model): + id = models.BigAutoField(primary_key=True) + name = models.CharField(max_length=200, blank=True, null=True) + content = models.TextField(blank=True, null=True) + user = models.ForeignKey(User, models.DO_NOTHING, blank=True, null=True) + status = models.IntegerField(blank=True, 
null=True) + created_at = models.DateTimeField(verbose_name="创建时间", auto_now_add=True) + + class Meta: + managed = get_managed() + db_table = 'iast_strategy_user' diff --git a/dongtai_common/models/system.py b/dongtai_common/models/system.py new file mode 100644 index 000000000..af68f3d01 --- /dev/null +++ b/dongtai_common/models/system.py @@ -0,0 +1,20 @@ +from django.db import models + +from dongtai_common.models import User +from dongtai_common.utils.settings import get_managed + + +class IastSystem(models.Model): + id = models.BigAutoField(primary_key=True) + agent_value = models.CharField(max_length=50, blank=True, null=True) + java_version = models.CharField(max_length=50, blank=True, null=True) + middleware = models.CharField(max_length=50, blank=True, null=True) + system = models.CharField(max_length=50, blank=True, null=True) + deploy_status = models.IntegerField(blank=True, null=True) + created_at = models.DateTimeField(auto_now_add=True) + update_at = models.DateTimeField(blank=True, null=True) + user = models.ForeignKey(User, models.DO_NOTHING, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_system' diff --git a/dongtai_common/models/talent.py b/dongtai_common/models/talent.py new file mode 100644 index 000000000..b80ed55a0 --- /dev/null +++ b/dongtai_common/models/talent.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/18 下午12:54 +# software: PyCharm +# project: dongtai-models +from django.db import models +from django.utils.translation import gettext_lazy as _ +from dongtai_common.utils.settings import get_managed +from dongtai_common.utils.customfields import trans_char_field +from typing import Any + + +class Talent(models.Model): + talent_name = models.CharField( + unique=True, + verbose_name=_('talent'), + max_length=255, + blank=True, + error_messages={ + 'unique': _("A talent with that talent name already exists."), + }, + ) + create_time = models.IntegerField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + created_by = models.IntegerField(blank=True, null=True) + is_active = models.BooleanField( + _('active'), + default=True, + help_text=_( + 'Designates whether this user should be treated as active. ' + 'Unselect this instead of deleting accounts.'), + ) + + class Meta: + verbose_name = _('talent') + managed = get_managed() + db_table = 'auth_talent' + + def get_talent_name(self): + return self.talent_name + + @trans_char_field('talent_name', { + 'zh': { + "默认租户": "默认租户" + }, + "en": { + '默认租户': "Default Tenant" + } + }) + def __getattribute__(self, name) -> Any: + return super().__getattribute__(name) diff --git a/dongtai_common/models/user.py b/dongtai_common/models/user.py new file mode 100644 index 000000000..01cac38cd --- /dev/null +++ b/dongtai_common/models/user.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/25 下午6:43 +# software: PyCharm +# project: dongtai-models + +from django.contrib.auth.models import AbstractUser, UserManager +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from dongtai_common.models.department import Department +from dongtai_common.utils.settings import get_managed + + +class PermissionsMixin(models.Model): + department = models.ManyToManyField( + Department, + verbose_name=_('department'), + blank=True, + help_text=_( + 'The department this user belongs to. 
A user will get all permissions ' + 'granted to each of their department.' + ), + related_name="users", + related_query_name="user", + ) + + class Meta: + abstract = True + + +class SaaSUserManager(UserManager): + def create_user(self, username, email=None, password=None, **extra_fields): + extra_fields.setdefault('is_staff', False) + extra_fields.setdefault('is_superuser', 0) + return self._create_user(username, email, password, **extra_fields) + + def create_talent_user(self, username, email=None, password=None, **extra_fields): + extra_fields.setdefault('is_staff', True) + extra_fields.setdefault('is_superuser', 2) + + if extra_fields.get('is_staff') is not True: + raise ValueError('Superuser must have is_staff=True.') + if extra_fields.get('is_superuser') != 2: + raise ValueError('Superuser must have is_superuser=True.') + + return self._create_user(username, email, password, **extra_fields) + + def create_system_user(self, username, email=None, password=None, **extra_fields): + extra_fields.setdefault('is_staff', True) + extra_fields.setdefault('is_superuser', 1) + + if extra_fields.get('is_staff') is not True: + raise ValueError('Superuser must have is_staff=True.') + if extra_fields.get('is_superuser') != 1: + raise ValueError('Superuser must have is_superuser=True.') + + return self._create_user(username, email, password, **extra_fields) + + +class User(AbstractUser, PermissionsMixin): + is_superuser = models.IntegerField(default=0) + phone = models.CharField(max_length=15) + default_language = models.CharField(max_length=15) + objects = SaaSUserManager() + + class Meta(AbstractUser.Meta): + db_table = 'auth_user' + + def is_talent_admin(self): + return self.is_superuser == 2 or self.is_superuser == 1 + + def is_system_admin(self): + return self.is_superuser == 1 + + def get_talent(self): + department = self.department.get() if self else None + talent = department.talent.get() if department else None + return talent + + def get_department(self): + return self.department.get() diff --git a/dongtai_common/models/version_control.py b/dongtai_common/models/version_control.py new file mode 100644 index 000000000..a96600b1f --- /dev/null +++ b/dongtai_common/models/version_control.py @@ -0,0 +1,25 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : version_control +# @created : 星期四 1月 20, 2022 17:32:43 CST +# +# @description : +###################################################################### + + +import time +from django.db import models +from dongtai_common.utils.settings import get_managed + + +class VersionControl(models.Model): + version = models.CharField(max_length=255, blank=True, null=True) + component_name = models.CharField(max_length=255, blank=True, null=True) + component_version_hash = models.CharField(max_length=255, blank=True, null=True) + additional = models.CharField(max_length=255, blank=True, null=True) + update_time = models.IntegerField(default=int(time.time()), blank=True) + + + class Meta: + managed = get_managed() + db_table = 'project_version_control' diff --git a/dongtai_common/models/vul_level.py b/dongtai_common/models/vul_level.py new file mode 100644 index 000000000..1558034ca --- /dev/null +++ b/dongtai_common/models/vul_level.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/12/4 上午11:54 +# software: PyCharm +# project: dongtai-models +from django.db import models +from dongtai_common.utils.settings import get_managed + + 
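# `managed = get_managed()` is used by nearly every model in this patch, but the helper itself
# is not part of this diff. A minimal sketch of what such a settings-driven switch could look
# like, assuming it simply reads a flag so that shared-database deployments can mark these
# tables as externally managed (the setting name below is illustrative, not necessarily the
# real dongtai_common implementation):
#
#     from django.conf import settings
#
#     def get_managed() -> bool:
#         # When this returns False, Django treats the table as externally managed and
#         # will not generate CREATE/ALTER statements for it in migrations.
#         return bool(getattr(settings, "DONGTAI_MANAGED_TABLES", True))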
+class IastVulLevel(models.Model): + name = models.CharField(max_length=255, blank=True, null=True) + name_value = models.CharField(max_length=255, blank=True, null=True) + name_type = models.CharField(max_length=255, blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_vul_level' diff --git a/dongtai_common/models/vul_recheck_payload.py b/dongtai_common/models/vul_recheck_payload.py new file mode 100644 index 000000000..7abe1605f --- /dev/null +++ b/dongtai_common/models/vul_recheck_payload.py @@ -0,0 +1,22 @@ +from django.db import models +from dongtai_common.models import User +from dongtai_common.utils.settings import get_managed +from time import time +from django.db.models import IntegerChoices +from django.utils.translation import gettext_lazy as _ +from dongtai_common.models.strategy import IastStrategyModel + + +class IastVulRecheckPayload(models.Model): + user = models.ForeignKey(User, models.DO_NOTHING, blank=True, null=True) + strategy = models.ForeignKey(IastStrategyModel, + models.DO_NOTHING, + blank=True, + null=True) + value = models.CharField(blank=True, default=None, max_length=255) + status = models.IntegerField(blank=True, default=None) + create_time = models.IntegerField(default=time(), blank=True, null=True) + language_id = models.IntegerField(blank=True, default=0) + + class Meta: + db_table = 'iast_vul_recheck_payload' diff --git a/dongtai_common/models/vul_rule.py b/dongtai_common/models/vul_rule.py new file mode 100644 index 000000000..d64c7175e --- /dev/null +++ b/dongtai_common/models/vul_rule.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/2/19 下午3:04 +# software: PyCharm +# project: dongtai-models +from django.db import models +from dongtai_common.utils.settings import get_managed + + +class IastVulRule(models.Model): + rule_name = models.CharField(max_length=255, blank=True, null=True) + rule_level = models.CharField(max_length=10, blank=True, null=True) + rule_msg = models.CharField(max_length=255, blank=True, null=True) + rule_value = models.TextField(blank=True, null=True) # This field type is a guess. 
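# rule_value appears to hold the body of the rule itself (hence the loosely typed TextField),
# while the is_enable / is_system switches and the raw create_by user id that follow use the
# same plain-integer convention as the other iast_* tables in this diff.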
+ is_enable = models.IntegerField(blank=True, null=True) + is_system = models.IntegerField(blank=True, null=True) + create_by = models.IntegerField(blank=True, null=True) + create_time = models.IntegerField(blank=True, null=True) + update_time = models.IntegerField(blank=True, null=True) + + class Meta: + managed = get_managed() + db_table = 'iast_vul_rule' diff --git a/dongtai_common/models/vulnerablity.py b/dongtai_common/models/vulnerablity.py new file mode 100644 index 000000000..98b2a6563 --- /dev/null +++ b/dongtai_common/models/vulnerablity.py @@ -0,0 +1,142 @@ +from django.db import models +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.utils.settings import get_managed +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.hook_type import HookType +class IastVulnerabilityStatus(models.Model): + name = models.CharField(max_length=100, blank=True, default='') + + class Meta: + managed = get_managed() + db_table = 'iast_vulnerability_status' + +class IastVulnerabilityModel(models.Model): + search_keywords = models.CharField(max_length=1000, blank=True, null=True) + level = models.ForeignKey(IastVulLevel, models.DO_NOTHING, blank=True, null=True) + url = models.CharField(max_length=2000, blank=True, null=True) + uri = models.CharField(max_length=255, blank=True, null=True) + pattern_uri = models.CharField(max_length=255, blank=True, null=True) + # 模糊搜索 全文索引 查询 + vul_title = models.CharField(max_length=255, blank=True, null=True,default="") + http_method = models.CharField(max_length=10, blank=True, null=True) + http_scheme = models.CharField(max_length=255, blank=True, null=True) + http_protocol = models.CharField(max_length=255, blank=True, null=True) + req_header = models.TextField(blank=True, null=True) + req_params = models.CharField(max_length=2000, blank=True, null=True, default="") + req_data = models.TextField(blank=True, null=True) + res_header = models.TextField(blank=True, null=True) + res_body = models.TextField(blank=True, null=True) + full_stack = models.TextField(blank=True, null=True) + top_stack = models.CharField(max_length=255, blank=True, null=True) + bottom_stack = models.CharField(max_length=255, blank=True, null=True) + taint_value = models.CharField(max_length=255, blank=True, null=True) + taint_position = models.CharField(max_length=255, blank=True, null=True) + agent = models.ForeignKey(IastAgent, models.DO_NOTHING, blank=True, null=True) + context_path = models.CharField(max_length=255, blank=True, null=True) + counts = models.IntegerField(blank=True, null=True) + first_time = models.IntegerField(blank=True, null=True) + latest_time = models.IntegerField(blank=True, null=True) + latest_time_desc = models.IntegerField(blank=True, null=True, default=0) + level_id_desc = models.SmallIntegerField(blank=True, null=True, default=0) + client_ip = models.CharField(max_length=255, blank=True, null=True) + param_name = models.CharField(max_length=255, blank=True, null=True, default='') + is_del = models.SmallIntegerField(blank=True, null=True,default=0) + method_pool_id = models.IntegerField(default=-1, blank=True, null=True) + strategy = models.ForeignKey(IastStrategyModel, + on_delete=models.DO_NOTHING, + db_constraint=False, + db_column='strategy_id') + hook_type = models.ForeignKey(HookType, + on_delete=models.DO_NOTHING, + db_constraint=False, + db_column='hook_type_id') + status = models.ForeignKey(IastVulnerabilityStatus, + 
on_delete=models.DO_NOTHING, + db_constraint=False, + db_column='status_id') + + class Meta: + managed = get_managed() + db_table = 'iast_vulnerability' + + def save(self, *args, **kwargs): + key_works = [ + # self.uri, + # self.http_method, + # self.http_scheme, + # self.http_protocol, + # self.top_stack, + # self.bottom_stack, + self.strategy.vul_type, + self.strategy.vul_name, + ] + if not self.pattern_uri: + self.pattern_uri = self.pattern_uri + self.search_keywords = " ".join(key_works) + self.latest_time_desc = -int(self.latest_time) + self.level_id_desc = -int(self.level_id) + super(IastVulnerabilityModel, self).save(*args, **kwargs) + + +from django_elasticsearch_dsl.registries import registry +from django_elasticsearch_dsl import Document, fields +from dongtai_conf.settings import VULNERABILITY_INDEX +from django_elasticsearch_dsl.search import Search +from django.core.cache import cache +import uuid + + +@registry.register_document +class IastVulnerabilityDocument(Document): + user_id = fields.IntegerField(attr="agent.user_id") + agent_id = fields.IntegerField(attr="agent_id") + strategy_id = fields.IntegerField(attr="strategy_id") + hook_type_id = fields.IntegerField(attr="hook_type_id") + status_id = fields.IntegerField(attr="status_id") + level_id = fields.IntegerField(attr="level_id") + bind_project_id = fields.IntegerField(attr="agent.bind_project_id") + language = fields.IntegerField(attr="agent.language") + project_version_id = fields.IntegerField( + attr="agent.project_version_id") + project_name = fields.IntegerField(attr="agent.bind_project.name") + token = fields.IntegerField(attr="agent.token") + + + @classmethod + def search(cls, using=None, index=None): + uuid_key = uuid.uuid4().hex + cache_uuid_key = cache.get_or_set( + f'es-documents-shards-{cls.__name__}', uuid_key, 60 * 1) + return Search(using=cls._get_using(using), + index=cls._default_index(index), + doc_type=[cls], + model=cls.django.model).params(preference=cache_uuid_key) + + def get_instances_from_related(self, related_instance): + """If related_models is set, define how to retrieve the Car instance(s) from the related model. + The related_models option should be used with caution because it can lead in the index + to the updating of a lot of items. 
+ """ + if isinstance(related_instance, IastAgent): + if related_instance.bind_project_id < 0: + return related_instance.iastvulnerabilitymodel_set.all() + + + class Index: + name = VULNERABILITY_INDEX + + class Django: + model = IastVulnerabilityModel + fields = [ + 'id', 'search_keywords', 'url', 'uri', 'vul_title', 'http_method', + 'http_scheme', 'http_protocol', 'req_header', 'req_params', + 'req_data', 'res_header', 'res_body', 'full_stack', 'top_stack', + 'bottom_stack', 'taint_value', 'taint_position', 'context_path', + 'counts', 'first_time', 'latest_time', 'latest_time_desc', + 'level_id_desc', 'client_ip', 'param_name', 'is_del', + 'method_pool_id' + ] + auto_refresh = False + + ignore_signals = False diff --git a/dongtai_common/permissions/__init__.py b/dongtai_common/permissions/__init__.py new file mode 100644 index 000000000..0e1033e03 --- /dev/null +++ b/dongtai_common/permissions/__init__.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/21 下午4:04 +# project: dongtai-engine + +from rest_framework import permissions + + +class ScopedPermission(permissions.BasePermission): + """ + Permissions work depending on the type of authentication: + + - A user inherits permissions based on their membership role. These are + still dictated as common scopes, but they can't be checked until the + has_object_permission hook is called. + - ProjectKeys (legacy) are granted only project based scopes. This + - APIKeys specify their scope, and work as expected. + """ + + scope_map = {"HEAD": (), "GET": (), "POST": (), "PUT": (), "PATCH": (), "DELETE": ()} + + def has_permission(self, request, view): + # session-based auth has all scopes for a logged in user + if not getattr(request, "auth", None): + return request.user.is_authenticated() + + allowed_scopes = set(self.scope_map.get(request.method, [])) + current_scopes = request.auth.get_scopes() + return any(s in allowed_scopes for s in current_scopes) + + def has_object_permission(self, request, view, obj): + return False + + +class UserPermission(ScopedPermission): + """ + 用户权限验证类,验证是否为有效用户 + """ + + def has_permission(self, request, view): + user = request.user + if user is not None and user.is_active: + return True + return False + + def has_object_permission(self, request, view, obj): + print('enter has object permission') + return super().has_object_permission(request, view, obj) + + +class TalentAdminPermission(ScopedPermission): + """ + 租户管理员权限验证类 + """ + + def has_permission(self, request, view): + user = request.user + if user is not None and user.is_active and user.is_talent_admin(): + return True + return False + + def has_object_permission(self, request, view, obj): + print('enter has object permission') + return super().has_object_permission(request, view, obj) + + +class SystemAdminPermission(ScopedPermission): + """ + 系统管理员权限验证类 + """ + + def has_permission(self, request, view): + user = request.user + if user is not None and user.is_active and user.is_system_admin(): + return True + return False + + def has_object_permission(self, request, view, obj): + print('enter has object permission') + return super().has_object_permission(request, view, obj) diff --git a/dongtai_common/permissions/user_auth.py b/dongtai_common/permissions/user_auth.py new file mode 100644 index 000000000..11b29f3bc --- /dev/null +++ b/dongtai_common/permissions/user_auth.py @@ -0,0 +1,8 @@ +# 通过当前用户id,筛选出更高级管理员用户id +from dongtai_common.models.user import User + + +def 
super_of_cur_user(user_id): + cur_user = User.objects.filter(pk=user_id).first() + + return [] diff --git a/dongtai_common/translation.py b/dongtai_common/translation.py new file mode 100644 index 000000000..c0fb92b78 --- /dev/null +++ b/dongtai_common/translation.py @@ -0,0 +1,54 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : translation +# @created : Friday Aug 13, 2021 14:31:38 CST +# +# @description : +###################################################################### + + +from modeltranslation.translator import translator, TranslationOptions, register +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_common.models.deploy import IastDeployDesc +from dongtai_common.models.document import IastDocument +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.department import Department +from dongtai_common.models.talent import Talent +from dongtai_common.models.engine_monitoring_indicators import IastEnginMonitoringIndicators + + +@register(IastStrategyModel) +class IastStrategyModelTranslationOptions(TranslationOptions): + fields = ('vul_name', 'vul_desc', 'vul_fix') + + +@register(IastVulLevel) +class IastVulLevelTranslationOptions(TranslationOptions): + fields = ('name_value', 'name_type') + + +@register(IastDeployDesc) +class IastDeployDescTranslationOptions(TranslationOptions): + fields = ('desc',) + + +@register(IastDocument) +class IastDocumentTranslationOptions(TranslationOptions): + fields = ('title', 'url') + + +@register(HookType) +class HookTypeTranslationOptions(TranslationOptions): + fields = ('name', ) + + +@register(IastEnginMonitoringIndicators) +class IastEnginMonitoringIndicatorsOptions(TranslationOptions): + fields = ('name', ) + + +@register(IastVulnerabilityStatus) +class IastVulnerabilityStatusOptions(TranslationOptions): + fields = ('name', ) diff --git a/AgentServer/urls.py b/dongtai_common/urls.py similarity index 78% rename from AgentServer/urls.py rename to dongtai_common/urls.py index 3fb71065c..c1c3efa6e 100644 --- a/AgentServer/urls.py +++ b/dongtai_common/urls.py @@ -1,4 +1,4 @@ -"""AgentServer URL Configuration +"""webapi URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/3.0/topics/http/urls/ @@ -13,9 +13,11 @@ 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ -from django.contrib import admin -from django.urls import path, include +from django.conf.urls.static import static +from django.urls import include, path +import os -urlpatterns = [ - path('api/v1/', include('apiserver.urls')), -] + + + +urlpatterns = [] diff --git a/dongtai_common/utils/__init__.py b/dongtai_common/utils/__init__.py new file mode 100644 index 000000000..7f132d18d --- /dev/null +++ b/dongtai_common/utils/__init__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/12 下午5:49 +# project: dongtai-engine + +from . 
import const \ No newline at end of file diff --git a/dongtai_common/utils/const.py b/dongtai_common/utils/const.py new file mode 100644 index 000000000..9a37bcd87 --- /dev/null +++ b/dongtai_common/utils/const.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/12 下午5:52 + +# report +REPORT_HEART_BEAT = 0x01 +REPORT_SCA = 0x11 +REPORT_VULN_NORNAL = 0x21 +REPORT_VULN_DYNAMIC = 0x22 +REPORT_VULN_OVER_POWER = 0x23 +REPORT_VULN_SAAS_POOL = 0x24 +REPORT_VULN_HARDCODE = 0x25 + +REPORT_AUTH_ADD = 0x31 +REPORT_AUTH_UPDATE = 0x32 +REPORT_ERROR_LOG = 0x51 +REPORT_API_ROUTE = 0x61 +REPORT_THIRD_PARTY_SERVICE = 0x81 +REPORT_FILE_PATH = 0x82 + + +# strategy +STRATEGY_ENABLE = 'enable' +STRATEGY_DISABLE = 'disable' + +RUNNING = 1 + +# 定义Agent运行状态 +CORE_IS_RUNNING = 1 +CORE_NOT_RUNNING = 0 + +# 定义重放类型 +VUL_REPLAY = 1 +REQUEST_REPLAY = 2 +API_REPLAY = 3 + +# 定义重放数据类型 +PENDING = 0 +WAITING = 1 +SOLVED = 2 +SOLVING = 3 +DISCARD = 4 + +# 定义漏洞验证结果 +RECHECK_ERROR = 2 +RECHECK_TRUE = 1 +RECHECK_FALSE = 0 +RECHECK_DISCARD = 3 + +# 定义漏洞状态 +VUL_WAITING = '待验证' +VUL_VERIFY = '验证中' +VUL_TRUE = '已确认' +VUL_FALSE = '已忽略' + +# hook strategy type +HOOK_TYPE_ENABLE = 1 +HOOK_TYPE_DISABLE = 0 + +USER_BUGENV = 'dt-range' + +SYSTEM_USER_ID = 1 + +# 定义规则状态 +ENABLE = 1 +DISABLE = 0 +DELETE = -1 + +# 定义规则类型 +RULE_PROPAGATOR = 1 +RULE_SOURCE = 2 +RULE_FILTER = 3 +RULE_SINK = 4 +RULE_ENTRY_POINT = 5 + +# 定义规则对应的用户 +RULE_USER = 'user' +RULE_SYSTEM = 'system' +RULE_IS_SYSTEM = 1 +RULE_IS_ENABLE = 1 + +# 限制每页的最大数量 +MAX_PAGE_SIZE = 50 + +VUL_PENDING = 1 +VUL_VERIFYING = 2 +VUL_CONFIRMED = 3 +VUL_IGNORE = 4 +VUL_SOLVED = 5 diff --git a/dongtai_common/utils/customfields.py b/dongtai_common/utils/customfields.py new file mode 100644 index 000000000..9d2458602 --- /dev/null +++ b/dongtai_common/utils/customfields.py @@ -0,0 +1,36 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : customfields +# @created : Friday Aug 13, 2021 17:34:31 CST +# +# @description : +###################################################################### + +from django.db.models import CharField +from functools import wraps +from django.utils.translation import get_language +from collections import defaultdict + + +def trans_char_field(field, transdict): + def wrapper(func): + @wraps(func) + def wrapped(*args, **kwargs): + value = func(*args, **kwargs) + try: + if len(args) > 1: + name = args[1] + else: + name = kwargs['name'] + except BaseException as e: + print(e) + return value + res = [ + v[value] for k, v in transdict.items() if name == field + and k == get_language() and v.get(value, None) + ] + return res[0] if res else value + + return wrapped + + return wrapper diff --git a/dongtai_common/utils/db.py b/dongtai_common/utils/db.py new file mode 100644 index 000000000..8e1b75218 --- /dev/null +++ b/dongtai_common/utils/db.py @@ -0,0 +1,50 @@ + +from django.db import models +import copy +from django.db.models import Expression + + +class SearchLanguageMode(Expression): + template = "MATCH( %(expressions)s ) AGAINST ( '+%(search_keyword)s' IN NATURAL LANGUAGE MODE )" + + def __init__(self, expressions, search_keyword): + super().__init__(output_field=models.IntegerField()) + self.search_keyword = search_keyword + for expression in expressions: + if not hasattr(expression, 'resolve_expression'): + raise TypeError('%r is not an Expression' % expression) + self.expressions = expressions + + def resolve_expression(self, + 
                            query=None,
+                            allow_joins=True,
+                            reuse=None,
+                            summarize=False,
+                            for_save=False):
+        c = self.copy()
+        c.is_summary = summarize
+        for pos, expression in enumerate(self.expressions):
+            c.expressions[pos] = expression.resolve_expression(
+                query, allow_joins, reuse, summarize, for_save)
+        return c
+
+    def as_sql(self, compiler, connection, template=None):
+        sql_expressions, sql_params = [], []
+        for expression in self.expressions:
+            sql, params = compiler.compile(expression)
+            sql_expressions.append(sql)
+            sql_params.extend(params)
+        template = template or self.template
+        data = {
+            'expressions': ','.join(sql_expressions),
+            'search_keyword': self.search_keyword
+        }
+        return template % data, sql_params
+
+    def get_source_expressions(self):
+        return self.expressions
+
+    def set_source_expressions(self, expressions):
+        self.expressions = expressions
+
+
diff --git a/dongtai_common/utils/es.py b/dongtai_common/utils/es.py
new file mode 100644
index 000000000..6f0375c91
--- /dev/null
+++ b/dongtai_common/utils/es.py
@@ -0,0 +1,92 @@
+from celery import shared_task
+from django.apps import apps
+from django.db import transaction
+from django_elasticsearch_dsl.registries import registry
+from django_elasticsearch_dsl.signals import RealTimeSignalProcessor
+from django_redis import get_redis_connection
+from django.core.cache import cache
+from celery.apps.worker import logger
+from dongtai_conf.settings import DONGTAI_MAX_RATE_LIMIT
+from dongtai_conf.settings import DONGTAI_REDIS_ES_UPDATE_BATCH_SIZE
+from dongtai_conf.settings import DONGTAI_MAX_BATCH_TASK_CONCORRENCY
+from django_elasticsearch_dsl.registries import registry
+
+
+@shared_task
+def handle_save(pk, app_label, model_name):
+    # index a single instance (and its related documents) into Elasticsearch
+    logger.info(f'handle_save to es: {model_name} pk: {pk}')
+    sender = apps.get_model(app_label, model_name)
+    instance = sender.objects.get(pk=pk)
+    registry.update(instance)
+    registry.update_related(instance)
+    rate_limit_key = f"batch-save-rate-limit-{app_label}-{model_name}-rate_limit"
+    cache.decr(rate_limit_key)
+
+
+@shared_task
+def handle_batch_save(app_label, model_name):
+    logger.info(f'handle batch save to es: {model_name} app: {app_label}')
+    list_key = f"batch-save-list{app_label}-{model_name}-task"
+    rate_limit_key = f"batch-save-rate-limit-{app_label}-{model_name}-rate_limit"
+    batch_task_count_key = f"batch-save-task-count{app_label}-{model_name}-batch-task-count"
+    con = get_redis_connection()
+    # atomically take one batch of queued pks off the Redis list
+    pipe = con.pipeline()
+    pipe.multi()
+    model_ids, status = pipe.lrange(list_key, 0,
+                                    DONGTAI_REDIS_ES_UPDATE_BATCH_SIZE).ltrim(
+                                        list_key,
+                                        DONGTAI_REDIS_ES_UPDATE_BATCH_SIZE,
+                                        -1).execute()
+    logger.info(f'handle batch save to es model_ids size: {len(model_ids)}')
+    cache.decr(rate_limit_key, len(model_ids))
+    # registry._models is keyed by model class, so resolve the class before the lookup
+    model = apps.get_model(app_label, model_name)
+    for doc in registry._models[model]:
+        instance_qs = model.objects.filter(pk__in=model_ids).all()
+        doc().update(instance_qs)
+    listlen = con.llen(list_key)
+    if listlen > 0:
+        logger.info(listlen)
+        # more pks are queued: schedule another batch run one second from now
+        handle_batch_save.apply_async(args=(app_label, model_name),
+                                      countdown=1)
+    else:
+        cache.decr(batch_task_count_key)
+
+
+class DTCelerySignalProcessor(RealTimeSignalProcessor):
+
+    def handle_save(self, sender, instance, **kwargs):
+        app_label = instance._meta.app_label
+        model_name = instance._meta.model_name
+
+        if instance.__class__ in registry._models or instance.__class__ in registry._related_models:
+            transaction.on_commit(
+                lambda: task_routings(instance, app_label, model_name))
+
+
+def
task_routings(instance, app_label, model_name): + rate_limit_key = f"batch-save-rate-limit-{app_label}-{model_name}-rate_limit" + rate_limit = cache.get_or_set(rate_limit_key, 0) + cache.incr(rate_limit_key) + logger.info(f"rate_limit_key now: {rate_limit_key} value: {rate_limit}") + if rate_limit > DONGTAI_MAX_RATE_LIMIT and instance.__class__ in registry._models: + logger.info(f'handle_save to es exceed limit : {model_name}') + add_task(instance.pk, app_label, model_name) + else: + logger.info(f'handle_save to es: {model_name} ') + handle_save.delay(instance.pk, app_label, model_name) + +def add_task(pk, app_label, model_name): + list_key = f"batch-save-list{app_label}-{model_name}-task" + con = get_redis_connection() + con.rpush(list_key, pk) + add_async_batch_task(app_label, model_name) + + +def add_async_batch_task(app_label, model_name): + batch_task_count_key = f"batch-save-task-count{app_label}-{model_name}-batch-task-count" + batch_task_count = cache.get_or_set(batch_task_count_key, 0) + logger.info(f"rate_limit_key now: {batch_task_count_key} value: {batch_task_count}") + if batch_task_count < DONGTAI_MAX_BATCH_TASK_CONCORRENCY: + cache.incr(batch_task_count_key) + handle_batch_save.delay(app_label, model_name) diff --git a/dongtai_common/utils/http.py b/dongtai_common/utils/http.py new file mode 100644 index 000000000..418448bef --- /dev/null +++ b/dongtai_common/utils/http.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/21 下午6:21 +# project: dongtai-engine +import base64 +import logging + +logger = logging.getLogger('dongtai-core') + + +def build_request(req_method, raw_req_header, uri, query_params, req_data, http_protocol): + decode_req_header = base64.b64decode(raw_req_header).decode('utf-8').strip() + headers = f"{req_method} {uri + ('?' 
+ query_params if query_params else '')} {http_protocol}\n{decode_req_header}\n\n{req_data if req_data else ''}" + return headers + + +def build_response(header, body): + try: + _data = base64.b64decode(header.encode("utf-8")).decode("utf-8") + except Exception as e: + _data = '' + logger.error(f'Response Header解析出错,错误原因:{e}') + return '{header}\n\n{body}'.format(header=_data, body=body) diff --git a/dongtai_common/utils/settings.py b/dongtai_common/utils/settings.py new file mode 100644 index 000000000..d8cece840 --- /dev/null +++ b/dongtai_common/utils/settings.py @@ -0,0 +1,13 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : settings +# @created : Tuesday Aug 17, 2021 16:51:00 CST +# +# @description : +###################################################################### + +import os + + +def get_managed(): + return True if os.getenv('environment', None) == 'TEST' else False diff --git a/dongtai_common/utils/systemsettings.py b/dongtai_common/utils/systemsettings.py new file mode 100644 index 000000000..f75c0708c --- /dev/null +++ b/dongtai_common/utils/systemsettings.py @@ -0,0 +1,24 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : systemsettings +# @created : 星期二 11月 30, 2021 19:54:50 CST +# +# @description : +###################################################################### + + + +from dongtai_common.models.profile import IastProfile + + + +def get_vul_validate(): + vul_verifiy = IastProfile.objects.filter(key='vul_verifiy').values_list( + 'value', flat=True).first() + return True if not vul_verifiy or vul_verifiy == "1" else False + + +def get_circuit_break(): + circuit_break = IastProfile.objects.filter( + key='circuit_break').values_list('value', flat=True).first() + return True if not circuit_break or circuit_break == "1" else False diff --git a/dongtai_common/utils/user.py b/dongtai_common/utils/user.py new file mode 100644 index 000000000..122975220 --- /dev/null +++ b/dongtai_common/utils/user.py @@ -0,0 +1,19 @@ +from dongtai_common.models.department import Department +from django.db.models import QuerySet +from dongtai_common.models.user import User +from dongtai_common.utils import const +from django.db.models import Q + + + +def get_auth_users__by_id(user_id: int) -> QuerySet: + user = User.objects.filter(pk=user_id).first() + if user.is_system_admin(): + users = User.objects.all() + elif user.is_talent_admin(): + talent = user.get_talent() + departments = talent.departments.all() + users = User.objects.filter(department__in=departments) + else: + users = User.objects.filter(pk=user_id).all() + return users diff --git a/dongtai_common/utils/validate.py b/dongtai_common/utils/validate.py new file mode 100644 index 000000000..8adc65649 --- /dev/null +++ b/dongtai_common/utils/validate.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/16 下午2:25 +# project: dongtai-engine + + +class Validate: + """ + common Validate for dongtai project + """ + + @staticmethod + def is_number(iterable): + """ + Return True if x is int for all values x in the iterable. 
+ :param iterable: + :return: + """ + for item in iterable: + try: + int(item) + except: + return False + return True + + @staticmethod + def is_empty(obj): + """ + Return True if obj is None or obj is '' + :param obj: + :return: + """ + return obj is None or obj == '' diff --git a/dongtai_conf/__init__.py b/dongtai_conf/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/AgentServer/asgi.py b/dongtai_conf/asgi.py similarity index 72% rename from AgentServer/asgi.py rename to dongtai_conf/asgi.py index 6c98b419d..bab16d18c 100644 --- a/AgentServer/asgi.py +++ b/dongtai_conf/asgi.py @@ -1,5 +1,5 @@ """ -ASGI config for AgentServer project. +ASGI config for dongtai_conf project. It exposes the ASGI callable as a module-level variable named ``application``. @@ -11,6 +11,6 @@ from django.core.asgi import get_asgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'AgentServer.settings') +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dongtai_conf.settings') application = get_asgi_application() diff --git a/dongtai_conf/celery.py b/dongtai_conf/celery.py new file mode 100644 index 000000000..34a143b76 --- /dev/null +++ b/dongtai_conf/celery.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/26 下午7:27 +# software: PyCharm +# project: lingzhi-engine + +import os + +from celery import Celery + +# set the default Django settings module for the 'celery' program. +from kombu import Queue, Exchange + +from dongtai_conf import settings + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dongtai_conf.settings') + +app = Celery('dongtai') + +configs = {k: v for k, v in settings.__dict__.items() if k.startswith('CELERY')} +# Using a string here means the worker doesn't have to serialize +# the configuration object to child processes. +# - namespace='CELERY' means all celery-related configuration keys +# should have a `CELERY_` prefix. 
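+# Note: the CELERY_* values gathered into `configs` above are applied further
+# down via app.conf.update(configs) rather than app.config_from_object().
+# Each task family below gets its own queue/exchange/routing key, so a worker
+# can be dedicated to a single workload; for example (illustrative command,
+# adjust the app path and queue list to your deployment):
+#   celery -A dongtai_conf.celery worker -Q dongtai-es-save-task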
+ +configs["task_queues"] = [ + # normal + Queue("dongtai-method-pool-scan", Exchange("dongtai-method-pool-scan"), routing_key="dongtai-method-pool-scan"), + Queue("dongtai-replay-vul-scan", Exchange("dongtai-replay-vul-scan"), routing_key="dongtai-replay-vul-scan"), + Queue("dongtai-sca-task", Exchange("dongtai-sca-task"), routing_key="dongtai-sca-task"), + Queue("dongtai-function-flush-data", Exchange("dongtai-function-flush-data"), routing_key="dongtai-function-flush-data"), + Queue("dongtai-es-save-task", Exchange("dongtai-es-save-task"), routing_key="dongtai-es-save-task"), + # cronjob + Queue("dongtai-periodic-task", Exchange("dongtai-periodic-task"), routing_key="dongtai-periodic-task"), +] +# celery config +configs['task_serializer'] = 'json' +configs['result_serializer'] = 'json' +configs['accept_content'] = ['json'] +configs['task_ignore_result'] = True +configs['task_acks_late'] = True +configs['task_acks_on_failure_or_timeout'] = True +# configs['worker_concurrency'] = 8 +configs["task_routes"] = { + # normal + "dongtai_engine.tasks.search_vul_from_method_pool": {'exchange': 'dongtai-method-pool-scan', 'routing_key': 'dongtai-method-pool-scan'}, + "dongtai_engine.tasks.search_vul_from_replay_method_pool": {'exchange': 'dongtai-replay-vul-scan', 'routing_key': 'dongtai-replay-vul-scan'}, + "dongtai_web.dongtai_sca.scan.utils.update_one_sca": {'exchange': 'dongtai-sca-task', 'routing_key': 'dongtai-sca-task'}, + "dongtai_engine.preheat.function_flush": {'exchange': 'dongtai-function-flush-data', 'routing_key': 'dongtai-function-flush-data'}, + "dongtai_common.utils.es.handle_save": {'exchange': 'dongtai-es-save-task', 'routing_key': 'dongtai-es-save-task'}, + "dongtai_common.utils.es.handle_batch_save": {'exchange': 'dongtai-es-save-task', 'routing_key': 'dongtai-es-save-task'}, + "dongtai_engine.elatic_search.data_correction": {'exchange': 'dongtai-es-save-task', 'routing_key': 'dongtai-es-save-task'}, + # cronjob + "dongtai_engine.tasks.update_agent_status": {'exchange': 'dongtai-periodic-task', 'routing_key': 'dongtai-periodic-task'}, + "dongtai_engine.tasks.heartbeat": {'exchange': 'dongtai-periodic-task', 'routing_key': 'dongtai-periodic-task'}, + "dongtai_engine.tasks.clear_error_log": {'exchange': 'dongtai-periodic-task', 'routing_key': 'dongtai-periodic-task'}, + "dongtai_engine.tasks.vul_recheck": {'exchange': 'dongtai-periodic-task', 'routing_key': 'dongtai-periodic-task'}, + "dongtai_engine.preheat.function_preheat": {'exchange': 'dongtai-periodic-task', 'routing_key': 'dongtai-periodic-task'}, + "dongtai_engine.plugins.data_clean": {'exchange': 'dongtai-periodic-task', 'routing_key': 'dongtai-periodic-task'}, +} +configs["CELERY_ENABLE_UTC"] = False +configs["timezone"] = settings.TIME_ZONE +configs["DJANGO_CELERY_BEAT_TZ_AWARE"] = False +configs["CELERY_BEAT_SCHEDULER"] = 'django_celery_beat.schedulers:DatabaseScheduler' + +app.namespace = 'CELERY' +app.conf.update(configs) + +# Load task modules from all registered Django app configs. 
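+# autodiscover_tasks() imports the tasks module of every installed Django app,
+# so functions decorated with @shared_task register against this Celery app.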
+app.autodiscover_tasks() +from dongtai_conf.settings import DONGTAI_CELERY_CACHE_PREHEAT + +def ready(self): + super().ready() + checkout_preheat_online(DONGTAI_CELERY_CACHE_PREHEAT) + + +app.ready = ready +print(f"preheat settings now : {DONGTAI_CELERY_CACHE_PREHEAT}") +def checkout_preheat_online(status): + from django_celery_beat.models import ( + CrontabSchedule, + PeriodicTask, + IntervalSchedule, + + ) + import json + from datetime import datetime, timedelta + if not status: + PeriodicTask.objects.delete(name='preheat functions') + else: + schedule, _ = IntervalSchedule.objects.get_or_create( + every=10, period=IntervalSchedule.MINUTES) + task = PeriodicTask.objects.get_or_create( + name='preheat functions', # simply describes this periodic task. + defaults={ + 'interval': schedule, # we created this above. + 'task': 'dongtai_engine.preheat.function_preheat', # name of task. + 'args': json.dumps([]), + 'kwargs': json.dumps({}), + }) + print(task) diff --git a/dongtai_conf/conf/config.ini.example b/dongtai_conf/conf/config.ini.example new file mode 100644 index 000000000..dcc9d0e57 --- /dev/null +++ b/dongtai_conf/conf/config.ini.example @@ -0,0 +1,72 @@ +[mysql] +host = dongtai-mysql +port = 3306 +name = dongtai_webapi +user = root +password = dongtai-iast + +[redis] +host = dongtai-redis +port = 6379 +password = 123456 +db = 0 + +[engine] +url = http://dongtai-engine:8000 + +[apiserver] +url = http://dongtai-server:8000 +#url = http://dongtai-web:8000 + +[security] +csrf_trust_origins = .example.com +secret_key = vbjlvbxfvazjfprywuxgyclmvhtmselddsefxxlcixovmqfpgy + +[smtp] +server = smtp_server +user = smtp_user +password = smtp_password +from_addr = from_addr +ssl = False +cc_addr = cc_addr +port = 25 + + +[sca] +#https://iast.huoxian.cn/openapi/sca/v1 +base_url = https://sca.huoxian.cn/ +timeout = 5 +token = + + +[task] +retryable = true +max_retries = 3 +async_send = true +async_send_delay = 5 + +[log_service] +host = dongtai-logstash +port = 8083 + +[common_file_path] +tmp_path = /tmp/logstash +report_img = report/img +report_pdf = report/pdf +report_word = report/word +report_excel = report/excel + +[other] +domain = http://localhost.domain/ +demo_session_cookie_domain = .huoxian.cn +logging_level = INFO +cache_preheat = True + +[elastic_search] +enable = false +host = http://dongtai:dongtaies@dongtaies:9200 +vulnerability_index = alias-dongtai-v1-vulnerability-dev +asset_aggr_index = alias-dongtai-v1-asset-aggr-dev +asset_index = alias-dongtai-v1-asset-dev +method_pool_index = alias-dongtai-v1-method-pool-dev +asset_vul_index = alias-dongtai-v1-asset-vul-dev diff --git a/dongtai_conf/conf/config.ini.test b/dongtai_conf/conf/config.ini.test new file mode 100644 index 000000000..ce42bbbe9 --- /dev/null +++ b/dongtai_conf/conf/config.ini.test @@ -0,0 +1,66 @@ +[mysql] +host = 127.0.0.1 +port = 3306 +name = dongtai_webapi +user = root +password = dongtai-iast + +[redis] +host = 127.0.0.1 +port = 6379 +password = 123456 +db = 0 + +[engine] +url = http://dongtai-engine:8000 + +[apiserver] +url = http://dongtai-server:8000 + +[smtp] +server = smtp_server +user = smtp_user +password = smtp_password +from_addr = from_addr +port = 25 +ssl = False +cc_addr = cc_addr + + +[aliyun_oss] +access_key = LTAI5t7pu9WUT2DcbknfNiaD +access_key_secret = ZoEOSi7KfayQ7JalvJVHa37fdZ4XFY + +[security] +csrf_trust_origins = localhost,.huoxian.cn,.secnium.xyz +secret_key = vbjlvbxfvazjfprywuxgyclmvhtmselddsefxxlcixovmqfpgy + +[sca] +base_url = http://52.80.75.225:8000 +timeout = 5 +token = + +[task] 
+retryable = false +max_retries = 3 +async_send = false +async_send_delay = 2 + +[log_service] +host = localhost +port = 8082 + +[other] +domain = http://localhost.domain/ +demo_session_cookie_domain = .huoxian.cn +logging_level = INFO +cache_preheat = True + +[elastic_search] +enable = false +host = http://dongtai:dongtaies@dongtaies:9200 +vulnerability_index = alias-dongtai-v1-vulnerability-dev +asset_aggr_index = alias-dongtai-v1-asset-aggr-dev +asset_index = alias-dongtai-v1-asset-dev +method_pool_index = alias-dongtai-v1-method-pool-dev +asset_vul_index = alias-dongtai-v1-asset-vul-dev diff --git a/dongtai_conf/conf/db.sql b/dongtai_conf/conf/db.sql new file mode 100644 index 000000000..2d36c205d --- /dev/null +++ b/dongtai_conf/conf/db.sql @@ -0,0 +1,765 @@ +/* + Navicat Premium Data Transfer + + Source Server : aws-saas-iast + Source Server Type : MySQL + Source Server Version : 50731 + Source Host : mysql-server:3306 + Source Schema : iast_webapi + + Target Server Type : MySQL + Target Server Version : 50731 + File Encoding : 65001 + + Date: 22/03/2021 18:34:42 +*/ + +SET NAMES utf8mb4; +SET FOREIGN_KEY_CHECKS = 0; + +-- ---------------------------- +-- Table structure for auth_department +-- ---------------------------- +DROP TABLE IF EXISTS `auth_department`; +CREATE TABLE `auth_department` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(255) DEFAULT NULL COMMENT '部门名称', + `create_time` int(11) DEFAULT NULL COMMENT '创建时间', + `update_time` int(11) DEFAULT NULL COMMENT '修改时间', + `created_by` int(11) DEFAULT NULL COMMENT '创建用户', + `parent_id` int(11) DEFAULT NULL COMMENT '父节点ID', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `name` (`name`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=36 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for auth_department_talent +-- ---------------------------- +DROP TABLE IF EXISTS `auth_department_talent`; +CREATE TABLE `auth_department_talent` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `department_id` int(11) DEFAULT NULL COMMENT '部门ID', + `talent_id` int(11) DEFAULT NULL COMMENT '租户ID', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `department_id` (`department_id`,`talent_id`) USING BTREE, + UNIQUE KEY `department_id_2` (`department_id`,`talent_id`) USING BTREE, + KEY `talent_id` (`talent_id`) USING BTREE, + CONSTRAINT `auth_department_talent_ibfk_1` FOREIGN KEY (`talent_id`) REFERENCES `auth_talent` (`id`), + CONSTRAINT `auth_department_talent_ibfk_2` FOREIGN KEY (`department_id`) REFERENCES `auth_department` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=27 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for auth_group +-- ---------------------------- +DROP TABLE IF EXISTS `auth_group`; +CREATE TABLE `auth_group` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(150) NOT NULL, + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `name` (`name`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for auth_group_permissions +-- ---------------------------- +DROP TABLE IF EXISTS `auth_group_permissions`; +CREATE TABLE `auth_group_permissions` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `group_id` int(11) NOT NULL, + `permission_id` int(11) NOT NULL, + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `auth_group_permissions_group_id_permission_id_0cd325b0_uniq` (`group_id`,`permission_id`) USING BTREE, + KEY `auth_group_permissio_permission_id_84c5c92e_fk_auth_perm` (`permission_id`) USING BTREE, + CONSTRAINT 
`auth_group_permissio_permission_id_84c5c92e_fk_auth_perm` FOREIGN KEY (`permission_id`) REFERENCES `auth_permission` (`id`), + CONSTRAINT `auth_group_permissions_group_id_b120cbf9_fk_auth_group_id` FOREIGN KEY (`group_id`) REFERENCES `auth_group` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for auth_permission +-- ---------------------------- +DROP TABLE IF EXISTS `auth_permission`; +CREATE TABLE `auth_permission` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(255) NOT NULL, + `content_type_id` int(11) NOT NULL, + `codename` varchar(100) NOT NULL, + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `auth_permission_content_type_id_codename_01ab375a_uniq` (`content_type_id`,`codename`) USING BTREE, + CONSTRAINT `auth_permission_content_type_id_2f476e4b_fk_django_co` FOREIGN KEY (`content_type_id`) REFERENCES `django_content_type` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for auth_talent +-- ---------------------------- +DROP TABLE IF EXISTS `auth_talent`; +CREATE TABLE `auth_talent` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `talent_name` varchar(255) DEFAULT NULL COMMENT '租户名称', + `create_time` int(11) DEFAULT NULL COMMENT '创建时间', + `update_time` int(11) DEFAULT NULL COMMENT '修改时间', + `created_by` int(11) DEFAULT NULL COMMENT '创建用户', + `is_active` tinyint(1) DEFAULT NULL COMMENT '租户是否启用', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `talent_name` (`talent_name`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=15 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for auth_user +-- ---------------------------- +DROP TABLE IF EXISTS `auth_user`; +CREATE TABLE `auth_user` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `password` varchar(128) NOT NULL, + `last_login` datetime(6) DEFAULT NULL, + `is_superuser` tinyint(1) NOT NULL COMMENT '0-普通用户、1-系统管理员、2-租户管理员', + `username` varchar(150) NOT NULL, + `first_name` varchar(30) NOT NULL, + `last_name` varchar(150) NOT NULL, + `email` varchar(254) NOT NULL, + `is_staff` tinyint(1) NOT NULL, + `is_active` tinyint(1) NOT NULL, + `date_joined` datetime(6) NOT NULL, + `phone` bigint(11) DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=38 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for auth_user_department +-- ---------------------------- +DROP TABLE IF EXISTS `auth_user_department`; +CREATE TABLE `auth_user_department` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `user_id` int(11) DEFAULT NULL COMMENT '用户ID', + `department_id` int(11) DEFAULT NULL COMMENT '部门ID', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=31 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for auth_user_groups +-- ---------------------------- +DROP TABLE IF EXISTS `auth_user_groups`; +CREATE TABLE `auth_user_groups` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `user_id` int(11) NOT NULL, + `group_id` int(11) NOT NULL, + PRIMARY KEY (`id`) USING BTREE, + KEY `user_id` (`user_id`) USING BTREE, + KEY `group_id` (`group_id`) USING BTREE, + CONSTRAINT `auth_user_groups_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`), + CONSTRAINT `auth_user_groups_ibfk_2` FOREIGN KEY (`group_id`) REFERENCES `auth_group` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=41 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for auth_user_user_permissions +-- ---------------------------- +DROP TABLE IF EXISTS 
`auth_user_user_permissions`; +CREATE TABLE `auth_user_user_permissions` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `user_id` int(11) NOT NULL, + `permission_id` int(11) NOT NULL, + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `auth_user_user_permissions_user_id_permission_id_14a6b632_uniq` (`user_id`,`permission_id`) USING BTREE, + KEY `auth_user_user_permi_permission_id_1fbb5f2c_fk_auth_perm` (`permission_id`) USING BTREE, + CONSTRAINT `auth_user_user_permi_permission_id_1fbb5f2c_fk_auth_perm` FOREIGN KEY (`permission_id`) REFERENCES `auth_permission` (`id`), + CONSTRAINT `auth_user_user_permissions_user_id_a95ead1b_fk_auth_user_id` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for authtoken_token +-- ---------------------------- +DROP TABLE IF EXISTS `authtoken_token`; +CREATE TABLE `authtoken_token` ( + `key` varchar(40) NOT NULL, + `created` datetime(6) NOT NULL, + `user_id` int(11) NOT NULL, + PRIMARY KEY (`key`) USING BTREE, + UNIQUE KEY `user_id` (`user_id`) USING BTREE, + CONSTRAINT `authtoken_token_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for captcha_captchastore +-- ---------------------------- +DROP TABLE IF EXISTS `captcha_captchastore`; +CREATE TABLE `captcha_captchastore` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `challenge` varchar(32) NOT NULL, + `response` varchar(32) NOT NULL, + `hashkey` varchar(40) NOT NULL, + `expiration` datetime(6) NOT NULL, + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `hashkey` (`hashkey`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=2082 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for django_admin_log +-- ---------------------------- +DROP TABLE IF EXISTS `django_admin_log`; +CREATE TABLE `django_admin_log` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `action_time` datetime(6) NOT NULL, + `object_id` longtext, + `object_repr` varchar(200) NOT NULL, + `action_flag` smallint(5) unsigned NOT NULL, + `change_message` longtext NOT NULL, + `content_type_id` int(11) DEFAULT NULL, + `user_id` int(11) NOT NULL, + PRIMARY KEY (`id`) USING BTREE, + KEY `django_admin_log_content_type_id_c4bce8eb_fk_django_co` (`content_type_id`) USING BTREE, + KEY `django_admin_log_user_id_c564eba6_fk_auth_user_id` (`user_id`) USING BTREE, + CONSTRAINT `django_admin_log_content_type_id_c4bce8eb_fk_django_co` FOREIGN KEY (`content_type_id`) REFERENCES `django_content_type` (`id`), + CONSTRAINT `django_admin_log_user_id_c564eba6_fk_auth_user_id` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=36314 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for django_content_type +-- ---------------------------- +DROP TABLE IF EXISTS `django_content_type`; +CREATE TABLE `django_content_type` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `app_label` varchar(100) NOT NULL, + `model` varchar(100) NOT NULL, + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `django_content_type_app_label_model_76bd3d3b_uniq` (`app_label`,`model`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=313 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for django_migrations +-- ---------------------------- +DROP TABLE IF EXISTS `django_migrations`; +CREATE TABLE `django_migrations` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `app` varchar(255) NOT NULL, + `name` 
varchar(255) NOT NULL, + `applied` datetime(6) NOT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for django_session +-- ---------------------------- +DROP TABLE IF EXISTS `django_session`; +CREATE TABLE `django_session` ( + `session_key` varchar(40) NOT NULL, + `session_data` longtext NOT NULL, + `expire_date` datetime(6) NOT NULL, + PRIMARY KEY (`session_key`) USING BTREE, + KEY `django_session_expire_date_a5c62663` (`expire_date`) USING BTREE +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_agent +-- ---------------------------- +DROP TABLE IF EXISTS `iast_agent`; +CREATE TABLE `iast_agent` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `token` varchar(255) DEFAULT NULL COMMENT 'agent唯一标识', + `version` varchar(255) DEFAULT NULL COMMENT '版本', + `latest_time` int(11) DEFAULT NULL COMMENT '更新时间', + `user_id` int(11) NOT NULL COMMENT '用户ID', + `server_id` int(11) DEFAULT NULL COMMENT '服务器ID', + `is_running` int(1) DEFAULT NULL COMMENT 'agent运行状态', + `control` int(1) DEFAULT NULL COMMENT 'agent控制位,1-安装、2-卸载、0-无控制', + `is_control` int(1) DEFAULT NULL COMMENT '是否正处于控制中,0-否,1-是', + `bind_project_id` int(11) DEFAULT '0' COMMENT '捆绑项目ID,存在则为已捆绑', + PRIMARY KEY (`id`) USING BTREE, + KEY `user_id` (`user_id`) USING BTREE, + KEY `server_id` (`server_id`) USING BTREE, + CONSTRAINT `iast_agent_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`), + CONSTRAINT `iast_agent_ibfk_2` FOREIGN KEY (`server_id`) REFERENCES `iast_server` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=202 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_agent_method_pool +-- ---------------------------- +DROP TABLE IF EXISTS `iast_agent_method_pool`; +CREATE TABLE `iast_agent_method_pool` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `agent_id` int(11) DEFAULT NULL COMMENT 'Agent', + `url` varchar(2000) DEFAULT NULL COMMENT 'URL', + `uri` varchar(2000) DEFAULT NULL COMMENT 'URI', + `http_method` varchar(10) DEFAULT NULL COMMENT 'HTTP请求方法', + `http_scheme` varchar(20) DEFAULT NULL COMMENT '协议', + `http_protocol` varchar(255) DEFAULT NULL COMMENT 'HTTP协议', + `req_header` varchar(2000) DEFAULT NULL COMMENT '请求头', + `req_params` varchar(2000) DEFAULT NULL COMMENT '请求参数', + `req_data` varchar(4000) DEFAULT NULL COMMENT '请求体', + `res_header` varchar(1000) DEFAULT NULL COMMENT '响应头', + `res_body` varchar(1000) DEFAULT NULL COMMENT '响应体', + `context_path` varchar(255) DEFAULT NULL COMMENT '应用上下文', + `language` varchar(20) DEFAULT NULL COMMENT '语言', + `method_pool` json DEFAULT NULL COMMENT '方法池', + `clent_ip` varchar(255) DEFAULT NULL COMMENT '客户端IP', + `create_time` int(11) DEFAULT NULL COMMENT '创建时间', + `update_time` int(11) DEFAULT NULL COMMENT '修改时间', + `pool_sign` varchar(40) DEFAULT NULL COMMENT '方法池签名', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `pool_sign` (`pool_sign`) USING BTREE, + KEY `agent_id` (`agent_id`) USING BTREE, + CONSTRAINT `iast_agent_method_pool_ibfk_1` FOREIGN KEY (`agent_id`) REFERENCES `iast_agent` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=1574138 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_agent_method_pool_sinks +-- ---------------------------- +DROP TABLE IF EXISTS `iast_agent_method_pool_sinks`; +CREATE TABLE `iast_agent_method_pool_sinks` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `methodpool_id` int(11) DEFAULT NULL, + `hookstrategy_id` int(11) DEFAULT NULL, + PRIMARY KEY (`id`) 
+) ENGINE=InnoDB AUTO_INCREMENT=205 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_agent_properties +-- ---------------------------- +DROP TABLE IF EXISTS `iast_agent_properties`; +CREATE TABLE `iast_agent_properties` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `hook_type` int(1) DEFAULT '0' COMMENT 'HOOK类型,1-全量HOOK,0-按配置HOOK', + `dump_class` int(1) DEFAULT '0' COMMENT '是否dump修改后的字节码,1-dump,0-不dump,默认不dump', + `create_time` int(11) DEFAULT NULL COMMENT '创建时间', + `update_time` int(11) DEFAULT NULL COMMENT '修改时间', + `updated_by` int(11) DEFAULT NULL COMMENT '修改人', + `agent_id` int(11) DEFAULT NULL COMMENT 'agent', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_asset +-- ---------------------------- +DROP TABLE IF EXISTS `iast_asset`; +CREATE TABLE `iast_asset` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `package_name` varchar(255) DEFAULT NULL COMMENT '第三方包名', + `package_path` varchar(255) DEFAULT NULL COMMENT '第三方包所在路径', + `signature_algorithm` varchar(255) DEFAULT NULL COMMENT '签名算法', + `signature_value` varchar(50) DEFAULT NULL COMMENT '签名值', + `dt` int(11) DEFAULT NULL COMMENT '更新时间', + `version` varchar(255) DEFAULT NULL COMMENT '当前版本', + `level_id` int(11) DEFAULT NULL COMMENT '漏洞等级', + `vul_count` int(11) DEFAULT NULL COMMENT '漏洞数量', + `agent_id` int(11) DEFAULT NULL COMMENT 'agent id', + `language` varchar(255) DEFAULT NULL COMMENT '语言', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `signature_value` (`signature_value`) USING BTREE, + KEY `agent_id` (`agent_id`) USING BTREE, + KEY `level_id` (`level_id`) USING BTREE, + CONSTRAINT `iast_asset_ibfk_2` FOREIGN KEY (`agent_id`) REFERENCES `iast_agent` (`id`), + CONSTRAINT `iast_asset_ibfk_3` FOREIGN KEY (`level_id`) REFERENCES `iast_vul_level` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=15160 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_deploy +-- ---------------------------- +DROP TABLE IF EXISTS `iast_deploy`; +CREATE TABLE `iast_deploy` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `desc` mediumtext COMMENT '安装描述', + `middleware` varchar(255) DEFAULT NULL COMMENT '中间件', + `os` varchar(255) DEFAULT NULL COMMENT '操作系统', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_errorlog +-- ---------------------------- +DROP TABLE IF EXISTS `iast_errorlog`; +CREATE TABLE `iast_errorlog` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `errorlog` mediumtext COMMENT '错误日志详情', + `state` varchar(50) DEFAULT NULL COMMENT '错误日志状态', + `dt` int(11) DEFAULT NULL COMMENT '日志触发时间', + `agent_id` int(11) DEFAULT NULL COMMENT 'agent id', + PRIMARY KEY (`id`) USING BTREE, + KEY `agent_id` (`agent_id`) USING BTREE, + CONSTRAINT `iast_errorlog_ibfk_2` FOREIGN KEY (`agent_id`) REFERENCES `iast_agent` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=94994 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_heartbeat +-- ---------------------------- +DROP TABLE IF EXISTS `iast_heartbeat`; +CREATE TABLE `iast_heartbeat` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `hostname` varchar(1000) DEFAULT NULL COMMENT '主机名', + `network` varchar(2000) DEFAULT NULL COMMENT '网卡信息 ', + `memory` varchar(1000) DEFAULT NULL COMMENT '内存信息', + `cpu` varchar(1000) DEFAULT NULL COMMENT 'CPU信息', + 
`disk` varchar(1000) DEFAULT NULL COMMENT '磁盘信息', + `pid` varchar(1050) DEFAULT NULL COMMENT '进程ID,带主机名', + `env` mediumtext COMMENT '环境变量', + `req_count` int(255) DEFAULT NULL COMMENT 'HTTP请求数量', + `dt` int(11) DEFAULT NULL COMMENT '最近一次心跳时间', + `agent_id` int(11) DEFAULT NULL COMMENT 'agent ID', + PRIMARY KEY (`id`) USING BTREE, + KEY `agent_id` (`agent_id`) USING BTREE, + CONSTRAINT `iast_heartbeat_ibfk_2` FOREIGN KEY (`agent_id`) REFERENCES `iast_agent` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=1560483 DEFAULT CHARSET=utf8mb4 COMMENT='IAST agent心跳表'; + +-- ---------------------------- +-- Table structure for iast_hook_strategy +-- ---------------------------- +DROP TABLE IF EXISTS `iast_hook_strategy`; +CREATE TABLE `iast_hook_strategy` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `value` varchar(2000) DEFAULT NULL COMMENT '策略值', + `source` varchar(255) DEFAULT NULL COMMENT '污点来源', + `target` varchar(255) DEFAULT NULL COMMENT '污点去向', + `inherit` varchar(255) DEFAULT NULL COMMENT '继承类型,false-仅检测当前类,true-进检测子类,all-检测当前类及子类', + `track` varchar(5) DEFAULT NULL COMMENT '是否需要污点跟踪,true-需要,false-不需要', + `create_time` int(11) DEFAULT NULL COMMENT '创建时间', + `update_time` int(11) DEFAULT NULL COMMENT '修改时间', + `created_by` int(11) DEFAULT NULL COMMENT '创建人', + `enable` tinyint(1) DEFAULT '1' COMMENT '启用状态:0-禁用,1-启用,-1-删除', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=500 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_hook_strategy_type +-- ---------------------------- +DROP TABLE IF EXISTS `iast_hook_strategy_type`; +CREATE TABLE `iast_hook_strategy_type` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `hookstrategy_id` int(11) DEFAULT NULL COMMENT '策略ID', + `hooktype_id` int(11) DEFAULT NULL COMMENT '策略类型ID', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `hookstrategy_id` (`hookstrategy_id`,`hooktype_id`) USING HASH +) ENGINE=InnoDB AUTO_INCREMENT=959 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_hook_talent_strategy +-- ---------------------------- +DROP TABLE IF EXISTS `iast_hook_talent_strategy`; +CREATE TABLE `iast_hook_talent_strategy` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `talent_id` int(11) DEFAULT NULL COMMENT '租户ID', + `values` varchar(500) DEFAULT NULL COMMENT '租户启用的策略类型', + `create_time` int(11) DEFAULT NULL COMMENT '创建时间', + `update_time` int(11) DEFAULT NULL COMMENT '修改时间', + `created_by` int(11) DEFAULT NULL COMMENT '创建者', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_hook_type +-- ---------------------------- +DROP TABLE IF EXISTS `iast_hook_type`; +CREATE TABLE `iast_hook_type` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `type` int(11) DEFAULT NULL COMMENT '策略总类型,1-source节点、2-propagator节点、3-filter节点、4-sink节点', + `name` varchar(255) DEFAULT NULL COMMENT '策略类型名称', + `value` varchar(255) DEFAULT NULL COMMENT '策略类型值', + `enable` int(1) DEFAULT NULL COMMENT '状态:1-启用;0-禁用', + `create_time` int(11) DEFAULT NULL COMMENT '创建时间', + `update_time` int(11) DEFAULT NULL COMMENT '修改时间', + `created_by` int(11) DEFAULT NULL COMMENT '创建者', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=74 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_project +-- ---------------------------- +DROP TABLE IF EXISTS `iast_project`; +CREATE TABLE `iast_project` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + 
`name` varchar(255) DEFAULT NULL COMMENT '项目名称', + `mode` varchar(255) DEFAULT NULL COMMENT '项目类型,默认为插桩', + `vul_count` int(11) unsigned DEFAULT '0' COMMENT '漏洞数量', + `agent_count` int(11) DEFAULT NULL COMMENT 'Agent数量', + `latest_time` int(11) DEFAULT NULL COMMENT '最新时间', + `user_id` int(11) DEFAULT NULL COMMENT 'user id', + `scan_id` bigint(20) unsigned DEFAULT NULL COMMENT '扫描策略ID', + PRIMARY KEY (`id`) USING BTREE, + KEY `user_id` (`user_id`) USING BTREE, + KEY `scan_id` (`scan_id`) USING BTREE, + CONSTRAINT `iast_project_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`), + CONSTRAINT `iast_project_ibfk_2` FOREIGN KEY (`scan_id`) REFERENCES `iast_strategy_user` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=52 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_server +-- ---------------------------- +DROP TABLE IF EXISTS `iast_server`; +CREATE TABLE `iast_server` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `name` varchar(255) DEFAULT NULL COMMENT '服务器名称', + `hostname` varchar(255) DEFAULT NULL COMMENT '主机名', + `ip` varchar(255) DEFAULT NULL COMMENT '服务器IP地址', + `port` int(11) DEFAULT NULL COMMENT '服务器开放的端口', + `environment` text COMMENT '运行环境:dev/test/prod', + `agent_version` varchar(20) DEFAULT NULL COMMENT 'Agent版本', + `latest_agent_version` varchar(255) DEFAULT NULL COMMENT '最新Agent版本', + `language` varchar(20) DEFAULT NULL COMMENT 'Agent语言', + `path` varchar(255) DEFAULT NULL COMMENT '服务器路径', + `status` varchar(255) DEFAULT NULL COMMENT '服务器状态', + `container` varchar(255) DEFAULT NULL COMMENT '中间件信息', + `container_path` varchar(255) DEFAULT NULL COMMENT '中间件路径', + `command` varchar(255) DEFAULT NULL COMMENT '启动命令', + `env` varchar(255) DEFAULT NULL COMMENT '环境变量', + `runtime` varchar(255) DEFAULT NULL COMMENT '运行时环境', + `create_time` int(11) DEFAULT NULL COMMENT '启动时间', + `update_time` int(11) DEFAULT NULL COMMENT '最近一次活跃', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=76 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_strategy +-- ---------------------------- +DROP TABLE IF EXISTS `iast_strategy`; +CREATE TABLE `iast_strategy` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `user_id` int(11) DEFAULT NULL COMMENT '用户ID', + `vul_type` varchar(255) DEFAULT NULL COMMENT '漏洞类型', + `level_id` int(11) DEFAULT NULL COMMENT '漏洞等级', + `state` varchar(255) DEFAULT NULL COMMENT '策略状态,true-开启,false-关闭', + `dt` int(11) DEFAULT NULL COMMENT '策略变更时间', + `vul_name` varchar(255) DEFAULT NULL COMMENT '漏洞名称(中文)', + `vul_desc` text COMMENT '漏洞描述', + `vul_fix` text COMMENT '修复建议', + PRIMARY KEY (`id`) USING BTREE, + KEY `user_id` (`user_id`) USING BTREE, + KEY `level_id` (`level_id`) USING BTREE, + CONSTRAINT `iast_strategy_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`), + CONSTRAINT `iast_strategy_ibfk_2` FOREIGN KEY (`level_id`) REFERENCES `iast_vul_level` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_strategy_user +-- ---------------------------- +DROP TABLE IF EXISTS `iast_strategy_user`; +CREATE TABLE `iast_strategy_user` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(200) DEFAULT NULL COMMENT '策略名称', + `content` text COMMENT '策略ID串', + `user_id` int(11) DEFAULT NULL COMMENT '用户ID', + `status` tinyint(2) DEFAULT '1' COMMENT '1有效0无效', + `created_at` datetime DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `id` 
(`id`) USING BTREE, + KEY `user_id` (`user_id`) USING BTREE, + CONSTRAINT `iast_strategy_user_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=17 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_system +-- ---------------------------- +DROP TABLE IF EXISTS `iast_system`; +CREATE TABLE `iast_system` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT, + `agent_value` varchar(50) DEFAULT NULL COMMENT 'agent类型', + `java_version` varchar(50) DEFAULT NULL COMMENT 'java版本', + `middleware` varchar(50) DEFAULT NULL COMMENT '中间件', + `system` varchar(50) DEFAULT NULL COMMENT '系统信息', + `deploy_status` tinyint(5) DEFAULT NULL COMMENT '0未安装,1第一步,2第二部', + `created_at` datetime DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `update_at` datetime DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间', + `user_id` int(11) DEFAULT NULL COMMENT '操作用户', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `id` (`id`) USING BTREE, + KEY `user_id` (`user_id`) USING BTREE, + CONSTRAINT `iast_system_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_vul_level +-- ---------------------------- +DROP TABLE IF EXISTS `iast_vul_level`; +CREATE TABLE `iast_vul_level` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(255) DEFAULT NULL COMMENT '漏洞等级名称 :high、medium、low、info', + `name_value` varchar(255) DEFAULT NULL COMMENT '漏洞等级值:高危、中危、低危、提示', + `name_type` varchar(255) DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_vul_overpower +-- ---------------------------- +DROP TABLE IF EXISTS `iast_vul_overpower`; +CREATE TABLE `iast_vul_overpower` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `agent_id` int(255) DEFAULT NULL COMMENT 'agent ID ', + `http_url` varchar(2000) DEFAULT NULL COMMENT 'HTTP请求的URL', + `http_uri` varchar(2000) DEFAULT NULL COMMENT 'HTTP请求的URI', + `http_query_string` varchar(2000) DEFAULT NULL COMMENT 'HTTP请求的查询参数', + `http_method` varchar(10) DEFAULT NULL COMMENT 'HTTP请求的方法', + `http_scheme` varchar(255) DEFAULT NULL COMMENT 'HTTP请求的协议', + `http_protocol` varchar(255) DEFAULT NULL COMMENT 'HTTP请求协议(完整)', + `http_header` varchar(2000) DEFAULT NULL COMMENT 'HTTP请求头', + `x_trace_id` varchar(255) DEFAULT NULL COMMENT '灵芝trace-id', + `cookie` varchar(2000) DEFAULT NULL COMMENT '当前请求的cookie', + `sql` varchar(2000) DEFAULT NULL COMMENT '当前请求触发的sql语句', + `created_time` datetime DEFAULT NULL, + `updated_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE, + KEY `agent_id` (`agent_id`) USING BTREE, + CONSTRAINT `iast_vul_overpower_ibfk_2` FOREIGN KEY (`agent_id`) REFERENCES `iast_agent` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_vul_rule +-- ---------------------------- +DROP TABLE IF EXISTS `iast_vul_rule`; +CREATE TABLE `iast_vul_rule` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `rule_name` varchar(255) DEFAULT NULL COMMENT '策略名称', + `rule_level` varchar(10) DEFAULT NULL COMMENT '策略等级', + `rule_msg` varchar(255) DEFAULT NULL COMMENT '策略描述', + `rule_value` json DEFAULT NULL COMMENT '策略详情', + `is_enable` tinyint(1) DEFAULT NULL COMMENT '是否启用,0-禁用、1-启用', + `is_system` tinyint(1) DEFAULT NULL COMMENT '是否为系统策略', + `create_by` int(11) DEFAULT NULL COMMENT '创建者', + `create_time` int(11) 
DEFAULT NULL COMMENT '创建时间', + `update_time` int(11) DEFAULT NULL COMMENT '更新时间', + PRIMARY KEY (`id`), + UNIQUE KEY `rule_name` (`rule_name`,`create_by`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=46 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for iast_vulnerability +-- ---------------------------- +DROP TABLE IF EXISTS `iast_vulnerability`; +CREATE TABLE `iast_vulnerability` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `type` varchar(255) DEFAULT NULL COMMENT '漏洞类型', + `level_id` int(11) DEFAULT NULL COMMENT '漏洞等级', + `url` varchar(2000) DEFAULT NULL COMMENT '漏洞url', + `uri` varchar(255) DEFAULT '' COMMENT 'uri', + `http_method` varchar(10) DEFAULT NULL COMMENT '漏洞请求方法', + `http_scheme` varchar(255) DEFAULT NULL COMMENT '协议名', + `http_protocol` varchar(255) DEFAULT NULL COMMENT 'HTTP协议', + `req_header` mediumtext COMMENT '漏洞请求的header头', + `req_params` varchar(2000) DEFAULT NULL COMMENT '漏洞url的get参数', + `req_data` mediumtext COMMENT '漏洞url的post数据信息', + `res_header` mediumtext COMMENT '漏洞响应头', + `res_body` mediumtext COMMENT '漏洞响应包', + `full_stack` mediumtext COMMENT '漏洞栈', + `top_stack` varchar(255) DEFAULT NULL COMMENT '污点栈-栈顶', + `bottom_stack` varchar(255) DEFAULT NULL COMMENT '污点栈-栈底', + `taint_value` varchar(255) DEFAULT NULL COMMENT '污点值', + `taint_position` varchar(255) DEFAULT NULL COMMENT '漏洞所在请求的位置', + `agent_id` int(11) DEFAULT '0' COMMENT '应用ID', + `context_path` varchar(255) DEFAULT NULL COMMENT '漏洞所在应用', + `counts` int(11) DEFAULT NULL COMMENT '漏洞出现次数', + `status` varchar(255) DEFAULT NULL COMMENT '漏洞状态:已上报、已确认、已忽略', + `language` varchar(255) DEFAULT NULL COMMENT '开发语言', + `first_time` int(11) DEFAULT NULL COMMENT '漏洞第一次出现的时间', + `latest_time` int(11) DEFAULT NULL COMMENT '漏洞最近一次出现的时间', + `client_ip` varchar(255) DEFAULT NULL COMMENT '来源IP', + `param_name` varchar(255) DEFAULT NULL COMMENT '传递参数变量名称', + PRIMARY KEY (`id`) USING BTREE, + KEY `agent_id` (`agent_id`) USING BTREE, + KEY `level_id` (`level_id`) USING BTREE, + CONSTRAINT `iast_vulnerability_ibfk_2` FOREIGN KEY (`agent_id`) REFERENCES `iast_agent` (`id`), + CONSTRAINT `iast_vulnerability_ibfk_3` FOREIGN KEY (`level_id`) REFERENCES `iast_vul_level` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=366 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for sca_artifact_db +-- ---------------------------- +DROP TABLE IF EXISTS `sca_artifact_db`; +CREATE TABLE `sca_artifact_db` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `cwe_id` varchar(20) DEFAULT NULL COMMENT 'CWE漏洞编号', + `cve_id` varchar(20) DEFAULT NULL COMMENT 'CVE漏洞编号', + `stage` varchar(255) DEFAULT NULL COMMENT '第三方包发布类型', + `title` varchar(255) DEFAULT NULL COMMENT '漏洞标题', + `overview` text COMMENT '漏洞概述', + `teardown` text COMMENT '漏洞详细解释(markdown)', + `group_id` varchar(256) DEFAULT NULL COMMENT '第三方组件的组信息', + `artifact_id` varchar(256) DEFAULT NULL COMMENT '第三方组件的名称', + `latest_version` varchar(50) DEFAULT NULL COMMENT '第三方组件的最新版本', + `component_name` varchar(512) DEFAULT NULL COMMENT '第三方组件的human名称', + `dt` int(11) DEFAULT NULL COMMENT '数据添加时间', + `reference` text COMMENT '相关链接/分析文章', + `cvss_score` float(10,0) DEFAULT NULL COMMENT 'cvss2评分', + `cvss3_score` float(10,0) DEFAULT NULL COMMENT 'cvss3评分', + `level` varchar(20) DEFAULT NULL COMMENT '漏洞等级(以cvss3为准)', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `cve_id` (`cve_id`,`group_id`,`artifact_id`,`latest_version`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=39499 DEFAULT CHARSET=utf8mb4; + +-- 
---------------------------- +-- Table structure for sca_maven_artifact +-- ---------------------------- +DROP TABLE IF EXISTS `sca_maven_artifact`; +CREATE TABLE `sca_maven_artifact` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `aid` int(11) DEFAULT NULL COMMENT 'artifactdb表关联主键', + `safe_version` varchar(255) DEFAULT NULL COMMENT '推荐版本', + `version_range` varchar(255) DEFAULT NULL COMMENT '组件版本范围', + `cph_version` varchar(255) DEFAULT NULL COMMENT 'maven查询规范', + `dt` int(11) DEFAULT NULL COMMENT '更新时间', + `patch` varchar(255) DEFAULT NULL COMMENT '补丁地址', + `cph` varchar(255) DEFAULT NULL COMMENT '组件maven查询语法', + `type` varchar(255) DEFAULT NULL COMMENT '包管理器类型', + `group_id` varchar(255) DEFAULT NULL COMMENT '包管理器组', + `artifact_id` varchar(255) DEFAULT NULL COMMENT 'artifact', + `version` varchar(255) DEFAULT NULL COMMENT '版本', + `signature` varchar(255) DEFAULT NULL COMMENT '版本哈希', + `package_name` varchar(255) DEFAULT NULL COMMENT '包名', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `cph_version` (`cph_version`,`aid`) USING BTREE, + KEY `aid` (`aid`) USING BTREE, + CONSTRAINT `sca_maven_artifact_ibfk_1` FOREIGN KEY (`aid`) REFERENCES `sca_artifact_db` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=66971721 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for sca_maven_db +-- ---------------------------- +DROP TABLE IF EXISTS `sca_maven_db`; +CREATE TABLE `sca_maven_db` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键', + `group_id` varchar(255) DEFAULT NULL COMMENT 'Java第三方组件的groupID', + `atrifact_id` varchar(255) DEFAULT NULL COMMENT 'Java第三方组件的ArtifactId', + `version` varchar(255) DEFAULT NULL COMMENT 'Java第三方组件的版本号', + `sha_1` varchar(255) DEFAULT NULL COMMENT 'Java包的SHA-1值,用于与灵芝Agent获取的数据进行匹配', + `package_name` varchar(255) DEFAULT NULL COMMENT '包名', + `aql` varchar(255) DEFAULT NULL COMMENT '组件查询语言', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `sha_1` (`sha_1`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=193562 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for sca_record +-- ---------------------------- +DROP TABLE IF EXISTS `sca_record`; +CREATE TABLE `sca_record` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `page` int(11) DEFAULT NULL COMMENT '当前页', + `total` int(11) DEFAULT NULL COMMENT '总页数', + `dt` int(11) DEFAULT NULL COMMENT '更新时间s', + `type` varchar(255) DEFAULT NULL COMMENT '记录类型', + `data` varchar(255) DEFAULT NULL COMMENT '记录数据', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for sca_vul_db +-- ---------------------------- +DROP TABLE IF EXISTS `sca_vul_db`; +CREATE TABLE `sca_vul_db` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `package_type` varchar(20) DEFAULT NULL COMMENT '包管理器', + `cve` varchar(20) DEFAULT NULL COMMENT 'cve编号', + `cwe` varchar(20) DEFAULT NULL COMMENT 'cwe编号', + `vul_name` varchar(255) DEFAULT NULL COMMENT '漏洞名称', + `vul_level` varchar(20) DEFAULT NULL COMMENT '漏洞等级', + `cve_href` varchar(255) DEFAULT NULL COMMENT 'CVE地址', + `cwe_href` varchar(255) DEFAULT NULL COMMENT 'CWE地址', + `aql` varchar(255) DEFAULT NULL COMMENT '组件查询语言:', + `version_range` varchar(255) DEFAULT NULL COMMENT '版本范围', + `version_condition` varchar(255) DEFAULT NULL COMMENT '版本范围-条件', + `latest_version` varchar(255) DEFAULT NULL COMMENT '最新版本', + `overview` varchar(255) DEFAULT NULL COMMENT '漏洞概述', + `teardown` varchar(2000) DEFAULT NULL COMMENT '漏洞详细描述', + `url` varchar(255) DEFAULT NULL 
COMMENT '漏洞地址', + `source` varchar(20) DEFAULT NULL COMMENT '数据来源', + `dt` int(11) DEFAULT NULL COMMENT '时间戳', + `extra` varchar(2000) DEFAULT NULL COMMENT '附加数据,暂时不知道是否有用', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY `package_type` (`package_type`,`cve`,`cwe`,`vul_name`,`aql`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=31963 DEFAULT CHARSET=utf8mb4; + +SET FOREIGN_KEY_CHECKS = 1; diff --git a/conf/uwsgi.ini b/dongtai_conf/conf/uwsgi.ini similarity index 74% rename from conf/uwsgi.ini rename to dongtai_conf/conf/uwsgi.ini index 167629192..4b2a4ba18 100644 --- a/conf/uwsgi.ini +++ b/dongtai_conf/conf/uwsgi.ini @@ -4,19 +4,23 @@ http = :8000 #指定项目的目录,在app加载前切换到当前目录 -chdir = /opt/iast/apiserver +chdir = /opt/dongtai # Django的wsgi文件,用来加载blog2/wsgi.py这个模块 -module = AgentServer.wsgi +module = dongtai_conf.wsgi # Python虚拟环境的路径 # master 启动主进程。 master = true # 最大数量的工作进程数 -processes = 2 -# 指定工作进程中的线程数 -threads = 10 - +processes = %k*2 +# 指定工作进程中的线程数 %k*8 +threads = 8 +# worker = 8 +listen = 1024 # 设置socket的权限 -chmod-socket = 664 +chmod-socket = 664 # 退出的时候是否清理环境,自动移除unix Socket 和 Pid 文件 vacuum = true + +static-map = /upload=/opt/dongtai/static + diff --git a/dongtai_conf/conf/uwsgi_params b/dongtai_conf/conf/uwsgi_params new file mode 100644 index 000000000..d1de4c8f3 --- /dev/null +++ b/dongtai_conf/conf/uwsgi_params @@ -0,0 +1,16 @@ +uwsgi_param QUERY_STRING $query_string; +uwsgi_param REQUEST_METHOD $request_method; +uwsgi_param CONTENT_TYPE $content_type; +uwsgi_param CONTENT_LENGTH $content_length; + +uwsgi_param REQUEST_URI $request_uri; +uwsgi_param PATH_INFO $document_uri; +uwsgi_param DOCUMENT_ROOT $document_root; +uwsgi_param SERVER_PROTOCOL $server_protocol; +uwsgi_param REQUSET_SCHEME $scheme; +uwsgi_param HTTPS $https if_not_empty; + +uwsgi_param REMOTE_ADDR $remote_addr; +uwsgi_param REMOTE_PORT $remote_port; +uwsgi_param SERVER_PORT $server_port; +uwsgi_param SERVER_NAME $server_name; diff --git a/dongtai_conf/plugin/__init__.py b/dongtai_conf/plugin/__init__.py new file mode 100644 index 000000000..5795c9a24 --- /dev/null +++ b/dongtai_conf/plugin/__init__.py @@ -0,0 +1,95 @@ +from dongtai_conf.settings import BASE_DIR +from os import walk, chdir, getcwd +from os.path import join +from importlib import import_module +from inspect import getmembers,isclass +from functools import wraps +import logging +logger = logging.getLogger('dongtai.openapi') +PLUGIN_DICT = {} + + +class DongTaiPlugin: + appname = 'appserver' + target_class_name = 'SaasMethodPoolHandler' + target_func_name = 'save_method_call' + target_module_name = 'dongtai_protocol.report.handler.saas_method_pool_handler' + + def before_patch_function(self, func_args, func_kwargs): + pass + + def after_patch_function(self, func_args, func_kwargs, func_res): + pass + + def _monkey_patch(self): + module = import_module(self.target_module_name) + target_class = getattr(module, self.target_class_name) + origin_func = getattr(target_class, self.target_func_name) + setattr(target_class, f'_origin_{self.target_func_name}', origin_func) + self.target_class = target_class + + @wraps(origin_func) + def patched_function(*args, **kwargs): + logger.debug( + f"{self.target_class_name} {self.target_func_name} args:{args} kwargs:{kwargs}" + ) + try: + self.before_patch_function(args, kwargs) + except Exception as e: + logger.info(f'plugin error:{e} args: {args} kwargs: {kwargs}', + exc_info=True) + res = origin_func(*args, **kwargs) + try: + final_res = self.after_patch_function(args, kwargs, res) + except Exception as e: + 
logger.info(f'plugin error:{e} args: {args} kwargs: {kwargs}', + exc_info=True) + return res + + return final_res + + setattr(target_class, self.target_func_name, + patched_function) + + def monkey_patch(self, appname): + if self.appname == appname: + try: + self._monkey_patch() + logger.info( + f"app: {appname} module: {self.target_module_name} class: {self.target_class_name} func : {self.target_func_name} is patched by {type(self).__name__}" + ) + except Exception as e: + logger.error(f"monkey_patch failed: {e}", exc_info=True) + +def monkey_patch(appname): + plugin_dict = get_plugin_dict() + for plugin in plugin_dict.get(appname, []): + plugin().monkey_patch(appname) + + +def get_plugin_dict(): + if PLUGIN_DICT: + return PLUGIN_DICT + previous_path = getcwd() + PLUGIN_ROOT_PATH = join(BASE_DIR, 'dongtai_conf/plugin') + for root, directories, files in walk(top=PLUGIN_ROOT_PATH, topdown=False): + for file_ in files: + if file_.startswith('plug_') and file_.endswith('.py'): + packname = '.'.join([ + root.replace(BASE_DIR + '/', '').replace('/', '.'), + file_.replace('.py', '') + ]) + mod = import_module(packname) + plugin_classes = filter(lambda x: _plug_class_filter(x), + getmembers(mod)) + for name, plug_class in plugin_classes: + if PLUGIN_DICT.get(plug_class.appname): + PLUGIN_DICT[plug_class.appname] += [plug_class] + else: + PLUGIN_DICT[plug_class.appname] = [plug_class] + chdir(previous_path) + return PLUGIN_DICT + +def _plug_class_filter(tup): + return tup[0].startswith('Plug') and isclass( + tup[1]) and issubclass(tup[1], DongTaiPlugin) diff --git a/dongtai_conf/plugin_install.py b/dongtai_conf/plugin_install.py new file mode 100644 index 000000000..fd10e40e8 --- /dev/null +++ b/dongtai_conf/plugin_install.py @@ -0,0 +1,55 @@ +import zipfile +import requests +from io import BytesIO +import fire +import os +from shutil import copytree +from typing import Optional + +def _get_plugin(repo: str, extra:dict): + url_schema = 'https://github.com/{repo}/{resofurl}' + default_url = 'https://github.com/HXSecurityBusiness/DongTai-webapi/archive/refs/heads/main.zip' + default_url = 'https://github.com/Bidaya0/DongTai-openapi/archive/refs/tags/v1.0.3.zip' + if 'branch' in extra.keys(): + resofurl = f'archive/refs/heads/{extra["branch"]}.zip' + elif 'tag' in extra.keys(): + resofurl = f'archive/refs/tags/{extra["tag"]}.zip' + elif 'commit' in extra.keys(): + resofurl = f'zip/{extra["commit"]}' + else: + resofurl = 'archive/refs/heads/main.zip' + final_url = f'https://github.com/{repo}/{resofurl}' + if 'uri' in extra.keys(): + final_url = extra['uri'] + r = requests.get(final_url, stream=True) + z = zipfile.ZipFile(BytesIO(r.content)) + owner, repo_name = repo.split('/') + z.extractall(f'/tmp/plugin/{repo_name}') + +def _install_plugin(repo: str): + owner,repo_name = repo.split('/') + base_path = f"/tmp/plugin/{repo_name}/{os.listdir(f'/tmp/plugin/{repo_name}')[0]}" + copyapp_path = f"{base_path}/logs" + copytree(copyapp_path,f'./{repo_name}') + copyapp_path = f"{base_path}/logs" + copytree(copyapp_path,f'./plugin/{repo_name}/') + + +def get_plugin(repo: str, + branch: Optional[str] = None, + tag: Optional[str] = None, + commit: Optional[str] = None, + uri: Optional[str] = None): + extra = { + key: value + for key, value in filter( + lambda x: x[1], + zip(['branch', 'tag', 'commit', 'uri'], + [branch, tag, commit, uri])) + } + _get_plugin(repo,extra) + _install_plugin(repo) + + +if __name__ == '__main__': + fire.Fire(get_plugin) diff --git a/dongtai_conf/settings.py b/dongtai_conf/settings.py 
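The plugin loader above scans `dongtai_conf/plugin` for files named `plug_*.py` and registers every class whose name starts with `Plug` and subclasses `DongTaiPlugin`; `monkey_patch(appname)` then wraps the configured target function with the before/after hooks. A minimal sketch of what such a plugin file could look like (the file name `plug_example.py`, the class name `PlugExample` and the hook bodies are hypothetical; the default target attributes are inherited from `DongTaiPlugin` as defined above):

    # dongtai_conf/plugin/plug_example.py (hypothetical example)
    from dongtai_conf.plugin import DongTaiPlugin


    class PlugExample(DongTaiPlugin):
        # Inherits the default target: SaasMethodPoolHandler.save_method_call in
        # dongtai_protocol.report.handler.saas_method_pool_handler, and is only
        # applied when monkey_patch('appserver') runs for this appname.
        appname = 'appserver'

        def before_patch_function(self, func_args, func_kwargs):
            # Runs before the original call; exceptions raised here are caught
            # and logged by the patched wrapper above.
            pass

        def after_patch_function(self, func_args, func_kwargs, func_res):
            # Runs after the original call; its return value is handed back to
            # the caller (the wrapper falls back to the original result if this
            # hook raises).
            return func_res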
new file mode 100644 index 000000000..3924bd245 --- /dev/null +++ b/dongtai_conf/settings.py @@ -0,0 +1,715 @@ +""" +Django settings for dongtai_conf project. + +Generated by 'django-admin startproject' using Django 3.0.3. + +For more information on this file, see +https://docs.djangoproject.com/en/3.0/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/3.0/ref/settings/ +""" + +from typing import List +from ast import literal_eval +from urllib.parse import urljoin +import os +import sys +from configparser import ConfigParser + +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +import random + +import pymysql +from dongtai_conf.utils import get_config + + +pymysql.install_as_MySQLdb() + + +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +get_config(BASE_DIR, os.getenv("TARGET_SECRETSMANAGER", "")) + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ + + +# SECURITY WARNING: don't run with debug turned on in production! +# or os.getenv('environment', None) in ('TEST',) +DEBUG = os.environ.get("debug", 'false') == 'true' + +# READ CONFIG FILE +config = ConfigParser() +status = config.read(os.path.join(BASE_DIR, 'dongtai_conf/conf/config.ini')) +if len(status) == 0: + print("config file not exist. stop running") + exit(0) + + +def ranstr(num): + H = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*()`-{}|:?><>?' + salt = '' + for i in range(num): + salt += random.choice(H) + return salt + + +# SECURITY WARNING: keep the secret key used in production secret! +try: + SECRET_KEY = config.get('security', 'secret_key') +except Exception as e: + SECRET_KEY = ranstr(50) +# DEBUG = True +ALLOWED_HOSTS = ['*'] + +# Application definition +TOKEN_EXP_DAY = 14 + +INSTALLED_APPS = [ + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'rest_framework', + 'rest_framework.authtoken', + 'django_filters', + 'corsheaders', + 'captcha', + 'modeltranslation', + 'django_celery_beat', + 'deploy.commands', +] +DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' + + +def get_installed_apps(): + from os import walk, chdir, getcwd + previous_path = getcwd() + master = [] + APPS_ROOT_PATH = BASE_DIR + chdir(APPS_ROOT_PATH) + for root, directories, files in walk(top=getcwd(), topdown=False): + for file_ in files: + if 'apps.py' in file_ and len( + list( + filter(lambda x: x != '', + root.replace(getcwd(), '').split('/')))) == 1: + app_path = f"{root.replace(BASE_DIR + '/', '').replace('/', '.')}" + master.append(app_path) + chdir(previous_path) + return master + + +CUSTOM_APPS = get_installed_apps() +INSTALLED_APPS.extend(CUSTOM_APPS) + + +MODELTRANSLATION_LANGUAGES = ('en', 'zh') +MODELTRANSLATION_DEFAULT_LANGUAGE = 'zh' +REST_FRAMEWORK = { + 'PAGE_SIZE': + 20, + 'DEFAULT_PAGINATION_CLASS': ['django.core.paginator'], + 'DEFAULT_AUTHENTICATION_CLASSES': [ + 'rest_framework.authentication.SessionAuthentication', + 'rest_framework.authentication.TokenAuthentication', + ], + 'DEFAULT_RENDERER_CLASSES': [ + 'rest_framework.renderers.JSONRenderer', + ], + 'DEFAULT_THROTTLE_CLASSES': ('rest_framework.throttling.AnonRateThrottle', + 'rest_framework.throttling.UserRateThrottle'), + 'DEFAULT_THROTTLE_RATES': { + 'anon': '6000000/min', + 'user': '6000000/min' + }, +} + +basedir = 
os.path.dirname(os.path.realpath(__file__)) +LANGUAGE_CODE = 'zh' +LANGUAGES = ( + ('en', 'English'), + ('zh', '简体中文'), +) +USE_I18N = True +LOCALE_PATHS = ( + os.path.join(BASE_DIR, 'static/i18n'), +) +USE_L10N = True +MODELTRANSLATION_FALLBACK_LANGUAGES = ('zh', 'en') +MIDDLEWARE = [ + 'django.middleware.gzip.GZipMiddleware', + 'dongtai_common.common.utils.CSPMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.locale.LocaleMiddleware', + 'django.middleware.security.SecurityMiddleware', + 'corsheaders.middleware.CorsMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'xff.middleware.XForwardedForMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +] + +XFF_TRUSTED_PROXY_DEPTH = 20 + +CSRF_COOKIE_NAME = "DTCsrfToken" +CSRF_HEADER_NAME = "HTTP_CSRF_TOKEN" + + +def safe_execute(default, exception, function, *args): + try: + return function(*args) + except exception: + return default + + +CSRF_TRUSTED_ORIGINS = tuple( + filter( + lambda x: x != "", + safe_execute("", BaseException, config.get, "security", + "csrf_trust_origins").split(","))) +CSRF_COOKIE_AGE = 60 * 60 * 24 + +AGENT_UPGRADE_URL = "https://www.huoxian.cn" +CORS_ALLOWED_ORIGINS = [ + 'https://dongtai.io', +] + +CORS_ORIGIN_REGEX_WHITELIST = [ + r"^https://\w+\.huoxian.cn:(\:\d+)?$", + r"^https://\w+\.dongtai_common.io:(\:\d+)?$", +] + +CORS_ALLOW_CREDENTIALS = True +CORS_ALLOW_METHODS = [ + 'GET', + 'OPTIONS', + 'POST', + 'PUT', + 'DELETE' +] + +CORS_ALLOW_HEADERS = [ + 'accept', + 'accept-encoding', + 'authorization', + 'content-type', + 'dnt', + 'origin', + 'referer', + 'x-token', + 'user-agent', + 'x-csrftoken', + 'csrf-token', + 'x-requested-with', + 'x_http_method_override' +] + +ROOT_URLCONF = 'dongtai_conf.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [os.path.join(BASE_DIR, 'static/templates')], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'dongtai_conf.wsgi.application' + +DATABASES = { + 'default': { + 'CONN_MAX_AGE': 900, + 'ENGINE': 'django.db.backends.mysql', + 'USER': config.get("mysql", 'user'), + 'NAME': config.get("mysql", 'name'), + 'PASSWORD': config.get("mysql", 'password'), + 'HOST': config.get("mysql", 'host'), + 'PORT': config.get("mysql", 'port'), + 'OPTIONS': { + # 'init_command': + # 'SET NAMES utf8mb4;SET collation_server=utf8mb4_general_ci;SET collation_database=utf8mb4_general_ci; ', + 'charset': 'utf8mb4', + 'use_unicode': True, + }, + 'TEST': { + 'USER': config.get("mysql", 'user'), + 'NAME': config.get("mysql", 'name'), + 'PASSWORD': config.get("mysql", 'password'), + 'HOST': config.get("mysql", 'host'), + 'PORT': config.get("mysql", 'port'), + } + } +} +REDIS_URL = 'redis://:%(password)s@%(host)s:%(port)s/%(db)s' % { + 'password': config.get("redis", 'password'), + 'host': config.get("redis", 'host'), + 'port': config.get("redis", 'port'), + 'db': config.get("redis", 'db'), +} +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, + "OPTIONS": { + 
"CLIENT_CLASS": "django_redis.client.DefaultClient", + } + } +} + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': + 'django.contrib.auth.password_validation.MinimumLengthValidator', + 'OPTIONS': { + 'min_length': 6, + } + }, +] +AUTH_USER_MODEL = 'dongtai_common.User' +TIME_ZONE = "Asia/Shanghai" +STATIC_URL = '/static/' +MEDIA_ROOT = os.path.join(BASE_DIR, 'static') +MEDIA_URL = "/static/media/" +CAPTCHA_IMAGE_SIZE = (80, 45) +CAPTCHA_LENGTH = 4 +CAPTCHA_TIMEOUT = 1 +LOGGING_LEVEL = 'DEBUG' if DEBUG else 'ERROR' +if os.getenv('environment', None) == 'TEST': + LOGGING_LEVEL = 'INFO' +LOGGING_LEVEL = safe_execute(LOGGING_LEVEL, BaseException, config.get, "other", + "logging_level") +# 报告存储位置 +try: + TMP_COMMON_PATH = config.get('common_file_path', 'tmp_path') +except Exception as e: + TMP_COMMON_PATH = "/tmp/logstash" + +# 图片二级存储路径 +try: + REPORT_IMG_FILES_PATH = config.get('common_file_path', 'report_img') +except Exception as e: + REPORT_IMG_FILES_PATH = "report/img" + +# report html二级存储路径 +try: + REPORT_HTML_FILES_PATH = config.get('common_file_path', 'report_html') +except Exception as e: + REPORT_HTML_FILES_PATH = "report/html" + +# report pdf二级存储路径 +try: + REPORT_PDF_FILES_PATH = config.get('common_file_path', 'report_pdf') +except Exception as e: + REPORT_PDF_FILES_PATH = "report/pdf" +# report word 二级存储路径 +try: + REPORT_WORD_FILES_PATH = config.get('common_file_path', 'report_word') +except Exception as e: + REPORT_WORD_FILES_PATH = "report/word" +# report excel 二级存储路径 +try: + REPORT_EXCEL_FILES_PATH = config.get('common_file_path', 'report_excel') +except Exception as e: + REPORT_EXCEL_FILES_PATH = "report/excel" +FILES_SIZE_LIMIT = 1024 * 1024 * 50 +# # 报告二级存储路径 +# try: +# REPORT_IMG_FILES_PATH = config.get('common_file_path', 'report_img') +# except Exception as e: +# REPORT_IMG_FILES_PATH = "report/img" + + +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'verbose': { + 'format': + u'{levelname} {asctime} [{module}.{funcName}:{lineno}] {message}', + 'style': '{', + }, + "json": { + "()": "pythonjsonlogger.jsonlogger.JsonFormatter" + }, + }, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + 'formatter': 'verbose' + }, + 'dongtai-webapi': { + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': '/tmp/webapi.log', + 'formatter': 'verbose', + 'encoding': 'utf-8', + }, + 'dongtai.openapi': { + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': '/tmp/openapi.log', + 'formatter': 'verbose', + 'encoding': 'utf-8', + }, + 'dongtai-core': { + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': '/tmp/core.log', + 'formatter': 'verbose', + 'encoding': 'utf-8', + }, + 'celery.apps.worker': { + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': '/tmp/worker.log', + 'formatter': 'verbose' + }, + 'jsonlog': { + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': TMP_COMMON_PATH + '/server.log', + 'formatter': 'json' + } + }, + 'loggers': { + 'django.db.backends': { + 'handlers': ['console'], + 'level': LOGGING_LEVEL, + }, + 'dongtai-webapi': { + 'handlers': ['console', 'dongtai-webapi'], + 'propagate': True, + 'level': LOGGING_LEVEL, + }, + 'dongtai.openapi': { + 'handlers': ['console', 'dongtai.openapi'], + 'propagate': True, + 'level': LOGGING_LEVEL, + }, + 'dongtai-core': { + 'handlers': ['console', 'dongtai-webapi'], + 'propagate': True, + 'level': LOGGING_LEVEL, + }, + 'django': { + 'handlers': ['console', 'dongtai-webapi'], + 'propagate': True, + 'level': LOGGING_LEVEL, + }, + 
'dongtai-engine': { + 'handlers': ['console', 'dongtai-webapi'], + 'propagate': True, + 'level': LOGGING_LEVEL, + }, + 'celery.apps.worker': { + 'handlers': ['console', 'celery.apps.worker'], + 'propagate': True, + 'level': LOGGING_LEVEL, + }, + 'jsonlogger': { # it use to logging to local logstash file + 'handlers': ['jsonlog'], + 'propagate': True, + 'level': 'DEBUG', + }, + } +} +REST_PROXY = { + 'HOST': config.get("engine", 'url'), +} + +OPENAPI = config.get("apiserver", "url") + +# notify +EMAIL_SERVER = config.get('smtp', 'server') +EMAIL_USER = config.get('smtp', 'user') +EMAIL_PASSWORD = config.get('smtp', 'password') +EMAIL_FROM_ADDR = config.get('smtp', 'from_addr') +EMAIL_PORT = config.get('smtp', 'port') +ENABLE_SSL = config.get('smtp', 'ssl') == 'True' +ADMIN_EMAIL = config.get('smtp', 'cc_addr') +SESSION_COOKIE_DOMAIN = None +CSRF_COOKIE_DOMAIN = None + +SECURE_BROWSER_XSS_FILTER = True +SECURE_CONTENT_TYPE_NOSNIFF = True +X_FRAME_OPTIONS = 'DENY' + +TEST_RUNNER = 'test.NoDbTestRunner' + + +# if os.getenv('environment', None) == 'TEST' or os.getenv('REQUESTLOG', +# None) == 'TRUE': +# MIDDLEWARE.insert(0, 'apitimelog.middleware.RequestLogMiddleware') + + +if os.getenv('PYTHONAGENT', None) == 'TRUE': + MIDDLEWARE.insert( + 0, 'dongtai_agent_python.middlewares.django_middleware.FireMiddleware') +if os.getenv('environment', None) == 'TEST' or os.getenv('SAVEEYE', + None) == 'TRUE': + CAPTCHA_NOISE_FUNCTIONS = ('captcha.helpers.noise_null', ) +if os.getenv('environment', 'PROD') in ('TEST', 'DOC') or os.getenv( + 'DOC', None) == 'TRUE': + from django.utils.translation import gettext_lazy as _ + INSTALLED_APPS.append('drf_spectacular') + SPECTACULAR_SETTINGS = { + 'TITLE': + 'DongTai WebApi Doc', + 'VERSION': + "1.1.0", + 'PREPROCESSING_HOOKS': + ['drf_spectacular.hooks.preprocess_exclude_path_format'], + 'URL_FORMAT_OVERRIDE': + None, + 'DESCRIPTION': + _("""Here is the API documentation in dongtai_conf. The corresponding management part API can be found through the relevant tag. + +There are two authentication methods. You can obtain csrf_token and sessionid through the login process, or access the corresponding API through the user's corresponding Token. 
+ +The Token method is recommended here, and users can find it in the Agent installation interface such as -H + 'Authorization: Token {token}', here is the token corresponding to the user, the token method also requires a token like this on the request header.""" + ), + } + REST_FRAMEWORK[ + 'DEFAULT_SCHEMA_CLASS'] = 'drf_spectacular.openapi.AutoSchema' + + +if os.getenv('environment', None) == 'TEST' or os.getenv('CPROFILE', + None) == 'TRUE': + DJANGO_CPROFILE_MIDDLEWARE_REQUIRE_STAFF = False + MIDDLEWARE.append( + 'django_cprofile_middleware.middleware.ProfilerMiddleware') + +try: + SCA_BASE_URL = config.get('sca', 'base_url') + SCA_TIMEOUT = config.getint('sca', 'timeout') + SCA_TOKEN = config.get('sca', 'token') + SCA_SETUP = True if SCA_TOKEN else False +except BaseException: + SCA_BASE_URL = '' + SCA_TIMEOUT = 0 + SCA_TOKEN = "" + SCA_SETUP = False + + +if os.getenv('environment', None) in ('TEST', 'PROD'): + SESSION_COOKIE_DOMAIN = config.get('other', + 'demo_session_cookie_domain') + CSRF_COOKIE_DOMAIN = SESSION_COOKIE_DOMAIN + DOMAIN = config.get('other', 'domain') + +try: + DOMAIN_VUL = config.get('other', 'domain_vul') +except Exception as e: + DOMAIN_VUL = "http://localhost" + +# OPENAPI +BUCKET_URL = 'https://oss-cn-beijing.aliyuncs.com' +BUCKET_NAME = 'dongtai' +BUCKET_NAME_BASE_URL = 'agent/' if os.getenv('active.profile', + None) != 'TEST' else 'agent_test/' +VERSION = 'latest' +# CONST +PENDING = 1 +VERIFYING = 2 +CONFIRMED = 3 +IGNORE = 4 +SOLVED = 5 +ENGINE_URL = config.get("engine", "url") +HEALTH_ENGINE_URL = urljoin(ENGINE_URL, "/api/engine/health") +BASE_ENGINE_URL = config.get("engine", "url") + \ + '/api/engine/run?method_pool_id={id}' +SCA_ENGINE_URL = config.get("engine", "url") + '/api/engine/sca?agent_id={agent_id}' \ + + '&package_path={package_path}&package_signature={package_signature}' \ + + '&package_name={package_name}&package_algorithm={package_algorithm}' +REPLAY_ENGINE_URL = config.get( + "engine", "url") + '/api/engine/run?method_pool_id={id}&model=replay' + +CELERY_BROKER_URL = 'redis://:%(password)s@%(host)s:%(port)s/%(db)s' % { + 'password': config.get("redis", 'password'), + 'host': config.get("redis", 'host'), + 'port': config.get("redis", 'port'), + 'db': config.get("redis", 'db'), +} +CELERY_RESULT_EXPIRES = 600 +# CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s' +# CELERY_WORKER_LOG_FORMAT = '%(message)s' +# CELERY_WORKER_TASK_LOG_FORMAT = '%(task_id)s %(task_name)s %(message)s' +CELERY_WORKER_TASK_LOG_FORMAT = '%(message)s' +# CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s' +CELERY_WORKER_LOG_FORMAT = '%(message)s' +CELERY_TASK_EAGER_PROPAGATES = True +CELERY_WORKER_REDIRECT_STDOUTS = True +CELERY_WORKER_REDIRECT_STDOUTS_LEVEL = "ERROR" +# CELERY_WORKER_HIJACK_ROOT_LOGGER = True +CELERY_WORKER_MAX_TASKS_PER_CHILD = 5000 + +CELERY_TASK_SOFT_TIME_LIMIT = 3600 +CELERY_TASK_REJECT_ON_WORKER_LOST = True +DJANGO_CELERY_BEAT_TZ_AWARE = False + +DONGTAI_CELERY_CACHE_PREHEAT = safe_execute( + True, BaseException, lambda x, y: literal_eval(config.get(x, y)), "other", + "cache_preheat") +DEFAULT_CIRCUITCONFIG = { + 'SYSTEM': { + "name": + "系统配置", + "metric_group": + 1, + "interval": + 1, + "deal": + 1, + "is_enable": + 1, + "is_deleted": + 0, + "targets": [], + "metrics": [{ + "metric_type": 1, + "opt": 5, + "value": 100 + }, { + "metric_type": 2, + "opt": 5, + "value": 100 + }, { + "metric_type": 3, + "opt": 5, + "value": 1000000000 + }] + }, + 'JVM': { + "name": + "JVM", + "metric_group": 
+ 2, + "interval": + 1, + "deal": + 1, + "is_enable": + 1, + "is_deleted": + 0, + "targets": [], + "metrics": [{ + "metric_type": 4, + "opt": 5, + "value": 100 + }, { + "metric_type": 5, + "opt": 5, + "value": 1000000000 + }, { + "metric_type": 6, + "opt": 5, + "value": 1000000 + }, { + "metric_type": 7, + "opt": 5, + "value": 1000000 + }, { + "metric_type": 8, + "opt": 5, + "value": 1000000 + }] + }, + 'APPLICATION': { + "name": + "应用配置", + "metric_group": + 3, + "interval": + 1, + "deal": + 1, + "is_enable": + 1, + "is_deleted": + 0, + "targets": [], + "metrics": [{ + "metric_type": 9, + "opt": 5, + "value": 10000 + }, { + "metric_type": 10, + "opt": 5, + "value": 100000000 + }] + } +} +DONGTAI_MAX_RATE_LIMIT = 10 +DONGTAI_REDIS_ES_UPDATE_BATCH_SIZE = 500 +DONGTAI_MAX_BATCH_TASK_CONCORRENCY = 5 + +ELASTICSEARCH_STATE = config.get('elastic_search', 'enable') == 'true' + + +def get_elasticsearch_conf() -> List[str]: + hoststr = config.get('elastic_search', 'host') + return hoststr.split(',') + + +if ELASTICSEARCH_STATE: + INSTALLED_APPS.append('django_elasticsearch_dsl') + ELASTICSEARCH_DSL = { + 'default': { + 'hosts': get_elasticsearch_conf() + }, + } + ASSET_VUL_INDEX = config.get('elastic_search', 'asset_vul_index') + VULNERABILITY_INDEX = config.get('elastic_search', 'vulnerability_index') + ASSET_AGGR_INDEX = config.get('elastic_search', 'asset_aggr_index') + METHOD_POOL_INDEX = config.get('elastic_search', 'method_pool_index') + ASSET_INDEX = config.get('elastic_search', 'asset_index') + ELASTICSEARCH_DSL_PARALLEL = True + ELASTICSEARCH_DSL_AUTO_REFRESH = False + ELASTICSEARCH_DSL_SIGNAL_PROCESSOR = 'dongtai_common.utils.es.DTCelerySignalProcessor' + from elasticsearch import logger as es_logger + import elasticsearch + es_logger.setLevel(elasticsearch.logging.INFO) +else: + ELASTICSEARCH_DSL = { + 'default': { + }, + } + ASSET_VUL_INDEX = '' + VULNERABILITY_INDEX = '' + ASSET_AGGR_INDEX = '' + METHOD_POOL_INDEX = '' + ASSET_INDEX = '' + + +def is_gevent_monkey_patched() -> bool: + try: + from gevent import monkey + except ImportError: + return False + else: + return bool(monkey.saved) + + +def set_asyncio_policy(): + import asyncio_gevent + import asyncio + state = is_gevent_monkey_patched() + print(f"is in gevent patched : {state}") + if state: + asyncio.set_event_loop_policy(asyncio_gevent.EventLoopPolicy()) + + +set_asyncio_policy() diff --git a/dongtai_conf/urls.py b/dongtai_conf/urls.py new file mode 100644 index 000000000..7d42dff43 --- /dev/null +++ b/dongtai_conf/urls.py @@ -0,0 +1,39 @@ +"""dongtai_conf URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/3.0/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.conf.urls.static import static +from django.urls import include, path +import os +from dongtai_conf import settings +from django.views.decorators.csrf import csrf_exempt + +urlpatterns = [ + path('', include('{}.urls'.format(app))) for app in settings.CUSTOM_APPS +] +urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) + +if os.getenv('environment', 'PROD') in ('TEST', 'DOC') or os.getenv('DOC', None) == 'TRUE': + from drf_spectacular.views import SpectacularJSONAPIView, SpectacularRedocView, SpectacularSwaggerView + urlpatterns.extend([ + path('api/XZPcGFKoxYXScwGjQtJx8u/schema/', + SpectacularJSONAPIView.as_view(), + name='schema'), + path('api/XZPcGFKoxYXScwGjQtJx8u/schema/swagger-ui/', + SpectacularSwaggerView.as_view(url_name='schema'), + name='swagger-ui'), + path('api/XZPcGFKoxYXScwGjQtJx8u/schema/redoc/', + SpectacularRedocView.as_view(url_name='schema'), + name='redoc'), + ]) diff --git a/dongtai_conf/utils.py b/dongtai_conf/utils.py new file mode 100644 index 000000000..aefab9489 --- /dev/null +++ b/dongtai_conf/utils.py @@ -0,0 +1,37 @@ +import boto3 +import base64 +from botocore.exceptions import ClientError +import json +import os +import sys + + +def aws_get_secret(base_dir: str): + aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID') + aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY') + region_name = os.getenv('REGION_NAME') + secret_name = os.getenv('SECRET_NAME') + if not all( + [aws_access_key_id, aws_secret_access_key, region_name, secret_name]): + sys.exit("environment not set") + session = boto3.session.Session() + client = session.client(aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + service_name='secretsmanager', + region_name=region_name) + + get_secret_value_response = client.get_secret_value(SecretId=secret_name) + + config = json.loads(get_secret_value_response['SecretString'])['iast-config'] + raw = config.encode('raw_unicode_escape') + + with open(os.path.join(base_dir, 'dongtai_conf/conf/config.ini'), + 'w') as fp: + fp.write(base64.b64decode(raw).decode('utf-8')) + + +def get_config(base_dir: str, target_cloud: str): + if target_cloud == 'AWS': + aws_get_secret(base_dir) + else: + print('use local file') diff --git a/AgentServer/wsgi.py b/dongtai_conf/wsgi.py similarity index 72% rename from AgentServer/wsgi.py rename to dongtai_conf/wsgi.py index cdd49f28f..1f5fbcf15 100644 --- a/AgentServer/wsgi.py +++ b/dongtai_conf/wsgi.py @@ -1,5 +1,5 @@ """ -WSGI config for AgentServer project. +WSGI config for dongtai_conf project. It exposes the WSGI callable as a module-level variable named ``application``. 
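The `get_config` helper above only pulls configuration from AWS Secrets Manager when `TARGET_SECRETSMANAGER` is `AWS`; `aws_get_secret` then requires AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, REGION_NAME and SECRET_NAME in the environment, and expects the secret's `iast-config` field to hold a base64-encoded `config.ini`, which it decodes and writes to `dongtai_conf/conf/config.ini`. A minimal sketch of producing such a secret value (illustrative only; the layout simply mirrors the decoding logic above):

    import base64
    import json

    # Encode a local config.ini the way aws_get_secret() expects to decode it.
    with open('config.ini', 'rb') as fp:
        encoded = base64.b64encode(fp.read()).decode('utf-8')

    # Store this JSON document as the SecretString of the secret named in SECRET_NAME.
    print(json.dumps({'iast-config': encoded}))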
@@ -11,6 +11,6 @@ from django.core.wsgi import get_wsgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'AgentServer.settings') +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dongtai_conf.settings') application = get_wsgi_application() diff --git a/apiserver/base/__init__.py b/dongtai_engine/__init__.py similarity index 55% rename from apiserver/base/__init__.py rename to dongtai_engine/__init__.py index 25a22ff71..ca4d40024 100644 --- a/apiserver/base/__init__.py +++ b/dongtai_engine/__init__.py @@ -1,6 +1,6 @@ #!/usr/bin/env python #-*- coding:utf-8 -*- # author:owefsad -# datetime:2021/1/12 下午7:45 +# datetime:2021/1/26 下午4:05 # software: PyCharm -# project: lingzhi-agent-server +# project: lingzhi-engine diff --git a/dongtai_engine/elatic_search/__init__.py b/dongtai_engine/elatic_search/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_engine/elatic_search/data_correction.py b/dongtai_engine/elatic_search/data_correction.py new file mode 100644 index 000000000..81c57c5aa --- /dev/null +++ b/dongtai_engine/elatic_search/data_correction.py @@ -0,0 +1,37 @@ +from dongtai_common.models.vulnerablity import IastVulnerabilityModel, IastVulnerabilityDocument +from celery import shared_task +from django.apps import apps +from django.db import transaction +from dongtai_common.models.asset import Asset, IastAssetDocument +from dongtai_common.models.asset_vul import IastVulAssetRelation, IastAssetVulnerabilityDocument + +from celery.apps.worker import logger + + +@shared_task +def data_correction_interpetor(situation: str): + logger.info(f"data incorrect detected, situation {situation} is handling") + if situation == "project_missing": + data_correction_project(-1) + elif situation == "vulnerablity_sync_fail": + data_correction_all() + + +def data_correction_project(project_id): + qs = IastVulnerabilityModel.objects.filter( + agent__bind_project_id=project_id).all() + IastVulnerabilityDocument().update(list(qs)) + qs = Asset.objects.filter(agent__bind_project_id=project_id).all() + IastAssetDocument().update(list(qs)) + qs = IastVulAssetRelation.objects.filter( + asset__agent__bind_project_id=project_id).all() + IastAssetVulnerabilityDocument().update(list(qs)) + + +def data_correction_all(): + qs = IastVulnerabilityModel.objects.all() + IastVulnerabilityDocument().update(list(qs)) + qs = Asset.objects.all() + IastAssetDocument().update(list(qs)) + qs = IastVulAssetRelation.objects.all() + IastAssetVulnerabilityDocument().update(list(qs)) diff --git a/dongtai_engine/filters/__init__.py b/dongtai_engine/filters/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_engine/filters/main.py b/dongtai_engine/filters/main.py new file mode 100644 index 000000000..204903d53 --- /dev/null +++ b/dongtai_engine/filters/main.py @@ -0,0 +1,58 @@ +from urllib.parse import urlparse + + +def vul_filter(stack, source_sign, sink_sign, taint_value, vul_type, agent_id): + source_signature = stack[0][0]['signature'] + if (vul_type != 'trust-boundary-violation' and source_signature == 'javax.servlet.http.HttpServletRequest.getSession()'): + return False + #if vul_type == 'ssrf' or vul_type == 'unvalidated-redirect': + if vul_type == 'unvalidated-redirect': + try: + target_url = stack[0][-1]['sourceValues'] + res = urlparse(target_url) + afterurl = target_url.replace(taint_value, '') + res_after_replace = urlparse(afterurl) + except Exception as e: + return False + if not res.scheme and not res.netloc: + return True + if res.netloc == 
res_after_replace.netloc: + return False + return True + elif vul_type == 'reflected-xss': + target_signature = stack[0][0]['signature'] + filter_source_signature = [ + 'javax.servlet.http.HttpServletRequest.getHeader(java.lang.String)', + 'javax.servlet.http.HttpServletRequest.getHeaderNames()', + 'javax.servlet.http.HttpServletRequest.getParts()', + 'javax.servlet.http.HttpServletRequest.getPart(java.lang.String)', + 'javax.servlet.http.HttpServletRequest.getHeaders(java.lang.String)', + 'jakarta.servlet.http.HttpServletRequest.getHeader(java.lang.String)', + 'jakarta.servlet.http.HttpServletRequest.getHeaders(java.lang.String)', + 'jakarta.servlet.http.HttpServletRequest.getHeaderNames()', + 'jakarta.servlet.http.HttpServletRequest.getParts()', + 'jakarta.servlet.http.HttpServletRequest.getPart(java.lang.String)', + 'org.apache.struts2.dispatcher.multipart.MultiPartRequest.getParameterValues(java.lang.String)', + 'org.apache.commons.fileupload.FileUploadBase.parseRequest(org.apache.commons.fileupload.RequestContext)' + ] + if target_signature in filter_source_signature: + return False + return True + elif vul_type == 'reflection-injection': + target_value = stack[0][-1]['sourceValues'] + if target_value.startswith('sun.net.www.protocol'): + return False + return True + elif vul_type == 'unsafe-json-deserialize': + if stack[0][-1]['signature'].startswith('com.alibaba.fastjson'): + from dongtai_common.models.asset import Asset + asset = Asset.objects.filter( + agent_id=agent_id, + package_name__icontains="maven:com.alibaba:fastjson:").values( + 'version').first() + if asset: + from packaging import version + if version.parse(asset['version']) > version.parse('1.2.80'): + return False + return True + return True diff --git a/dongtai_engine/filters/utils.py b/dongtai_engine/filters/utils.py new file mode 100644 index 000000000..4a190944c --- /dev/null +++ b/dongtai_engine/filters/utils.py @@ -0,0 +1,36 @@ +from http.client import parse_headers +#Request{method=GET, url=http://www.baidu.com/, tag=null} + +class JavaObjects: + + def __init__(self, objects_classname, objects_attrs): + self.objects_classname = objects_classname + self.objects_attrs = objects_attrs + for name, value in objects_attrs: + setattr(self, name, value) + + def __str__(self): + attrs_string = ', '.join( + [f'{name}={value}' for name, value in self.objects_attrs]) + return f"{self.objects_classname}{{{attrs_string}}}" + + +def parse_java_objects(objects_string: str): + objects_classname = objects_string[:objects_string.index('{')] + objects_attrstring = objects_string[objects_string.index('{'):] + objects_attrs = [ + attr.split('=', 2) + for attr in objects_attrstring.strip('{}').split(', ') + ] + return JavaObjects(objects_classname, objects_attrs) + + +from io import BytesIO +from tempfile import TemporaryFile, SpooledTemporaryFile + + +def parse_headers_dict_from_bytes(header_bytes: bytes) -> dict: + with SpooledTemporaryFile(max_size=10000) as fp: + fp.write(header_bytes) + fp.seek(0) + return dict(parse_headers(fp)) diff --git a/dongtai_engine/plugins/__init__.py b/dongtai_engine/plugins/__init__.py new file mode 100644 index 000000000..28084df44 --- /dev/null +++ b/dongtai_engine/plugins/__init__.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/10/22 下午2:26 +# project: DongTai-engine + +from dongtai_common.models.project import IastProject +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.utils import const + + +def 
is_strategy_enable(vul_type, method_pool): + try: + vul_strategy = IastStrategyModel.objects.filter( + vul_type=vul_type, + state=const.STRATEGY_ENABLE, + user_id__in=(1, method_pool.agent.user.id) + ).first() + if vul_strategy is None: + return False + project_id = method_pool.agent.bind_project_id + project = IastProject.objects.filter(id=project_id).first() + if project is None: + return False + strategy_ids = project.scan.content + if strategy_ids is None: + return False + if str(vul_strategy.id) in strategy_ids.split(','): + return True + return False + except Exception as e: + return False diff --git a/dongtai_engine/plugins/data_clean.py b/dongtai_engine/plugins/data_clean.py new file mode 100644 index 000000000..f226ec682 --- /dev/null +++ b/dongtai_engine/plugins/data_clean.py @@ -0,0 +1,91 @@ +from dongtai_common.models.vulnerablity import IastVulnerabilityModel, IastVulnerabilityDocument +from celery import shared_task +from django.apps import apps +from django.db import transaction +from dongtai_common.models.asset import Asset, IastAssetDocument +from dongtai_common.models.asset_vul import IastVulAssetRelation, IastAssetVulnerabilityDocument +from dongtai_common.models.agent_method_pool import MethodPool +from time import time +from celery.apps.worker import logger +from dongtai_conf.settings import ELASTICSEARCH_STATE +from asgiref.sync import sync_to_async +import asyncio +from typing import List, Tuple +import asyncio_gevent + + +DELETE_BATCH_SIZE = 10000 + + +def chunked_queryset(queryset, chunk_size): + """ Slice a queryset into chunks. """ + + start_pk = 0 + queryset = queryset.order_by('pk') + + while True: + # No entry left + if not queryset.filter(pk__gt=start_pk).exists(): + break + + try: + # Fetch chunk_size entries if possible + end_pk = queryset.filter(pk__gt=start_pk).values_list( + 'pk', flat=True)[chunk_size - 1] + + # Fetch rest entries if less than chunk_size left + except IndexError: + end_pk = queryset.values_list('pk', flat=True).last() + + yield queryset.filter(pk__gt=start_pk).filter(pk__lte=end_pk) + + start_pk = end_pk + + +@shared_task(queue='dongtai-periodic-task', + time_limit=60 * 60 * 2, + soft_time_limit=60 * 60 * 4) +def data_cleanup(days: int): + delete_time_stamp = int(time()) - 60 * 60 * 24 * days + if ELASTICSEARCH_STATE: + # use delete to trigger the signal to delete related elasticsearch doc + qs = MethodPool.objects.filter(update_time__lte=delete_time_stamp) + for i in chunked_queryset(qs, DELETE_BATCH_SIZE): + i.delete() + else: + # use _raw_delete to reduce the delete time and memory usage. + # it could aviod to load every instance into memory. 
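+        # The oldest and newest matching ids computed below bound the id ranges
+        # that batch_clean() deletes concurrently (asyncio coroutines on a gevent
+        # loop), calling _raw_delete() on each slice.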
+ latest_id = MethodPool.objects.filter( + update_time__lte=delete_time_stamp).order_by('-id').values_list( + 'id', flat=True).first() + first_id = MethodPool.objects.filter( + update_time__lte=delete_time_stamp).order_by('id').values_list( + 'id', flat=True).first() + if not any([latest_id, first_id]): + logger.info("no data for clean up") + batch_clean(latest_id, first_id, 10000) + #qs = MethodPool.objects.filter(pk__lte=latest_id) + #qs._raw_delete(qs.db) + + +@sync_to_async(thread_sensitive=False) +def data_clean_batch(upper_id: int, lower_id: int): + qs = MethodPool.objects.filter(pk__lt=upper_id, pk__gte=lower_id) + logger.info(f"data cleaning {upper_id}-{lower_id} ") + qs._raw_delete(qs.db) + + +@asyncio_gevent.async_to_sync +async def loop_main(range_list: List[Tuple[int, int]]): + coros = [ + data_clean_batch(upper_id, lower_id) + for upper_id, lower_id in range_list + ] + await asyncio.gather(*coros) + + +def batch_clean(upper_id: int, lower_id: int, batch_size: int): + chunk_range = list( + zip(range(lower_id + batch_size, upper_id + batch_size, batch_size), + range(lower_id, upper_id, batch_size))) + loop_main(chunk_range) diff --git a/dongtai_engine/plugins/strategy_headers.py b/dongtai_engine/plugins/strategy_headers.py new file mode 100644 index 000000000..2f9986072 --- /dev/null +++ b/dongtai_engine/plugins/strategy_headers.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/10/22 下午2:26 +# project: DongTai-engine +import random +import time +from http.client import HTTPResponse +from io import BytesIO + +from celery.apps.worker import logger +from django.db.models import Q +from dongtai_common.models.project import IastProject +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.utils import const + +from dongtai_engine.plugins import is_strategy_enable +from dongtai_web.vul_log.vul_log import log_vul_found, log_recheck_vul + +class FakeSocket(): + def __init__(self, response_str): + self._file = BytesIO(response_str) + + def makefile(self, *args, **kwargs): + return self._file + + +def parse_response(http_response_str): + source = FakeSocket(http_response_str.encode()) + response = HTTPResponse(source) + response.begin() + return response + + +def check_csp(response): + if response.getheader('Content-Security-Policy') is None: + return True + + +def check_x_xss_protection(response): + if response.getheader('X-XSS-Protection') is None: + return True + if response.getheader('X-XSS-Protection').strip() == '0': + return True + + +def check_strict_transport_security(response): + if response.getheader('Strict-Transport-Security'): + # parse max-age + import re + result = re.match('max-age=(\\d+);.*?', response.getheader('Strict-Transport-Security')) + if result is None: + return + max_age = result.group(1) + if int(max_age) < 15768000: + return True + + +def check_x_frame_options(response): + if response.getheader('X-Frame-Options') is None: + return True + + +def check_x_content_type_options(response): + if response.getheader('X-Content-Type-Options') is None: + return True + + +def check_response_header(method_pool): + try: + response = parse_response(method_pool.res_header.strip() + '\n\n' + method_pool.res_body.strip()) + if check_csp(response): + save_vul('Response Without Content-Security-Policy Header', method_pool, position='HTTP Response Header') + if check_x_xss_protection(response): + save_vul('Response 
With X-XSS-Protection Disabled', method_pool) + if check_strict_transport_security(response): + save_vul('Response With Insecurely Configured Strict-Transport-Security Header', method_pool, + position='HTTP Response Header') + if check_x_frame_options(response): + save_vul('Pages Without Anti-Clickjacking Controls', method_pool, position='HTTP Response Header') + if check_x_content_type_options(response): + save_vul('Response Without X-Content-Type-Options Header', method_pool, position='HTTP Response Header') + except Exception as e: + logger.error("check_response_header failed, reason: " + str(e)) + + +from django.core.cache import cache +import uuid + +def save_vul(vul_type, method_pool, position=None, data=None): + if is_strategy_enable(vul_type, method_pool) is False: + return None + vul_strategy = IastStrategyModel.objects.filter( + vul_type=vul_type, + state=const.STRATEGY_ENABLE, + user_id__in=(1, method_pool.agent.user.id) + ).first() + if vul_strategy is None: + logger.error(f'There is no corresponding strategy for the current vulnerability: {vul_type}') + + from dongtai_common.models.agent import IastAgent + project_agents = IastAgent.objects.filter( + project_version_id=method_pool.agent.project_version_id) + uuid_key = uuid.uuid4().hex + cache_key = f'vul_save-{vul_strategy.id}-{method_pool.uri}-{method_pool.http_method}-{method_pool.agent.project_version_id}' + is_api_cached = uuid_key != cache.get_or_set(cache_key, uuid_key) + if is_api_cached: + return + vul = IastVulnerabilityModel.objects.filter( + strategy_id=vul_strategy.id, + uri=method_pool.uri, + http_method=method_pool.http_method, + agent__project_version_id=method_pool.agent.project_version_id, + ).order_by('-latest_time').first() + timestamp = int(time.time()) + IastProject.objects.filter(id=method_pool.agent.bind_project_id).update(latest_time=timestamp) + if vul: + vul.url = vul.url + vul.req_header = method_pool.req_header + vul.req_params = method_pool.req_params + vul.req_data = method_pool.req_data + vul.res_header = method_pool.res_header + vul.res_body = method_pool.res_body + vul.taint_value = data + vul.taint_position = position + vul.context_path = method_pool.context_path + vul.client_ip = method_pool.clent_ip + vul.counts = vul.counts + 1 + vul.latest_time = timestamp + vul.method_pool_id = method_pool.id + vul.save(update_fields=[ + 'url', 'req_header', 'req_params', 'req_data', 'res_header', + 'res_body', 'taint_value', 'taint_position', 'context_path', + 'client_ip', 'counts', 'latest_time', 'method_pool_id', + 'latest_time_desc' + ]) + else: + from dongtai_common.models.hook_type import HookType + hook_type = HookType.objects.filter(vul_strategy_id=vul_strategy.id).first() + vul = IastVulnerabilityModel.objects.create( + strategy=vul_strategy, + # fixme: remove field + hook_type=hook_type if hook_type else HookType.objects.first(), + level=vul_strategy.level, + url=method_pool.url, + uri=method_pool.uri, + http_method=method_pool.http_method, + http_scheme=method_pool.http_scheme, + http_protocol=method_pool.http_protocol, + req_header=method_pool.req_header, + req_params=method_pool.req_params, + req_data=method_pool.req_data, + res_header=method_pool.res_header, + res_body=method_pool.res_body, + full_stack=None, + top_stack=None, + bottom_stack=None, + taint_value=data, + taint_position=position, + agent=method_pool.agent, + context_path=method_pool.context_path, + counts=1, + status_id=const.VUL_CONFIRMED, + first_time=method_pool.create_time, + latest_time=timestamp, + 
client_ip=method_pool.clent_ip, + param_name=None, + method_pool_id=method_pool.id + ) + log_vul_found(vul.agent.user_id, vul.agent.bind_project.name, + vul.agent.bind_project_id, vul.id, vul.strategy.vul_name) + cache.delete(cache_key) + #delete if exists more than one departured use redis lock + #IastVulnerabilityModel.objects.filter( + # strategy=vul_strategy.id, + # uri=method_pool.uri, + # http_method=method_pool.http_method, + # agent__in=project_agents, + # pk__lt=vul.id, + #).delete() diff --git a/dongtai_engine/plugins/strategy_sensitive.py b/dongtai_engine/plugins/strategy_sensitive.py new file mode 100644 index 000000000..accc60f3a --- /dev/null +++ b/dongtai_engine/plugins/strategy_sensitive.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/10/22 下午2:29 +# project: DongTai-engine +# desc: data rule, response field rule, sql field rule +import re2 as re + +import jq +from dongtai_common.models.sensitive_info import IastSensitiveInfoRule +from dongtai_common.utils import const +from celery.apps.worker import logger + +from dongtai_engine.plugins.strategy_headers import save_vul + + +def parse_json_response(res_body): + try: + import json + return json.loads(res_body) + except Exception as e: + return None + + +def check_response_content(method_pool): + rules = IastSensitiveInfoRule.objects.filter(status=const.HOOK_TYPE_ENABLE) + needed_check_data = {} + if rules.values("id").count() > 0: + if method_pool.res_body: + needed_check_data['HTTP Response Body'] = method_pool.res_body + if method_pool.req_params: + needed_check_data['HTTP Request Params'] = method_pool.req_params + if method_pool.req_data: + needed_check_data['HTTP Request Data'] = method_pool.req_data + else: + return + + json_response = parse_json_response(method_pool.res_body) + for rule in rules: + try: + if rule.pattern_type.id == 1: + pattern = re.compile(rule.pattern, re.M) + for key, value in needed_check_data.items(): + try: + result = pattern.search(value) + if result and result.groups(): + save_vul( + vul_type=rule.strategy.vul_type, + method_pool=method_pool, + position=key, + data=result.group(0) + ) + except Exception as e: + logger.error( + f'check_response_content error, rule: {rule.id}, rule name: {rule.strategy.vul_type}, reason: {e}', exc_info=True) + elif json_response and rule.pattern_type.id == 2: + pattern = jq.compile(rule.pattern) + result = pattern.input(json_response).all() + if result: + save_vul( + vul_type=rule.strategy.vul_type, + method_pool=method_pool, + position='HTTP Response Body', + data=' '.join(result) + ) + except Exception as e: + logger.error( + f'check_response_content error, rule: {rule.id}, rule name: {rule.strategy.vul_type}, reason: {e}', exc_info=True) + + search_id_card_leak(method_pool) + + +def search_id_card_leak(method_pool): + pattern = re.compile( + r'([1-9]\d{5}(18|19|([23]\d))\d{2}((0[1-9])|(10|11|12))(([0-2][1-9])|10|20|30|31)\d{3}[0-9Xx])|([1-9]\d{5}\d{2}((0[1-9])|(10|11|12))(([0-2][1-9])|10|20|30|31)\d{3})', + re.M) + needed_check_data = {} + needed_check_data['HTTP Response Body'] = method_pool.res_body + needed_check_data['HTTP Request Params'] = method_pool.req_params + needed_check_data['HTTP Request Data'] = method_pool.req_data + + for key, value in needed_check_data.items(): + try: + if value is None: + continue + result = pattern.search(value) + if result is None: + continue + card = result.group(1) + if check_id_card(card): + # todo: add highlight to id_card + save_vul(vul_type='ID 
Number Leak', method_pool=method_pool, position=key, data=card) + except Exception as e: + logger.error( + f'check_response_content error, rule name: ID Number Leak, Method Pool ID: {method_pool.id}, reason: {e}') + + +def check_id_card(id_card): + try: + from id_validator import validator + return validator.is_valid(id_card) + except BaseException: + return False diff --git a/dongtai_engine/plugins/strategy_taint.py b/dongtai_engine/plugins/strategy_taint.py new file mode 100644 index 000000000..ae2e205b2 --- /dev/null +++ b/dongtai_engine/plugins/strategy_taint.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/10/22 下午2:28 +# project: DongTai-engine + +# def check_taint(method_pool_model): +# strategies = load_sink_strategy(method_pool_model.agent.user) +# engine = VulEngine() +# +# method_pool = json.loads(method_pool_model.method_pool) if method_pool_model else [] +# engine.method_pool = method_pool +# if method_pool: +# for strategy in strategies: +# if strategy.get('value') in engine.method_pool_signatures: +# search_and_save_vul(engine, method_pool_model, method_pool, strategy) +# diff --git a/dongtai_engine/preheat.py b/dongtai_engine/preheat.py new file mode 100644 index 000000000..2e4a2b4db --- /dev/null +++ b/dongtai_engine/preheat.py @@ -0,0 +1,91 @@ +from importlib import import_module +from celery import shared_task +from celery.apps.worker import logger +from dongtai_common.common.utils import make_hash +from django.core.cache import cache +import random +from datetime import datetime, timedelta +from django.db.utils import OperationalError +from django.db import connection as conn + +#def function_preheat(func__module__: str, func__name__: str, *args, **kwargs): +# module = import_module(func__module__) +# func = getattr(module, func__name__) +# try: +# func(*args, **kwargs) +# except Exception as e: +# logger.error(e, exc_info=True) + + +@shared_task(queue='dongtai-periodic-task') +def function_preheat(): + from django.contrib.admin.models import LogEntry + time_threshold = datetime.now() - timedelta(hours=1) + need_preheat = LogEntry.objects.filter( + action_time__gt=time_threshold).exists() + if need_preheat: + time_min = datetime.now() - timedelta(hours=72) + user_ids = list( + LogEntry.objects.filter(action_time__gt=time_min).values_list( + 'user__id', flat=True).distinct().order_by('user__id').all()) + logger.info(f"user_ids: {user_ids}") + for user_id in user_ids: + for func in PreHeatRegister.functions: + try: + func(user_id) + except OperationalError as e: + logger.error(e, exc_info=True) + logger.error(f'user_id: {user_id}') + logger.error(f'function name : {func.__name__}') + logger.error(f'latest 5 query:{conn.queries[-5:]}') + except Exception as e: + logger.error(e, exc_info=True) + continue + +class PreHeatException(Exception): + pass + + +class PreHeatRegister: + functions = [] + + @classmethod + def register(cls, function): + annotation_dict = function.__annotations__.copy() + if 'return' in annotation_dict: + del annotation_dict['return'] + if not annotation_dict == {'user_id': int}: + logger.info(f'{function.__name__} annotations not fit in') + raise PreHeatException( + 'function is not fit in , please annotation user_id :int in params' + ) + if function in cls.functions: + logger.info( + f'{function.__name__} already in PreHeatRegister.functions') + return + cls.functions.append(function) + logger.debug(f"preheat functions {PreHeatRegister.functions}") + + + 
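`PreHeatRegister.register` above only accepts functions whose single annotated parameter is `user_id: int`, and `function_preheat` then calls every registered function once per recently active user. A minimal sketch of registering a cache-warming function (the function name and body are hypothetical; only the required signature comes from the annotation check above):

    from dongtai_engine.preheat import PreHeatRegister


    def warm_project_summary(user_id: int) -> None:
        # Hypothetical cache-warming work; the only requirement enforced by
        # PreHeatRegister.register is the `user_id: int` annotation.
        pass


    PreHeatRegister.register(warm_project_summary)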
+@shared_task(queue='dongtai-function-flush-data') +def function_flush(func__module__, func__name__, cache_time, args, kwargs): + module = import_module(func__module__) + func = getattr(module, func__name__) + if not getattr(func, '__origin__name__', None) == 'cached': + logger.error( + 'this function is not supported , please use cached to warp function.' + ) + return + origin_func = func.__wrapped__ + random_range = func.__random_range__ + try: + res = origin_func(*args, **kwargs) + cache_key = make_hash((origin_func.__module__ + origin_func.__name__, + tuple(args), kwargs)) + logger.debug(cache_key) + if random_range: + cache_time = random.randint(*random_range) + cache.set(cache_key, res, cache_time) + except Exception as e: + logger.error(e, exc_info=True) diff --git a/dongtai_engine/replay.py b/dongtai_engine/replay.py new file mode 100644 index 000000000..272fdeeb5 --- /dev/null +++ b/dongtai_engine/replay.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/16 下午12:17 +# project: dongtai-engine +from dongtai_common.utils import const + + +class Replay: + """ + 封装重放操作为单独的类 + """ + + def __init__(self, replay): + self.replay = replay + self.vul = None + + @staticmethod + def do_replay(replay): + replay_handler = Replay(replay) + status = replay_handler.has_relation_id() + if status is False: + pass + + def has_relation_id(self): + return self.replay.relation_id is None + + @staticmethod + def replay_failed(replay, timestamp): + """ + 当重放请求处理失败时,执行该方法 + """ + replay.update_time = timestamp + replay.verify_time = timestamp + replay.state = const.SOLVED + replay.result = const.RECHECK_ERROR + replay.save(update_fields=['update_time', 'verify_time', 'state', 'result']) diff --git a/dongtai_engine/signals/__init__.py b/dongtai_engine/signals/__init__.py new file mode 100644 index 000000000..3a0b868c6 --- /dev/null +++ b/dongtai_engine/signals/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/4/30 下午2:57 +# project: dongtai-engine +from .signals import * diff --git a/dongtai_engine/signals/handlers/__init__.py b/dongtai_engine/signals/handlers/__init__.py new file mode 100644 index 000000000..bebb6179c --- /dev/null +++ b/dongtai_engine/signals/handlers/__init__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/4/30 下午2:58 +# project: dongtai-engine + +from .vul_handler import * diff --git a/dongtai_engine/signals/handlers/parse_param_name.py b/dongtai_engine/signals/handlers/parse_param_name.py new file mode 100644 index 000000000..868e97e39 --- /dev/null +++ b/dongtai_engine/signals/handlers/parse_param_name.py @@ -0,0 +1,23 @@ +from django.http.request import QueryDict + + +class ParamDict(QueryDict): + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.__init_extend_kv_dict() + + def __init_extend_kv_dict(self): + self.extend_kv_dict = {} + self.extend_k_map = {} + for k, v in self.items(): + if '=' in v: + origin_string = '='.join([k, v]) + groups = origin_string.split('=') + for i in range(1, len(groups)): + k_ = '='.join(groups[:i]) + v_ = '='.join(groups[i:]) + self.extend_kv_dict[k_] = v_ + self.extend_k_map[k_] = k +def parse_target_values_from_vul_stack(vul_stack): + return [i['targetValues'] for i in vul_stack[0]] diff --git a/dongtai_engine/signals/handlers/vul_handler.py b/dongtai_engine/signals/handlers/vul_handler.py new file mode 100644 index 
000000000..183d0e389 --- /dev/null +++ b/dongtai_engine/signals/handlers/vul_handler.py @@ -0,0 +1,449 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/4/30 下午3:00 +# project: dongtai-engine +import json,random +import time +import requests +from celery.apps.worker import logger +from django.dispatch import receiver +from dongtai_common.models.project import IastProject, VulValidation +from dongtai_common.models.replay_queue import IastReplayQueue +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.utils import const +from dongtai_conf import settings +from dongtai_engine.signals import vul_found +from dongtai_common.utils.systemsettings import get_vul_validate +from dongtai_web.vul_log.vul_log import log_vul_found, log_recheck_vul +from django.db.models import Q +from dongtai_engine.signals.handlers.parse_param_name import parse_target_values_from_vul_stack +from typing import List, Optional, Callable +import json +from collections import defaultdict + +def equals(source, target): + if source == target or source in target or target in source: + return True + + +from dongtai_engine.signals.handlers.parse_param_name import ParamDict + + +def parse_params(param_values: str, taint_value: str) -> Optional[str]: + """ + 从param参数中解析污点的位置 + """ + from urllib.parse import unquote_plus + param_name = None + _param_items = ParamDict(param_values) + for _param_name, _param_value in _param_items.items(): + if taint_value == _param_value or taint_value == _param_name: + param_name = _param_name + break + for _param_name, _param_value in _param_items.extend_kv_dict.items(): + if taint_value == _param_value or taint_value == _param_name: + param_name = _param_items.extend_k_map[_param_name] + break + _param_items = ParamDict(unquote_plus(param_values)) + for _param_name, _param_value in _param_items.items(): + if taint_value == _param_value or taint_value == _param_name: + param_name = _param_name + break + for _param_name, _param_value in _param_items.extend_kv_dict.items(): + if taint_value == _param_value or taint_value == _param_name: + param_name = _param_items.extend_k_map[_param_name] + break + return param_name + + +def parse_body(body: str, taint_value: str) -> Optional[str]: + try: + post_body = json.loads(body) + for key, value in post_body.items(): + if taint_value == value or taint_value == key: + return key + except Exception as e: + return parse_params(body, taint_value) + return None + +from dongtai_engine.filters.utils import parse_headers_dict_from_bytes + + +def parse_header(req_header: str, taint_value: str) -> Optional[str]: + """ + 从header头中解析污点的位置 + """ + import base64 + header_dict = parse_headers_dict_from_bytes(base64.b64decode(req_header)) + for k, v in header_dict.items(): + if v == taint_value or k == taint_value: + return k + return None + + + +def parse_cookie(req_header: str, taint_value: str) -> Optional[str]: + """ + 从cookie中解析 + """ + import base64 + header_raw = base64.b64decode(req_header).decode('utf-8').split('\n') + cookie_raw = '' + for header in header_raw: + # fixme 解析,然后匹配 + _header_list = header.split(':') + _header_name = _header_list[0] + if _header_name == 'cookie' or _header_name == 'Cookie': + cookie_raw = ':'.join(_header_list[1:]) + break + + if cookie_raw: + cookie_raw_items = cookie_raw.split(';') + for item in cookie_raw_items: + cookie_item = item.split('=') + cookie_value = '='.join(cookie_item[1:]) + if taint_value == cookie_value: + return cookie_item[0] + 
return None + +def parse_path(uri: str, taint_value: str) -> Optional[str]: + """ + 从PathVariable中解析污点位置 + """ + # 根据/拆分uri,然后进行对比 + # location part + path_items = uri.split('/') + for ind, item in enumerate(path_items): + if taint_value == item: + # if equals(taint_value, item): + # fixme 暂时先使用完全匹配,后续考虑解决误报问题 + return f"location:{ind}" + return None + + +from functools import lru_cache + + +@lru_cache(maxsize=128) +def get_location_data() -> dict: + try: + with open('/opt/dongtai/static/data/java_params.json') as fp: + data = json.load(fp) + except Exception as e: + logger.error(e,exc_info=True) + data = {} + #return defaultdict(lambda: [], data) + return defaultdict(lambda: ['GET', 'POST', 'HEADER', 'PATH', 'COOKIE'], data) + + +def get_parser_location(source_method: str) -> List[str]: + return get_location_data()[source_method] + + +def get_location_parser(location: str) -> Callable[[str, str], Optional[str]]: + data = { + "GET": parse_params, + "POST": parse_body, + "HEADER": parse_header, + "PATH": parse_path, + "COOKIE": parse_cookie + } + return defaultdict(lambda: lambda http_locationstr, taint_value: None, data)[location] + + +def parse_taint_params(location: str, http_locationstr: Optional[str], + taint_value: str) -> Optional[str]: + if not http_locationstr: + return None + res = get_location_parser(location)(http_locationstr, taint_value) + return res + + +from dongtai_common.models.agent_method_pool import MethodPool + + +def get_http_locationstr(method_pool: MethodPool, + location: str) -> Optional[str]: + data: dict = { + "GET": "req_params", + "POST": "req_data", + "HEADER": "req_header", + "PATH": "uri", + "COOKIE": "req_header", + } + if location not in data.keys(): + return None + return getattr(method_pool, data[location], None) + + +def parse_taint_position(source_method, vul_meta, taint_value, vul_stack) -> dict: + param_names: dict = dict() + target_values: List[str] = list( + filter(lambda x: x, parse_target_values_from_vul_stack(vul_stack))) + for taint_value in target_values: + locations: List[str] = get_parser_location(source_method) + for location in locations: + param_name: Optional[str] = parse_taint_params( + location, get_http_locationstr(vul_meta, location), + taint_value) + if param_name: + param_names[location] = param_name + logger.info(f'污点来自{location}参数: {param_name}') + return param_names + + +from django.core.cache import cache +import uuid +def get_original_url(uri: str, url_desc: str) -> str: + if url_desc.startswith('location'): + _, location = url_desc.split(":") + else: + return uri + res = uri.split('/') + res[int(location)] = "" + return "/".join(res) + + +def get_real_url(method_pools: dict) -> str: + for method_pool in method_pools: + if method_pool[ + 'signature'] == 'org.springframework.web.util.pattern.PathPattern.getPatternString()': + return method_pool['targetValues'] + return '' + + +def save_vul(vul_meta, vul_level, strategy_id, vul_stack, top_stack, bottom_stack, **kwargs): + logger.info( + f'save vul, strategy id: {strategy_id}, from: {"normal" if "replay_id" not in kwargs else "replay"}, id: {vul_meta.id}') + # 如果是重放请求,且重放请求类型为漏洞验证,更新漏洞状态为 + taint_value = kwargs['taint_value'] + timestamp = int(time.time()) + param_names = parse_taint_position(source_method=top_stack, vul_meta=vul_meta, taint_value=taint_value, vul_stack=vul_stack) + if parse_params: + param_name = json.dumps(param_names) + taint_position = '/'.join(param_names.keys()) + else: + param_name = '' + taint_position = '' + url_desc: str = "" + if 'PATH' in 
param_names.keys(): + url_desc = param_names['PATH'] + pattern_string: str = get_real_url(vul_meta.method_pool) + pattern_uri: str = pattern_string if pattern_string else get_original_url( + vul_meta.uri, url_desc) + logger.info(f"agent_id: {vul_meta.agent_id} vul_uri_pattern: {pattern_uri} vul_uri: {vul_meta.uri} param_name: {param_name}") + from dongtai_common.models.agent import IastAgent + project_agents = IastAgent.objects.filter(project_version_id=vul_meta.agent.project_version_id) + uuid_key = uuid.uuid4().hex + cache_key = f'vul_save-{strategy_id}-{vul_meta.uri}-{vul_meta.http_method}-{vul_meta.agent.project_version_id}-{param_name}' + is_api_cached = uuid_key != cache.get_or_set(cache_key, uuid_key) + if is_api_cached: + return + # 获取 相同项目版本下的数据 + vul = IastVulnerabilityModel.objects.filter( + strategy_id=strategy_id, + pattern_uri=pattern_uri, + http_method=vul_meta.http_method, + agent__project_version_id=vul_meta.agent.project_version_id, + param_name=param_name, + ).order_by('-latest_time').first() + IastProject.objects.filter(id=vul_meta.agent.bind_project_id).update(latest_time=timestamp) + if vul: + vul.url = vul_meta.url + vul.pattern_uri = pattern_uri + vul.req_header = vul_meta.req_header + vul.req_params = vul_meta.req_params + vul.req_data = vul_meta.req_data + vul.res_header = vul_meta.res_header + vul.res_body = vul_meta.res_body + vul.taint_value = taint_value + vul.taint_position = taint_position + vul.context_path = vul_meta.context_path + vul.client_ip = vul_meta.clent_ip + vul.top_stack = top_stack + vul.bottom_stack = bottom_stack + vul.counts = vul.counts + 1 + vul.latest_time = timestamp + vul.method_pool_id = vul_meta.id + vul.full_stack = json.dumps(vul_stack, ensure_ascii=False) + vul.save(update_fields=[ + 'url', 'req_header', 'req_params', 'req_data', 'res_header', + 'res_body', 'taint_value', 'taint_position', 'method_pool_id', + 'context_path', 'client_ip', 'top_stack', 'bottom_stack', + 'full_stack', 'counts', 'latest_time', 'latest_time_desc' + ]) + else: + from dongtai_common.models.hook_type import HookType + hook_type = HookType.objects.filter(vul_strategy_id=strategy_id).first() + vul = IastVulnerabilityModel.objects.create( + strategy_id=strategy_id, + # fixme: delete field hook_type + hook_type=hook_type if hook_type else HookType.objects.first(), + level_id=vul_level, + url=vul_meta.url, + uri=vul_meta.uri, + pattern_uri = pattern_url, + http_method=vul_meta.http_method, + http_scheme=vul_meta.http_scheme, + http_protocol=vul_meta.http_protocol, + req_header=vul_meta.req_header, + req_params=vul_meta.req_params, + req_data=vul_meta.req_data, + res_header=vul_meta.res_header, + res_body=vul_meta.res_body, + full_stack=json.dumps(vul_stack, ensure_ascii=False), + top_stack=top_stack, + bottom_stack=bottom_stack, + taint_value=taint_value, + taint_position=taint_position, + agent=vul_meta.agent, + context_path=vul_meta.context_path, + counts=1, + status_id=settings.PENDING, + first_time=vul_meta.create_time, + latest_time=timestamp, + client_ip=vul_meta.clent_ip, + param_name=param_name, + method_pool_id=vul_meta.id + ) + log_vul_found(vul.agent.user_id, vul.agent.bind_project.name, + vul.agent.bind_project_id, vul.id, vul.strategy.vul_name) + cache.delete(cache_key) + #delete if exists more than one departured use redis lock + #IastVulnerabilityModel.objects.filter( + # strategy_id=strategy_id, + # uri=vul_meta.uri, + # http_method=vul_meta.http_method, + # agent__in=project_agents, + # param_name=param_name, + # pk__lt=vul.id, + #).delete() + + 
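    # Note: duplicate saves are suppressed by the cache.get_or_set() lock near the
    # top of save_vul (keyed by strategy, URI, HTTP method, project version and
    # parameter name); the bulk delete kept commented out above was the earlier
    # cleanup approach.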
logger.info(f"vul_found {vul.id}") + return vul + + +from dongtai_common.models.vul_recheck_payload import IastVulRecheckPayload + +def create_vul_recheck_task(vul_id, agent, timestamp): + project = IastProject.objects.filter(id=agent.bind_project_id).first() + if project and project.vul_validation == VulValidation.DISABLE: + return + enable_validate = False + if project is None or (project and project.vul_validation == VulValidation.FOLLOW_GLOBAL): + enable_validate = get_vul_validate() + if project and project.vul_validation == VulValidation.ENABLE: + enable_validate = True + + if enable_validate is False: + return + + replay_model = IastReplayQueue.objects.filter(replay_type=const.VUL_REPLAY, relation_id=vul_id).first() + if replay_model: + if replay_model.state in [const.PENDING, const.WAITING, const.SOLVING]: + return + + replay_model.state = const.PENDING + replay_model.update_time = timestamp + replay_model.count = replay_model.count + 1 + replay_model.save(update_fields=['state', 'update_time', 'count']) + else: + vul = IastVulnerabilityModel.objects.filter( + pk=vul_id).only('strategy_id').first() + queue = [ + IastReplayQueue(agent=agent, + relation_id=vul_id, + state=const.PENDING, + count=1, + create_time=timestamp, + update_time=timestamp, + replay_type=const.VUL_REPLAY, + payload_id=payload_id) + for payload_id in IastVulRecheckPayload.objects.filter( + strategy_id=vul.strategy_id, + user__in=[1, agent.user_id]).values_list('pk', flat=True) + ] + if queue: + IastReplayQueue.objects.bulk_create(queue, ignore_conflicts=True) + else: + IastReplayQueue.objects.create(agent=agent, + relation_id=vul_id, + state=const.PENDING, + count=1, + create_time=timestamp, + update_time=timestamp, + replay_type=const.VUL_REPLAY) + + +def handler_replay_vul(vul_meta, vul_level, strategy_id, vul_stack, top_stack, bottom_stack, **kwargs): + timestamp = int(time.time()) + vul = IastVulnerabilityModel.objects.filter(id=kwargs['relation_id']).first() + logger.info(f'handle vul replay, current strategy:{vul.strategy_id}, target hook_type:{strategy_id}') + if vul and vul.strategy_id == strategy_id: + vul.status_id = settings.CONFIRMED + vul.latest_time = timestamp + vul.save(update_fields=['status_id', 'latest_time','latest_time_desc']) + IastProject.objects.filter(id=vul_meta.agent.bind_project_id).update(latest_time=timestamp) + + IastReplayQueue.objects.filter(id=kwargs['replay_id']).update( + state=const.SOLVED, + result=const.RECHECK_TRUE, + verify_time=timestamp, + update_time=timestamp) + IastReplayQueue.objects.filter(vul_id=vul.id).exclude( + Q(id=kwargs['replay_id']) | Q(state=const.SOLVED)).update( + state=const.DISCARD, + result=const.RECHECK_DISCARD, + verify_time=timestamp, + update_time=timestamp) + log_recheck_vul(vul.agent.user.id, vul.agent.user.username, [vul.id], + '已确认') + else: + vul = save_vul(vul_meta, vul_level, strategy_id, vul_stack, top_stack, bottom_stack, **kwargs) + + create_vul_recheck_task(vul_id=vul.id, agent=vul.agent, timestamp=timestamp) + return vul + + +@receiver(vul_found) +def handler_vul(vul_meta, vul_level, strategy_id, vul_stack, top_stack, bottom_stack, **kwargs): + """ + 保存漏洞数据 + :param vul_meta: + :param vul_level: + :param vul_name: + :param vul_stack: + :param top_stack: + :param bottom_stack: + :return: + """ + # 如果是重放请求,且重放请求类型为漏洞验证,更新漏洞状态为 + timestamp = int(time.time()) + from dongtai_common.models.replay_method_pool import IastAgentMethodPoolReplay + from dongtai_common.models.agent_method_pool import MethodPool + + if isinstance(vul_meta, 
IastAgentMethodPoolReplay): + replay_id = vul_meta.replay_id + replay_type = vul_meta.replay_type + relation_id = vul_meta.relation_id + + if replay_type == const.VUL_REPLAY: + kwargs['relation_id'] = relation_id + kwargs['replay_id'] = replay_id + vul = handler_replay_vul(vul_meta, vul_level, strategy_id, vul_stack, top_stack, bottom_stack, **kwargs) + elif replay_type == const.REQUEST_REPLAY: + # 数据包调试数据暂不检测漏洞 + vul = None + else: + vul = save_vul(vul_meta, vul_level, strategy_id, vul_stack, + top_stack, bottom_stack, **kwargs) + create_vul_recheck_task(vul_id=vul.id, + agent=vul.agent, + timestamp=timestamp) + elif isinstance(vul_meta, MethodPool): + vul = save_vul(vul_meta, vul_level, strategy_id, vul_stack, top_stack, + bottom_stack, **kwargs) + create_vul_recheck_task(vul_id=vul.id, + agent=vul.agent, + timestamp=timestamp) diff --git a/dongtai_engine/signals/signals.py b/dongtai_engine/signals/signals.py new file mode 100644 index 000000000..03f914e93 --- /dev/null +++ b/dongtai_engine/signals/signals.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# + +from django.dispatch import Signal + +vul_found = Signal() diff --git a/dongtai_engine/task_base.py b/dongtai_engine/task_base.py new file mode 100644 index 000000000..935ccf4e2 --- /dev/null +++ b/dongtai_engine/task_base.py @@ -0,0 +1,18 @@ +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.vulnerablity import IastVulnerabilityModel + + +def replay_payload_data(relation_ids, replay_type): + if replay_type == 1: + vulnerability = IastVulnerabilityModel.objects.filter(id__in=relation_ids).values( + 'id', 'agent', 'uri', 'http_method', 'http_scheme', 'req_header', 'req_params', 'req_data', 'taint_value', 'param_name' + ) + else: + vulnerability = MethodPool.objects.filter(id__in=relation_ids).values( + 'id', 'agent', 'uri', 'http_method', 'http_scheme', 'req_header', 'req_params', 'req_data' + ) + data = {} + if vulnerability: + for item in vulnerability: + data[item['id']] = item + return data diff --git a/dongtai_engine/tasks.py b/dongtai_engine/tasks.py new file mode 100644 index 000000000..752797e50 --- /dev/null +++ b/dongtai_engine/tasks.py @@ -0,0 +1,631 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/26 下午4:45 +# software: PyCharm +# project: lingzhi-engine +import hashlib +import json +import time +from json import JSONDecodeError + +from celery import shared_task +from celery.apps.worker import logger +from django.db.models import Sum, Q + +from dongtai_common.engine.vul_engine import VulEngine +from dongtai_common.models import User +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.asset import Asset +from dongtai_common.models.errorlog import IastErrorlog +from dongtai_common.models.heartbeat import IastHeartbeat +from dongtai_common.models.hook_strategy import HookStrategy +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.project import IastProject +from dongtai_common.models.replay_method_pool import IastAgentMethodPoolReplay +from dongtai_common.models.replay_queue import IastReplayQueue +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.utils import const + +from dongtai_engine.plugins.strategy_headers import check_response_header +from 
dongtai_engine.plugins.strategy_sensitive import check_response_content +from dongtai_engine.replay import Replay +from dongtai_conf import settings +from dongtai_web.dongtai_sca.utils import sca_scan_asset +from dongtai_common.models.project_report import ProjectReport +import requests +from hashlib import sha1 +from dongtai_engine.task_base import replay_payload_data + +LANGUAGE_MAP = { + "JAVA": 1, + "PYTHON": 2, + "PHP": 3, + "GO": 4 +} + +RETRY_INTERVALS = [10, 30, 90] + + +class RetryableException(Exception): + pass + + +def queryset_to_iterator(queryset): + """ + 将queryset转换为迭代器,解决使用queryset遍历数据导致的一次性加载至内存带来的内存激增问题 + :param queryset: + :return: + """ + page_size = 200 + page = 1 + while True: + temp_queryset = queryset[(page - 1) * page_size:page * page_size] + page += 1 + if len(temp_queryset) > 0: + yield temp_queryset + else: + break + + +def load_sink_strategy(user=None, language=None): + """ + 加载用户user有权限方法的策略 + :param user: edit by song + :return: + """ + logger.info('start load sink_strategy') + strategies = list() + language_id = 0 + if language and language in LANGUAGE_MAP: + language_id = LANGUAGE_MAP[language] + type_query = HookType.objects.filter(type=4) + if language_id != 0: + type_query = type_query.filter(language_id=language_id) + + strategy_models = HookStrategy.objects.filter( + type__in=type_query, + created_by__in=[user.id, 1] if user else [1] + ).values('id', 'value', 'type__value') + sub_method_signatures = set() + for strategy in strategy_models: + # for strategy in sub_queryset: + strategy_value = strategy.get("value", "") + sub_method_signature = strategy_value[:strategy_value.rfind('(')] if strategy_value.rfind( + '(') > 0 else strategy_value + if sub_method_signature in sub_method_signatures: + continue + sub_method_signatures.add(sub_method_signature) + + strategies.append({ + 'strategy': strategy.get("id", ""), + 'type': strategy.get("type__value", ""), + 'value': sub_method_signature + }) + return strategies + + +from dongtai_engine.signals.handlers.vul_handler import handler_vul +from dongtai_engine.filters.main import vul_filter + + +def search_and_save_vul(engine, method_pool_model, method_pool, strategy): + """ + 搜索方法池是否存在满足策略的数据,如果存在,保存相关数据为漏洞 + :param method_pool_model: 方法池实例化对象 + :param strategy: 策略数据 + :return: None + """ + logger.info(f'current sink rule is {strategy.get("type")}') + queryset = IastStrategyModel.objects.filter(vul_type=strategy['type'], state=const.STRATEGY_ENABLE) + if queryset.values('id').exists() is False: + logger.error(f'current method pool hit rule {strategy.get("type")}, but no vul strategy.') + return + engine.search(method_pool=method_pool, vul_method_signature=strategy.get('value')) + status, stack, source_sign, sink_sign, taint_value = engine.result() + filterres = vul_filter( + stack, + source_sign, + sink_sign, + taint_value, + queryset.values('vul_type').first()['vul_type'], + agent_id=method_pool_model.agent_id, + ) + logger.info(f'vul filter_status : {filterres}') + if status and filterres: + logger.info(f'vul_found {method_pool_model.agent_id} {method_pool_model.url} {sink_sign}') + vul_strategy = queryset.values("level", "vul_name", "id").first() + handler_vul( + sender="tasks.search_and_save_vul", + vul_meta=method_pool_model, + vul_level=vul_strategy['level'], + strategy_id=vul_strategy['id'], + vul_stack=stack, + top_stack=source_sign, + bottom_stack=sink_sign, + taint_value=taint_value + ) + else: + try: + if isinstance(method_pool_model, MethodPool): + return + replay_type = 
method_pool_model.replay_type + if replay_type != const.VUL_REPLAY: + return + + replay_id = method_pool_model.replay_id + relation_id = method_pool_model.relation_id + timestamp = int(time.time()) + IastVulnerabilityModel.objects.filter(id=relation_id).update( + status_id=settings.IGNORE, + latest_time=timestamp + ) + IastReplayQueue.objects.filter(id=replay_id).update( + state=const.SOLVED, + result=const.RECHECK_FALSE, + verify_time=timestamp, + update_time=timestamp + ) + IastProject.objects.filter(id=method_pool.agent.bind_project_id).update(latest_time=timestamp) + except Exception as e: + logger.info(f'漏洞数据处理出错,原因:{e}') + + +def search_and_save_sink(engine, method_pool_model, strategy): + """ + 从方法池中搜索策略strategy对应的sink方法是否存在,如果存在,保存策略与污点池关系 + :param engine: 云端搜索引擎实例化对象 + :param method_pool_model: 方法池模型对象 + :param strategy: json格式的策略 + :return: None + """ + method_pool = json.loads(method_pool_model.method_pool) + # fixme 检索匹配条件的sink点 + is_hit = engine.search_sink( + method_pool=method_pool, + vul_method_signature=strategy.get('value') + ) + if is_hit is None: + return + + logger.info(f'发现sink点{strategy.get("type")}') + method_pool_model.sinks.add(strategy.get('strategy')) + + +@shared_task(bind=True, queue='dongtai-method-pool-scan', + max_retries=settings.config.getint('task', 'max_retries', fallback=3)) +def search_vul_from_method_pool(self, method_pool_sign, agent_id, retryable=False): + logger.info(f'漏洞检测开始,方法池 {method_pool_sign}') + try: + method_pool_model = MethodPool.objects.filter(pool_sign=method_pool_sign, agent_id=agent_id).first() + if method_pool_model is None: + if retryable: + if self.request.retries < self.max_retries: + tries = self.request.retries + 1 + raise RetryableException(f'漏洞检测方法池 {method_pool_sign} 不存在,重试第 {tries} 次') + else: + logger.error(f'漏洞检测超过最大重试次数 {self.max_retries},方法池 {method_pool_sign} 不存在') + else: + logger.warning(f'漏洞检测终止,方法池 {method_pool_sign} 不存在') + return + logger.info( + f"search vul from method_pool found, agent_id: {method_pool_model.agent_id} , uri: {method_pool_model.uri}" + ) + check_response_header(method_pool_model) + check_response_content(method_pool_model) + + strategies = load_sink_strategy(method_pool_model.agent.user, method_pool_model.agent.language) + engine = VulEngine() + method_pool = json.loads(method_pool_model.method_pool) if method_pool_model else [] + engine.method_pool = method_pool + if method_pool: + # print(engine.method_pool_signatures) + for strategy in strategies: + if strategy.get('value') in engine.method_pool_signatures: + search_and_save_vul(engine, method_pool_model, method_pool, strategy) + logger.info(f'漏洞检测成功') + except RetryableException as e: + if self.request.retries < self.max_retries: + delay = 5 + pow(3, self.request.retries) * 10 + self.retry(exc=e, countdown=delay) + else: + logger.error(f'漏洞检测超过最大重试次数,错误原因:{e}') + except Exception as e: + logger.error(e, exc_info=True) + logger.error(f'漏洞检测出错,方法池 {method_pool_sign}. 
错误原因:{e}') + + +@shared_task(queue='dongtai-replay-vul-scan') +def search_vul_from_replay_method_pool(method_pool_id): + logger.info(f'重放数据漏洞检测开始,方法池 {method_pool_id}') + try: + method_pool_model = IastAgentMethodPoolReplay.objects.filter(id=method_pool_id).first() + if method_pool_model is None: + logger.warn(f'重放数据漏洞检测终止,方法池 {method_pool_id} 不存在') + strategies = load_sink_strategy(method_pool_model.agent.user, method_pool_model.agent.language) + engine = VulEngine() + method_pool = json.loads(method_pool_model.method_pool) + if method_pool is None or len(method_pool) == 0: + return + engine.method_pool = method_pool + for strategy in strategies: + if strategy.get('value') not in engine.method_pool_signatures: + continue + search_and_save_vul(engine, method_pool_model, method_pool, strategy) + logger.info(f'重放数据漏洞检测成功') + except Exception as e: + logger.error(f'重放数据漏洞检测出错,方法池 {method_pool_id}. 错误原因:{e}') + + +def load_methods_from_strategy(strategy_id): + """ + 根据策略ID加载策略详情、策略对应的方法池数据 + :param strategy_id: 策略ID + :return: + """ + strategy = HookStrategy.objects.filter(type__in=HookType.objects.filter(type=4), id=strategy_id).first() + if strategy is None: + logger.info(f'策略[{strategy_id}]不存在') + return None, None + strategy_value = { + 'strategy': strategy, + 'type': strategy.type.first().value, + 'value': strategy.value.split('(')[0] + } + # fixme 后续根据具体需要,获取用户对应的数据 + if strategy is None: + return strategy_value, None + + user = User.objects.filter(id=strategy.created_by).first() + if user is None: + return strategy_value, None + + agents = IastAgent.objects.filter(user=user) + if agents.values('id').exists() is False: + return strategy_value, None + + method_pool_queryset = MethodPool.objects.filter(agent__in=agents) + return strategy_value, method_pool_queryset + + +def get_project_agents(agent): + agents = IastAgent.objects.filter( + bind_project_id=agent.bind_project_id, + project_version_id=agent.project_version_id, + user=agent.user + ) + return agents + + +@shared_task(queue='dongtai-sca-task') +def update_one_sca(agent_id, package_path, package_signature, package_name, package_algorithm, package_version=''): + """ + 根据SCA数据库,更新SCA记录信息 + :return: + """ + logger.info( + f'SCA检测开始 [{agent_id} {package_path} {package_signature} {package_name} {package_algorithm} {package_version}]') + agent = IastAgent.objects.filter(id=agent_id).first() + version = package_version + if not version: + if agent.language == "JAVA": + version = package_name.split('/')[-1].replace('.jar', '').split('-')[-1] + + if version: + current_version_agents = get_project_agents(agent) + if package_signature: + asset_count = Asset.objects.values("id").filter(signature_value=package_signature, + agent__in=current_version_agents).count() + else: + package_signature = sha_1(package_name) + asset_count = Asset.objects.values("id").filter(package_name=package_name, + version=version, + agent__in=current_version_agents).count() + + if asset_count == 0: + new_level = IastVulLevel.objects.get(name="info") + asset = Asset() + asset.package_name = package_name + asset.package_path = package_path + asset.signature_value = package_signature + asset.signature_algorithm = package_algorithm + asset.version = version + asset.level_id = new_level.id + asset.vul_count = 0 + asset.language = asset.language + if agent: + asset.agent = agent + asset.project_version_id = agent.project_version_id if agent.project_version_id else 0 + asset.project_name = agent.project_name + asset.language = agent.language + asset.project_id = -1 + if 
agent.bind_project_id: + asset.project_id = agent.bind_project_id + asset.user_id = -1 + if agent.user_id: + asset.user_id = agent.user_id + + asset.license = '' + asset.dt = int(time.time()) + asset.save() + sca_scan_asset(asset) + else: + logger.info( + f'SCA检测开始 [{agent_id} {package_path} {package_signature} {package_name} {package_algorithm} {version}] 组件已存在') + + +def sha_1(raw): + sha1_str = hashlib.sha1(raw.encode("utf-8")).hexdigest() + return sha1_str + + +def is_alive(agent_id, timestamp): + """ + Whether the probe is alive or not, the judgment condition: there is a heartbeat log within 2 minutes + """ + return IastHeartbeat.objects.values('id').filter(agent__id=agent_id, dt__gt=(timestamp - 60 * 2)).exists() + + +@shared_task(queue='dongtai-periodic-task') +def update_agent_status(): + """ + 更新Agent状态 + :return: + """ + logger.info(f'检测引擎状态更新开始') + timestamp = int(time.time()) + try: + running_agents = IastAgent.objects.values("id").filter(online=1) + is_stopped_agents = list() + for agent in running_agents: + agent_id = agent['id'] + if is_alive(agent_id=agent_id, timestamp=timestamp): + continue + else: + is_stopped_agents.append(agent_id) + if is_stopped_agents: + IastAgent.objects.filter(id__in=is_stopped_agents).update(is_running=0, is_core_running=0, online=0) + + logger.info(f'检测引擎状态更新成功') + except Exception as e: + logger.error(f'检测引擎状态更新出错,错误详情:{e}') + + +@shared_task(queue='dongtai-periodic-task') +def heartbeat(): + """ + 发送心跳 + :return: + """ + # 查询agent数量 + + logger.info('dongtai_engine.tasks.heartbeat is running') + agents = IastAgent.objects.all() + agent_enable = agents.values('id').filter(is_running=1).count() + agent_counts = agents.values('id').count() + heartbeat = IastHeartbeat.objects.values('id').filter(agent__in=agents).annotate(Sum("req_count")).count() + project_count = IastProject.objects.values('id').count() + user_count = User.objects.values('id').count() + vul_count = IastVulnerabilityModel.objects.values('id').count() + method_pool_count = MethodPool.objects.values('id').count() + heartbeat_raw = { + "status": 200, + "msg": "engine is running", + "agentCount": agent_counts, + "reqCount": heartbeat, + "agentEnableCount": agent_enable, + "projectCount": project_count, + "userCount": user_count, + "vulCount": vul_count, + "methodPoolCount": method_pool_count, + "timestamp": int(time.time()) + } + try: + logger.info('[dongtai_engine.tasks.heartbeat] send heartbeat data to OpenApi Service.') + resp = requests.post(url='http://openapi.iast.huoxian.cn:8000/api/v1/engine/heartbeat', json=heartbeat_raw) + if resp.status_code == 200: + logger.info('[dongtai_engine.tasks.heartbeat] send heartbeat data to OpenApi Service Successful.') + pass + logger.info('[dongtai_engine.tasks.heartbeat] send heartbeat data to OpenApi Service Failure.') + except Exception as e: + logger.info(f'[dongtai_engine.tasks.heartbeat] send heartbeat data to OpenApi Service Error. 
reason is {e}') + + +@shared_task(queue='dongtai-periodic-task') +def clear_error_log(): + """ + 清理错误日志 + :return: + """ + logger.info(f'日志清理开始') + try: + timestamp = int(time.time()) + out_date_timestamp = 60 * 60 * 24 * 30 + count = IastErrorlog.objects.filter(dt__lt=(timestamp - out_date_timestamp)).delete() + logger.info(f'日志清理成功,共{count}条') + except Exception as e: + logger.error(f'日志清理失败,错误详情:{e}') + + +@shared_task(queue='dongtai-periodic-task') +def vul_recheck(): + """ + 定时处理漏洞验证 + """ + logger.info('开始处理漏洞重放数据') + + relay_queue_queryset = IastReplayQueue.objects.filter(replay_type=const.VUL_REPLAY, state=const.PENDING).order_by( + "-id") + if relay_queue_queryset is None: + logger.info('暂无需要处理的漏洞重放数据') + return + + timestamp = int(time.time()) + sub_replay_queue = relay_queue_queryset[:100] + vul_ids = [] + pool_ids = [] + for item in sub_replay_queue: + if item.relation_id is None: + logger.info('重放请求数据格式不正确,relation id不能为空') + Replay.replay_failed(timestamp=timestamp, replay=item) + continue + # 漏洞重放 + if item.replay_type == 1: + vul_ids.append(item.relation_id) + # 流量重放 + elif item.replay_type == 2: + pool_ids.append(item.relation_id) + if not vul_ids and not pool_ids: + logger.info('暂无需要处理的漏洞重放数据') + return + vul_data = replay_payload_data(vul_ids, 1) + pool_data = replay_payload_data(pool_ids, 2) + # print(vul_ids) + # print(pool_ids) + for replay in sub_replay_queue: + # 构造重放请求包 + vul_id = replay.relation_id + recheck_payload = replay.payload.value if replay.payload_id != -1 else '.%2F..%2F%60dongtai' + logger.info( + f"generating payload recheck_payload:{recheck_payload} vul_id:{vul_id} replay_id:{replay.id}" + ) + if replay.replay_type == 1: + vulnerability = vul_data.get(vul_id, {}) + else: + vulnerability = pool_data.get(vul_id, {}) + if not vulnerability: + Replay.replay_failed(timestamp=timestamp, replay=replay) + continue + uri = vulnerability['uri'] + param_value = vulnerability['req_params'] if vulnerability['req_params'] else '' + headers = vulnerability['req_header'] + body = vulnerability['req_data'] + logger.info( + f"generating payload by param_name : {vulnerability['param_name']}" + ) + if replay.replay_type == 1: + # 漏洞重放 sink点追加参数 + con = 2 + if vulnerability.get("param_name", ""): + try: + params = json.loads(vulnerability['param_name']) + except JSONDecodeError as e: + logger.error(f'污点数据解析出错,原因:{e}') + Replay.replay_failed(replay=replay, timestamp=timestamp) + con = 1 + else: + con = 1 + taint_value = vulnerability['taint_value'] + # 构造带payload的重放请求 + if con == 2: + for position, param_name in params.items(): + if position == 'GET': + _param_items = param_value.split('&') + item_length = len(_param_items) + for index in range(item_length): + _params = _param_items[index].split('=') + _param_name = _params[0] + if _param_name == param_name: + _param_items[index] = f'{_param_name}={recheck_payload}' + break + param_value = '&'.join(_param_items) + elif position == 'POST': + try: + # Content-Type: application/json + post_body = json.loads(body) + if param_name in post_body: + post_body[param_name] = recheck_payload + body = json.dumps(post_body) + else: # ? 
it looks weird + _param_items = body.split('&') + item_length = len(_param_items) + for index in range(item_length): + _params = _param_items[index].split('=') + _param_name = _params[0] + if _param_name == param_name: + _param_items[index] = f'{_param_name}={recheck_payload}' + break + body = '&'.join(_param_items) + except BaseException: + # Content-Type: multipart/form-data + _param_items = body.split('&') + item_length = len(_param_items) + for index in range(item_length): + _params = _param_items[index].split('=') + _param_name = _params[0] + if _param_name == param_name: + _param_items[index] = f'{_param_name}={recheck_payload}' + break + body = '&'.join(_param_items) + elif position == 'HEADER': + import base64 + header_raw = base64.b64decode(headers).decode('utf-8').split('\n') + item_length = len(header_raw) + for index in range(item_length): + _header_list = header_raw[index].split(':') + _header_name = _header_list[0] + if _header_name == param_name: + header_raw[index] = f'{_header_name}:{recheck_payload}' + break + try: + headers = base64.b64encode('\n'.join(header_raw)) + except Exception as e: + logger.error(f'请求头解析失败,漏洞ID: {vulnerability["id"]}') + elif position == 'COOKIE': + import base64 + header_raw = base64.b64decode(headers).decode('utf-8').split('\n') + item_length = len(header_raw) + cookie_index = 0 + cookie_raw = None + for index in range(item_length): + _header_list = header_raw[index].split(':') + _header_name = _header_list[0] + if _header_name == 'cookie' or _header_name == 'Cookie': + cookie_index = index + cookie_raw = ':'.join(_header_list[1:]) + break + if cookie_index > 0: + cookie_raw_items = cookie_raw.split(';') + item_length = len(cookie_raw_items) + for index in range(item_length): + cookie_item = cookie_raw_items[index].split('=') + if cookie_item[0] == param_name: + cookie_raw_items[index] = f'{param_name}={recheck_payload}' + break + cookie_raw = ';'.join(cookie_raw_items) + header_raw[cookie_index] = cookie_raw + try: + headers = base64.b64encode('\n'.join(header_raw)) + except Exception as e: + logger.error(f'请求头解析失败,漏洞ID: {vulnerability["id"]}') + + elif position == 'PATH' and taint_value: + # 检查path,替换 + path_items = uri.split('/') + item_length = len(path_items) + for index in range(item_length): + if taint_value == path_items[index]: + path_items[index] = 'dongtai' + break + uri = '/'.join(path_items) + + replay.uri = uri + replay.method = vulnerability['http_method'] + replay.scheme = vulnerability['http_scheme'] + replay.header = headers + replay.params = param_value + replay.body = body + replay.update_time = timestamp + replay.state = const.WAITING + replay.agent_id = vulnerability['agent'] + # print(replay.id) + # print("okkkkkto======update") + replay.save( + update_fields=['uri', 'method', 'scheme', 'header', 'params', 'body', 'update_time', 'state', 'agent_id'] + ) + + # IastReplayQueue.objects.bulk_update(relay_queue_queryset, ['uri', 'method', 'scheme', 'header', 'params', 'body', 'update_time', 'state', 'agent_id']) + logger.info('漏洞重放数据处理完成') diff --git a/dongtai_engine/tests.py b/dongtai_engine/tests.py new file mode 100644 index 000000000..7aea7ac2c --- /dev/null +++ b/dongtai_engine/tests.py @@ -0,0 +1,90 @@ +from test.apiserver.test_agent_base import AgentTestCase,gzipdata +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_method_pool import MethodPool +import gzip +import base64 +from dongtai_protocol.report.report_handler_factory import ReportHandler +import json +from 
dongtai_common.models.vulnerablity import IastVulnerabilityModel +from django.test import TestCase +from dongtai_engine.tasks import search_vul_from_method_pool +from dongtai_protocol.tests import download_if_not_exist +from django.db import connections +import unittest + +@unittest.skip("waiting for rebuild mock data") +class CoreScanTestCase(AgentTestCase): + + def setUp(self): + res = download_if_not_exist( + "https://huoqi-public.oss-cn-beijing.aliyuncs.com/iast/test_data/iast_agent_method_pool.sql", + "/tmp/test_core_iast_agent_method_pool.sql") + super().setUp() + cursor = connections['default'].cursor() + sqlfile = "" + with open('/tmp/test_core_iast_agent_method_pool.sql') as f: + for line in f: + sqlfile += line + cursor.execute(sqlfile) + + def test_benchmark_method_pool_scan(self): + data = MethodPool.objects.all() + vul_count_begin = IastVulnerabilityModel.objects.all().count() + for method_pool in data: + method_pool.agent_id = self.agent_id + method_pool.save() + search_vul_from_method_pool(method_pool.pool_sign, + method_pool.agent_id) + assert IastVulnerabilityModel.objects.filter( + url=method_pool.url, agent_id=self.agent_id).exists() + vul_count_after = IastVulnerabilityModel.objects.all().count() + assert len(data) == vul_count_after - vul_count_begin + + def test_params_empty_count(self): + data = MethodPool.objects.all() + vul_count_without_param_mark_begin = IastVulnerabilityModel.objects.filter( + param_name='{}', level_id__lte=2).all().count() + for method_pool in data: + method_pool.agent_id = self.agent_id + method_pool.save() + search_vul_from_method_pool(method_pool.pool_sign, + method_pool.agent_id) + assert IastVulnerabilityModel.objects.filter( + url=method_pool.url, agent_id=self.agent_id).exists() + vul_count_without_param_mark_after = IastVulnerabilityModel.objects.filter(param_name='{}', + level_id__lte=2).all().count() + res = vul_count_without_param_mark_after - vul_count_without_param_mark_begin + print([ + i.uri for i in IastVulnerabilityModel.objects.filter( + param_name='{}', level_id__lte=2).all() + ]) + assert res == 0 + + def test_params_single_uri(self): + data = MethodPool.objects.filter(uri='/benchmark/cmdi-00/BenchmarkTest00573').all() + vul_count_without_param_mark_begin = IastVulnerabilityModel.objects.filter( + param_name='{}', level_id__lte=2).all().count() + for method_pool in data: + method_pool.agent_id = self.agent_id + method_pool.save() + search_vul_from_method_pool(method_pool.pool_sign, + method_pool.agent_id) + assert IastVulnerabilityModel.objects.filter( + url=method_pool.url, agent_id=self.agent_id).exists() + vul_count_without_param_mark_after = IastVulnerabilityModel.objects.filter(param_name='{}', + level_id__lte=2).all().count() + res = vul_count_without_param_mark_after - vul_count_without_param_mark_begin + print([ + i.uri for i in IastVulnerabilityModel.objects.filter( + param_name='{}', level_id__lte=2).all() + ]) + assert res == 0 + +@unittest.skip("waiting for rebuild mock data") +class CoreTaskTestCase(AgentTestCase): + + def test_search_method_pool(self): + method_pool_id = 4439061 + method_pool = MethodPool.objects.filter(pk=method_pool_id).first() + from dongtai_engine.tasks import search_vul_from_method_pool + search_vul_from_method_pool(method_pool.pool_sign, method_pool.agent_id) diff --git a/dongtai_protocol/__init__.py b/dongtai_protocol/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_protocol/admin.py b/dongtai_protocol/admin.py new file mode 100644 index 
000000000..8c38f3f3d --- /dev/null +++ b/dongtai_protocol/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/dongtai_protocol/api_schema.py b/dongtai_protocol/api_schema.py new file mode 100644 index 000000000..e7b644299 --- /dev/null +++ b/dongtai_protocol/api_schema.py @@ -0,0 +1,225 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/9/28 上午10:51 +# project: dongtai-openapi +from drf_spectacular.utils import OpenApiParameter, OpenApiExample + + +class DongTaiAuth: + TOKEN = 'TokenAuthentication' + + +class DongTaiParameter: + OPENAPI_URL = OpenApiParameter( + name='url', + description='OpenAPI Service Addr', + required=True, + type=str, + examples=[ + OpenApiExample( + 'url example', + summary='default', + value='https://openapi.iast.io', + ), + ], + ) + PROJECT_NAME = OpenApiParameter( + name='projectName', + type=str, + description='The name of the project where the Agent needs to be installed', + examples=[ + OpenApiExample( + 'example with https://iast.io', + summary='default', + value='Demo Project', + ), + ], + ) + + LANGUAGE = OpenApiParameter( + name='language', + type=str, + description='The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='JAVA or PYTHON', + value='JAVA', + ), + ], + ) + + VERSION = OpenApiParameter( + name='version', + type=str, + description='The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) + + AGENT_NAME = OpenApiParameter( + name='name', + type=str, + description='The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) + + HOSTNAME = OpenApiParameter( + name='engineName', + type=str, + description='The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) + + NETWORK = OpenApiParameter( + name='engineName', + type=str, + description='The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) + + CONTAINER_NAME = OpenApiParameter( + name='containerName', + type=str, + description='The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) + + + SERVER_ADDR = OpenApiParameter( + name='serverAddr', + type=str, + description='The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) + + SERVER_PORT = OpenApiParameter( + name='serverPort', + type=str, + description='The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) + + SERVER_PATH = OpenApiParameter( + name='serverPath', + type=str, + description='The development language of the project that needs to install 
the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) + + SERVER_ENV = OpenApiParameter( + name='serverEnv', + type=str, + description='The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) + + PID = OpenApiParameter( + name='pid', + type=str, + description= + 'The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) + + AUTO_CREATE_PROJECT = OpenApiParameter( + name='autoCreateProject', + type=int, + description= + 'auto create project if project not found when this varibale is 1', + required=True, + examples=[ + OpenApiExample( + 'default value', + value=0, + ), + OpenApiExample( + 'enable value', + value=1, + ), + ], + ) + ENGINE_NAME = OpenApiParameter( + name='engineName', + type=str, + description='The development language of the project that needs to install the Agent', + required=True, + examples=[ + OpenApiExample( + 'example language', + summary='java or python', + value='java', + ), + ], + ) diff --git a/dongtai_protocol/apps.py b/dongtai_protocol/apps.py new file mode 100644 index 000000000..eb5f3719c --- /dev/null +++ b/dongtai_protocol/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig +from dongtai_common.common.utils import DongTaiAppConfigPatch + + +class ApiserverConfig(DongTaiAppConfigPatch, AppConfig): + name = 'dongtai_protocol' diff --git a/apiserver/decrypter.py b/dongtai_protocol/decrypter.py similarity index 72% rename from apiserver/decrypter.py rename to dongtai_protocol/decrypter.py index 56ef0f748..c84fc79e4 100644 --- a/apiserver/decrypter.py +++ b/dongtai_protocol/decrypter.py @@ -7,8 +7,6 @@ import gzip import json -from apiserver.encrypter import RsaCrypto - def parse_data(stream_data): """从http request解析iast agent上报的json数据 @@ -16,13 +14,10 @@ def parse_data(stream_data): 步骤: 1.从http request对象读取二进制流 2.gzip解压缩 - 3.rsa解密 4.json反序列化 :param stream_data: POST请求的流式对象 :return: iast agent上报的json数据,如果解压缩、解密过程失败,则抛出异常 """ data = gzip.decompress(stream_data).decode('utf-8') - # fixme JavaAgent中RSA加密后数据无法在云端正常解密,导致部分漏洞无法检出,暂时关闭RSA加解密功能 - # data = RsaCrypto.decrypt(data) objs = json.loads(data) return objs diff --git a/dongtai_protocol/report/__init__.py b/dongtai_protocol/report/__init__.py new file mode 100644 index 000000000..18cd8cf2e --- /dev/null +++ b/dongtai_protocol/report/__init__.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/10/23 11:54 +# software: PyCharm +# project: webapi +from dongtai_protocol.report.handler.error_log_handler import ErrorLogHandler +from dongtai_protocol.report.handler.heartbeat_handler import HeartBeatHandler +from dongtai_protocol.report.handler.narmal_vul_handler import NormalVulnHandler +from dongtai_protocol.report.handler.saas_method_pool_handler import SaasMethodPoolHandler +from dongtai_protocol.report.handler.sca_handler import (ScaHandler, ScaBulkHandler) +from dongtai_protocol.report.handler.api_route_handler import ApiRouteHandler +from dongtai_protocol.report.handler.hardencode_vul_handler import HardEncodeVulHandler +from dongtai_protocol.report.handler.agent_third_service_handler import ThirdPartyServiceHandler +from dongtai_protocol.report.handler.agent_filepath_handler import 
FilePathHandler + +if __name__ == '__main__': + ErrorLogHandler() + HeartBeatHandler() + ScaHandler() + NormalVulnHandler() + SaasMethodPoolHandler() + ApiRouteHandler() + HardEncodeVulHandler() + ScaBulkHandler() + ThirdPartyServiceHandler() + FilePathHandler() diff --git a/apiserver/report/__init__.py b/dongtai_protocol/report/handler/__init__.py similarity index 100% rename from apiserver/report/__init__.py rename to dongtai_protocol/report/handler/__init__.py diff --git a/dongtai_protocol/report/handler/agent_filepath_handler.py b/dongtai_protocol/report/handler/agent_filepath_handler.py new file mode 100644 index 000000000..e8d0fbd40 --- /dev/null +++ b/dongtai_protocol/report/handler/agent_filepath_handler.py @@ -0,0 +1,38 @@ +from dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler +from dongtai_common.models.api_route import IastApiRoute, IastApiMethod, \ + IastApiResponse, IastApiParameter, \ + IastApiMethodHttpMethodRelation, HttpMethod +from dongtai_common.models.agent import IastAgent +from dongtai_common.utils import const +import logging +from django.utils.translation import gettext_lazy as _ +from django.db import transaction +from dongtai_common.models.project import IastProject +from dongtai_common.models.agent_thirdservice import IastThirdPartyService +from simhash import Simhash +logger = logging.getLogger('dongtai.openapi') + + + +@ReportHandler.register(const.REPORT_FILE_PATH) +class FilePathHandler(IReportHandler): + + def parse(self): + self.filepath = self.detail.get('serviceDir') + self.servicetype = self.detail.get('serviceType') + + def save(self): + try: + simhash = _data_dump(self.filepath) + IastAgent.objects.filter(pk=self.agent_id).update( + filepathsimhash=simhash, servicetype=self.servicetype) + logger.info( + _('filepath simhash log successed : {} servicetype: {}'). 
+ format(simhash, self.servicetype)) + except Exception as e: + logger.info(_('filepath simhash log failed, why: {}').format(e)) + + +def _data_dump(filepath: str) -> str: + return Simhash(filepath).value diff --git a/dongtai_protocol/report/handler/agent_third_service_handler.py b/dongtai_protocol/report/handler/agent_third_service_handler.py new file mode 100644 index 000000000..7e20ea97c --- /dev/null +++ b/dongtai_protocol/report/handler/agent_third_service_handler.py @@ -0,0 +1,49 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : api_route_handler +# @created : Tuesday Aug 17, 2021 19:59:29 CST +# +# @description : +###################################################################### + +from dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler +from dongtai_common.models.api_route import IastApiRoute, IastApiMethod, \ + IastApiResponse, IastApiParameter, \ + IastApiMethodHttpMethodRelation, HttpMethod +from dongtai_common.models.agent import IastAgent +from dongtai_common.utils import const +import logging +from django.utils.translation import gettext_lazy as _ +from django.db import transaction +from dongtai_common.models.project import IastProject +from dongtai_common.models.agent_thirdservice import IastThirdPartyService +logger = logging.getLogger('dongtai.openapi') + + +@ReportHandler.register(const.REPORT_THIRD_PARTY_SERVICE) +class ThirdPartyServiceHandler(IReportHandler): + + def parse(self): + self.service_data = self.detail.get('serviceData') + + def save(self): + try: + agent = IastAgent.objects.filter(pk=self.agent_id)[0:1] + if not agent: + raise ValueError(_("No such agent")) + agent = agent[0] + third_party_models = _data_dump(self.service_data, self.agent_id, + agent.bind_project_id) + IastThirdPartyService.objects.bulk_create(third_party_models, + ignore_conflicts=True) + except Exception as e: + logger.info(_('third log failed, why: {}').format(e)) + + +def _data_dump(items, agent_id: int, project_id: int): + return (IastThirdPartyService(agent_id=agent_id, + project_id=project_id, + address=item['address'], + service_type=item['serviceType'], + port=item['port']) for item in items) diff --git a/dongtai_protocol/report/handler/api_route_handler.py b/dongtai_protocol/report/handler/api_route_handler.py new file mode 100644 index 000000000..691725c2b --- /dev/null +++ b/dongtai_protocol/report/handler/api_route_handler.py @@ -0,0 +1,105 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : api_route_handler +# @created : Tuesday Aug 17, 2021 19:59:29 CST +# +# @description : +###################################################################### + +from dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler +from dongtai_common.models.api_route import IastApiRoute, IastApiMethod, \ + IastApiResponse, IastApiParameter, \ + IastApiMethodHttpMethodRelation, HttpMethod +from dongtai_common.models.agent import IastAgent +from dongtai_common.utils import const +import logging +from django.utils.translation import gettext_lazy as _ +from django.db import transaction +from dongtai_common.models.project import IastProject +logger = logging.getLogger('dongtai.openapi') + + +@ReportHandler.register(const.REPORT_API_ROUTE) +class 
ApiRouteHandler(IReportHandler): + def parse(self): + self.api_data = self.detail.get('apiData') + self.api_routes = map(lambda x: _data_dump(x), self.api_data) + + def save(self): + try: + agent = IastAgent.objects.filter(pk=self.agent_id)[0:1] + if not agent: + raise ValueError(_("No such agent")) + agent = agent[0] + for api_route in self.api_routes: + http_methods = [] + with transaction.atomic(): + try: + for http_method in api_route['method']: + http_method, __ = HttpMethod.objects.get_or_create( + method=http_method.upper()) + http_methods.append(http_method) + api_method, is_create = IastApiMethod.objects.get_or_create( + method='/'.join(api_route['method'])) + if is_create: + for http_method in http_methods: + IastApiMethodHttpMethodRelation.objects.create( + api_method_id=api_method.id, + http_method_id=http_method.id) + fields = [ + 'uri', 'code_class', 'description', 'code_file', + 'controller', 'agent' + ] + api_route_dict = _dictfilter(api_route, fields) + api_route_obj = _route_dump(api_route_dict, api_method, + agent) + api_route_model, is_create = IastApiRoute.objects.get_or_create( + **api_route_obj) + parameters = api_route['parameters'] + for parameter in parameters: + parameter_obj = _para_dump(parameter, + api_route_model) + IastApiParameter.objects.get_or_create(**parameter_obj) + response_obj = _response_dump( + {'return_type': api_route['returnType']}, + api_route_model) + IastApiResponse.objects.get_or_create(**response_obj) + except Exception as e: + print(e) + logger.info(_('API navigation log record successfully')) + project = IastProject.objects.filter(pk=self.agent.bind_project_id).first() + if project: + project.update_latest() + except Exception as e: + logger.info(_('API navigation log failed, why: {}').format(e)) + + +def _data_dump(item): + item['code_class'] = item['class'] + item['code_file'] = item['file'] + return item + + +def _route_dump(item, api_method, agent): + item['method'] = api_method + item['agent'] = agent + item['path'] = item['uri'] + del item['uri'] + return item + + +def _para_dump(item, api_route): + item['route'] = api_route + item['parameter_type'] = item['type'] + del item['type'] + return item + + +def _response_dump(item, api_route): + item['route'] = api_route + return item + + +def _dictfilter(dict_: dict, fields: list): + return {k: v for k, v in dict_.items() if k in fields} diff --git a/apiserver/report/handler/auth_info_handler.py b/dongtai_protocol/report/handler/auth_info_handler.py similarity index 92% rename from apiserver/report/handler/auth_info_handler.py rename to dongtai_protocol/report/handler/auth_info_handler.py index 9c7a73f27..828eae4ae 100644 --- a/apiserver/report/handler/auth_info_handler.py +++ b/dongtai_protocol/report/handler/auth_info_handler.py @@ -7,10 +7,11 @@ import datetime -from dongtai_models.models.iast_overpower_user import IastOverpowerUserAuth +from dongtai_common.models.iast_overpower_user import IastOverpowerUserAuth +from dongtai_common.utils import const -from apiserver.report.handler.report_handler_interface import IReportHandler -from apiserver.report.report_handler_factory import ReportHandler +from dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler @ReportHandler.register('auth-info-report') @@ -32,6 +33,7 @@ def get_new_authinfo(authinfo): return _authinfo +@ReportHandler.register(const.REPORT_AUTH_ADD) class AuthAddHandler(IReportHandler): def parse(self): # todo 增加appnem字段 @@ 
-78,6 +80,7 @@ def save(self): pass +@ReportHandler.register(const.REPORT_AUTH_UPDATE) class AuthUpdateHandler(IReportHandler): """ { diff --git a/dongtai_protocol/report/handler/error_log_handler.py b/dongtai_protocol/report/handler/error_log_handler.py new file mode 100644 index 000000000..6ecc6b7d7 --- /dev/null +++ b/dongtai_protocol/report/handler/error_log_handler.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/10/23 11:54 +# software: PyCharm +# project: webapi +import logging + +import time + +from dongtai_common.models.errorlog import IastErrorlog +from dongtai_common.utils import const + +from dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler +from django.utils.translation import gettext_lazy as _ + +logger = logging.getLogger('dongtai.openapi') + + +@ReportHandler.register(const.REPORT_ERROR_LOG) +class ErrorLogHandler(IReportHandler): + def __init__(self): + super().__init__() + self.log = None + + def parse(self): + self.log = self.detail.get('log') + + def save(self): + try: + IastErrorlog.objects.create( + errorlog=self.log, + agent=self.agent, + state='已上报', + dt=int(time.time()) + ) + logger.info(_('Error log report saving success')) + except Exception as e: + logger.info(_('Error log report saves failed, why: {}').format(e)) diff --git a/dongtai_protocol/report/handler/hardencode_vul_handler.py b/dongtai_protocol/report/handler/hardencode_vul_handler.py new file mode 100644 index 000000000..58696d179 --- /dev/null +++ b/dongtai_protocol/report/handler/hardencode_vul_handler.py @@ -0,0 +1,121 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : hardencode_vul_handler +# @created : 星期五 12月 17, 2021 19:52:55 CST +# +# @description : +###################################################################### + + +import json +import logging +import random +import time + +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.project import IastProject +from dongtai_common.utils import const +from dongtai_conf import settings +from dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ +from rest_framework.serializers import ValidationError +from dongtai_web.vul_log.vul_log import log_vul_found +from dongtai_common.models.agent import IastAgent + + +logger = logging.getLogger('dongtai.openapi') + +class HardEncodeVulSerializer(serializers.Serializer): + class_ = serializers.CharField(default=None, + required=False, + help_text=_("class name")) + field = serializers.CharField(default=None, + required=False, + help_text=_("field")) + + value = serializers.CharField(default=None, required=False) + is_jdk = serializers.BooleanField(default=None, required=False) + + file_ = serializers.CharField(default=None, required=False) + + + +@ReportHandler.register(const.REPORT_VULN_HARDCODE) +class HardEncodeVulHandler(IReportHandler): + def parse(self): + ser = HardEncodeVulSerializer(data=self.detail) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + self.validated = False + return + self.validated 
= True + for k, v in ser.validated_data.items(): + setattr(self, k, v) + + def save(self): + strategy = IastStrategyModel.objects.filter(user_id=1, + vul_type='硬编码').first() + if not strategy or strategy.state != 'enable': + return + project_agents = IastAgent.objects.filter( + project_version_id=self.agent.project_version_id) + iast_vul = IastVulnerabilityModel.objects.filter( + strategy_id=strategy.id, + uri=self.detail.get('file', ''), + http_method='', + agent__in=project_agents).order_by('-latest_time').first() + timestamp = int(time.time()) + if iast_vul: + iast_vul.uri = self.detail.get('file', ''), + iast_vul.url = self.detail.get('class', ''), + iast_vul.latest_time = timestamp, + iast_vul.taint_position = self.field, + iast_vul.taint_value = self.value, + iast_vul.level_id = strategy.level_id, + iast_vul.full_stack = json.dumps(self.detail), + iast_vul.top_stack = "字段:{}".format(self.field), + iast_vul.bottom_stack = "硬编码值:{}".format(self.value), + iast_vul.save() + else: + iast_vul = IastVulnerabilityModel.objects.create( + hook_type_id=-1, + strategy_id=strategy.id, + uri=self.detail.get('file', ''), + url=self.detail.get('class', ''), + http_method='', + http_scheme='', + http_protocol='', + req_header='', + req_params='', + req_data='', + res_header='', + res_body='', + context_path='', + counts=1, + taint_position=self.field, + status_id=settings.CONFIRMED, + first_time=timestamp, + latest_time=timestamp, + client_ip='', + taint_value=self.value, + level_id=strategy.level_id, + full_stack=json.dumps(self.detail), + top_stack="字段:{}".format(self.field), + method_pool_id=-1, + bottom_stack="硬编码值:{}".format(self.value), + agent=self.agent) + IastVulnerabilityModel.objects.filter( + strategy_id=strategy.id, + uri=self.detail.get('file', ''), + http_method='', + agent__in=project_agents, + pk__lt=iast_vul.id).delete() + log_vul_found(iast_vul.agent.user_id, iast_vul.agent.bind_project.name, + iast_vul.agent.bind_project_id, iast_vul.id, + iast_vul.strategy.vul_name) diff --git a/dongtai_protocol/report/handler/heartbeat_handler.py b/dongtai_protocol/report/handler/heartbeat_handler.py new file mode 100644 index 000000000..2110a4274 --- /dev/null +++ b/dongtai_protocol/report/handler/heartbeat_handler.py @@ -0,0 +1,214 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/10/23 11:56 +# software: PyCharm +# project: webapi +import logging +import time + +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.heartbeat import IastHeartbeat +from dongtai_common.models.replay_queue import IastReplayQueue +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.utils import const +from django.utils.translation import gettext_lazy as _ +from dongtai_common.models.server import IastServer +from dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler +from django.db.models import (QuerySet, Q, F) +from dongtai_common.models.project import IastProject, VulValidation +from dongtai_common.utils.systemsettings import get_vul_validate +from dongtai_common.models.agent import IastAgent + +logger = logging.getLogger('dongtai.openapi') + + +@ReportHandler.register(const.REPORT_HEART_BEAT) +class HeartBeatHandler(IReportHandler): + def __init__(self): + super().__init__() + self.req_count = None + self.cpu = None + self.memory = None + self.network = None + self.report_queue = None + self.method_queue = None + 
self.replay_queue = None + self.return_queue = None + + + def parse(self): + self.cpu = self.detail.get('cpu') + self.memory = self.detail.get('memory') + self.disk = self.detail.get('disk') + self.req_count = self.detail.get('reqCount') + self.report_queue = self.detail.get('reportQueue', 0) + self.method_queue = self.detail.get('methodQueue', 0) + self.replay_queue = self.detail.get('replayQueue', 0) + self.return_queue = self.detail.get('returnQueue', None) + + def has_permission(self): + self.agent = IastAgent.objects.filter(id=self.agent_id, user=self.user_id).first() + return self.agent + + def save_heartbeat(self): + self.agent.is_running = 1 + self.agent.online = 1 + self.agent.save(update_fields=['is_running', 'online']) + queryset = IastHeartbeat.objects.filter(agent=self.agent) + heartbeat = queryset.order_by('-id').first() + if heartbeat: + queryset.exclude(pk=heartbeat.id).delete() + heartbeat.dt = int(time.time()) + if self.return_queue == 1: + heartbeat.req_count = self.req_count + heartbeat.report_queue = self.report_queue + heartbeat.method_queue = self.method_queue + heartbeat.replay_queue = self.replay_queue + heartbeat.save(update_fields=[ + 'req_count', 'dt', 'report_queue', 'method_queue', 'replay_queue' + ]) + elif self.return_queue == 0: + heartbeat.memory = self.memory + heartbeat.cpu = self.cpu + heartbeat.disk = self.disk + heartbeat.save(update_fields=['disk', 'memory', 'cpu', 'dt']) + else: + heartbeat.memory = self.memory + heartbeat.cpu = self.cpu + heartbeat.req_count = self.req_count + heartbeat.report_queue = self.report_queue + heartbeat.method_queue = self.method_queue + heartbeat.replay_queue = self.replay_queue + heartbeat.disk = self.disk + heartbeat.save(update_fields=[ + 'disk', 'memory', 'cpu', 'req_count', 'dt', 'report_queue', + 'method_queue', 'replay_queue' + ]) + else: + IastHeartbeat.objects.create(memory=self.memory, + cpu=self.cpu, + req_count=self.req_count, + report_queue=self.replay_queue, + method_queue=self.method_queue, + replay_queue=self.replay_queue, + dt=int(time.time()), + agent=self.agent) + + def get_result(self, msg=None): + logger.info('return_queue: {}'.format(self.return_queue)) + if (self.return_queue is None or self.return_queue + == 1) and vul_recheck_state(self.agent_id): + try: + project_agents = IastAgent.objects.values_list( + 'id', flat=True).filter( + bind_project_id=self.agent.bind_project_id, + language=self.agent.language).union( + addtional_agenti_ids_query_filepath_simhash( + self.agent.filepathsimhash, + language=self.agent.language), + addtional_agent_ids_query_deployway_and_path( + self.agent.servicetype, + self.agent.server.path, + self.agent.server.hostname, + language=self.agent.language)) + project_agents = list(project_agents) + if project_agents is None: + logger.info(_('There is no probe under the project')) + logger.info(f"project_agent_ids : {project_agents}") + replay_queryset = IastReplayQueue.objects.values( + 'id', 'relation_id', 'uri', 'method', 'scheme', 'header', + 'params', 'body', 'replay_type').filter( + agent_id__in=project_agents, + state__in=[const.WAITING, const.SOLVING])[:200] + if len(replay_queryset) == 0: + logger.info(_('Replay request does not exist')) + + (success_ids, success_vul_ids, failure_ids, failure_vul_ids, + replay_requests) = ([], [], [], [], []) + for replay_request in replay_queryset: + if replay_request['uri']: + replay_requests.append(replay_request) + success_ids.append(replay_request['id']) + if replay_request['replay_type'] == const.VUL_REPLAY: + 
success_vul_ids.append( + replay_request['relation_id']) + else: + failure_ids.append(replay_request['id']) + if replay_request['replay_type'] == const.VUL_REPLAY: + failure_vul_ids.append( + replay_request['relation_id']) + + timestamp = int(time.time()) + IastReplayQueue.objects.filter(id__in=success_ids, + state=const.SOLVING).update( + update_time=timestamp, + state=const.SOLVED) + IastReplayQueue.objects.filter(id__in=success_ids, + state=const.WAITING).update( + update_time=timestamp, + state=const.SOLVING) + IastReplayQueue.objects.filter(id__in=failure_ids).update(update_time=timestamp, state=const.SOLVED) + + IastVulnerabilityModel.objects.filter(id__in=success_vul_ids).update(latest_time=timestamp, status_id=2) + IastVulnerabilityModel.objects.filter(id__in=failure_vul_ids).update(latest_time=timestamp, status_id=1) + logger.info(_('Reproduction request issued successfully')) + logger.debug([i['id'] for i in replay_requests]) + return replay_requests + except Exception as e: + logger.info( + _('Replay request query failed, reason: {}').format(e), + exc_info=True) + + return list() + + def save(self): + self.save_heartbeat() + + +def get_k8s_deployment_id(hostname: str) -> str: + return hostname[hostname.rindex('-')] + + +def addtional_agent_ids_query_deployway_and_path(deployway: str, path: str, + hostname: str, + language: str) -> QuerySet: + if deployway == 'k8s': + deployment_id = get_k8s_deployment_id(hostname) + logger.info(f'deployment_id : {deployment_id}') + server_q = Q(server__hostname__startswith=deployment_id) & Q( + server__path=path) & Q(server__path='') & ~Q(server__hostname='') + elif deployway == 'docker': + server_q = Q(server__path=path) & ~Q(server__path='') + else: + server_q = Q(server__path=str(path)) & Q(server__hostname=str( + hostname)) & ~Q(server__path='') & ~Q(server__hostname='') + final_q = server_q & Q(language=language) + return IastAgent.objects.filter(final_q).values_list('id', flat=True) + + +def addtional_agenti_ids_query_filepath_simhash(filepathsimhash: str, + language: str) -> QuerySet: + return IastAgent.objects.filter(filepathsimhash=filepathsimhash, + language=language).values_list('id', + flat=True) + + +def get_project_vul_validation_state(agent_id): + state = IastAgent.objects.filter(pk=agent_id).values_list( + 'bind_project__vul_validation', flat=True).first() + if state is None: + state = VulValidation.FOLLOW_GLOBAL + return state + + +def vul_recheck_state(agent_id): + project_level_validation = get_project_vul_validation_state(agent_id) + global_state = get_vul_validate() + if project_level_validation == VulValidation.FOLLOW_GLOBAL: + return global_state + elif project_level_validation == VulValidation.ENABLE: + return True + else: + return False diff --git a/dongtai_protocol/report/handler/narmal_vul_handler.py b/dongtai_protocol/report/handler/narmal_vul_handler.py new file mode 100644 index 000000000..76562e3ba --- /dev/null +++ b/dongtai_protocol/report/handler/narmal_vul_handler.py @@ -0,0 +1,194 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/4/27 下午2:48 +# project: dongtai-openapi + +import json +import logging +import random +import time +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.project import IastProject +from dongtai_common.utils import const + +from dongtai_conf import settings +from 
dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler +from dongtai_web.vul_log.vul_log import log_vul_found +from dongtai_common.models.agent import IastAgent + +logger = logging.getLogger('dongtai.openapi') + + +class BaseVulnHandler(IReportHandler): + def __init__(self): + super().__init__() + self.app_name = None + self.http_uri = None + self.http_url = None + self.http_query_string = None + self.http_header = None + self.http_method = None + self.http_scheme = None + self.http_secure = None + self.http_protocol = None + self.vuln_type = None + self.app_caller = None + self.taint_value = None + self.client_ip = None + + @staticmethod + def create_top_stack(obj): + stack = f'{obj["classname"]}.{obj["methodname"]}({obj["in"]})' + return stack + + @staticmethod + def create_bottom_stack(obj): + stack = f'{obj["classname"]}.{obj["methodname"]}("{obj["in"]}")' + return stack + + def get_vul_info(self): + level_id = 0 + vul_type = self.vuln_type + vul_type_enable = 'disable' + hook_type_id = 0 + strategy_id = 0 + # 根据用户ID判断获取策略中的漏洞等级 + hook_type = HookType.objects.values('id', 'enable').filter(value=vul_type).first() + if hook_type: + hook_type_id = hook_type.get('id', 0) + vul_type_enable = hook_type.get('enable', 0) + strategy = IastStrategyModel.objects.values('level_id','id').filter(hook_type_id=hook_type_id).first() + if strategy: + level_id = strategy.get('level_id', 4) + strategy_id = strategy.get('id',0) + + return level_id, vul_type, vul_type_enable, hook_type_id, strategy_id + + @staticmethod + def get_command(envs): + for env in envs: + if 'sun.java.command' in env.lower(): + return '='.join(env.split('=')[1:]) + return '' + + @staticmethod + def get_runtime(envs): + for env in envs: + if 'java.runtime.name' in env.lower(): + return '='.join(env.split('=')[1:]) + return '' + + def parse(self): + + self.server_name = self.detail.get('serverName') + self.server_port = self.detail.get('serverPort') + self.server_env = self.detail.get('serverEnv') + self.hostname = self.detail.get('hostname') + self.agent_version = self.detail.get('agentVersion') + self.app_name = self.detail.get('appName') + self.app_path = self.detail.get('contextPath') + self.http_uri = self.detail.get('uri') + self.http_url = self.detail.get('url') + self.http_query_string = self.detail.get('queryString') + self.http_header = self.detail.get('reqHeader') + self.http_req_data = self.detail.get('reqBody') + self.http_method = self.detail.get('method') + self.http_scheme = self.detail.get('scheme') + self.http_secure = self.detail.get('secure') + self.http_protocol = self.detail.get('protocol') + self.vuln_type = self.detail.get('vulnType') + self.app_caller = self.detail.get('appCaller') + self.taint_value = self.detail.get('taintValue') + self.taint_position = self.detail.get('taintPosition') + self.client_ip = self.detail.get('clientIp') + self.param_name = self.detail.get('paramName') + self.container = self.detail.get('container') + self.container_path = self.detail.get('containerPath') + self.http_replay = self.detail.get('replayRequest') + self.http_res_header = self.detail.get('resHeader') + self.http_res_body = self.detail.get('resBody') + + +@ReportHandler.register(const.REPORT_VULN_NORNAL) +class NormalVulnHandler(BaseVulnHandler): + + def save(self): + logger.info("NormalVulnHandler start") + logger.info( + f"vuln_type: {self.vuln_type} vuln_type: {self.http_uri} agent_id: {self.agent_id}" + ) + if 
self.http_replay: + return + + level_id, vul_type, vul_type_enable, hook_type_id, strategy_id = self.get_vul_info( + ) + logger.info("get_vul_info start") + logger.info( + f"{level_id} {vul_type} {vul_type_enable} {hook_type_id} {strategy_id}" + ) + if vul_type_enable == 0: + return + project_agents = IastAgent.objects.filter( + project_version_id=self.agent.project_version_id) + iast_vul = IastVulnerabilityModel.objects.filter( + strategy_id=strategy_id, + uri=self.http_uri, + http_method=self.http_method, + agent__in=project_agents).order_by('-latest_time').first() + project = IastProject.objects.filter( + pk=self.agent.bind_project_id).first() + if project: + project.update_latest() + timestamp = int(time.time()) + if iast_vul: + iast_vul.url = self.http_url + iast_vul.req_header = self.http_header + iast_vul.req_params = self.http_query_string + iast_vul.res_header = self.http_res_header + iast_vul.res_body = self.http_res_body + iast_vul.full_stack = json.dumps(self.app_caller) + iast_vul.top_stack = self.app_caller[1] + iast_vul.bottom_stack = self.app_caller[0] + iast_vul.counts = iast_vul.counts + 1 + iast_vul.latest_time = timestamp + iast_vul.status_id = settings.CONFIRMED + iast_vul.save() + else: + iast_vul = IastVulnerabilityModel.objects.create( + strategy_id=strategy_id, + hook_type_id=hook_type_id, + level_id=level_id, + url=self.http_url, + uri=self.http_uri, + http_method=self.http_method, + http_scheme=self.http_scheme, + http_protocol=self.http_protocol, + req_header=self.http_header, + req_params=self.http_query_string, + req_data=self.http_req_data, + res_header=self.http_res_header, + res_body=self.http_res_body, + agent=self.agent, + context_path=self.app_path, + counts=1, + status_id=settings.CONFIRMED, + first_time=timestamp, + latest_time=timestamp, + client_ip=self.client_ip, + full_stack=json.dumps(self.app_caller), + top_stack=self.app_caller[0], + bottom_stack=self.app_caller[-1]) + log_vul_found(iast_vul.agent.user_id, iast_vul.agent.bind_project.name, + iast_vul.agent.bind_project_id, iast_vul.id, + iast_vul.strategy.vul_name) + IastVulnerabilityModel.objects.filter( + strategy_id=strategy_id, + uri=self.http_uri, + http_method=self.http_method, + agent__in=project_agents, + pk__lt=iast_vul.id, + ).delete() diff --git a/apiserver/report/handler/over_power_handler.py b/dongtai_protocol/report/handler/over_power_handler.py similarity index 94% rename from apiserver/report/handler/over_power_handler.py rename to dongtai_protocol/report/handler/over_power_handler.py index f055c8d5c..355ccf4aa 100644 --- a/apiserver/report/handler/over_power_handler.py +++ b/dongtai_protocol/report/handler/over_power_handler.py @@ -8,13 +8,16 @@ import json import time -from dongtai_models.models.iast_overpower_user import IastOverpowerUserAuth -from dongtai_models.models.iast_vul_overpower import IastVulOverpower -from dongtai_models.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.iast_overpower_user import IastOverpowerUserAuth +from dongtai_common.models.iast_vul_overpower import IastVulOverpower +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.utils import const -from apiserver.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler +@ReportHandler.register(const.REPORT_VULN_OVER_POWER) class OverPowerHandler(IReportHandler): def 
parse(self): @@ -50,7 +53,6 @@ def parse(self): self.x_trace_id = self.detail.get('x-trace-id') self.cookie = self.detail.get('cookie') self.sql = self.detail.get('sql') - self.language = self.detail.get('language') def save(self): # 检查trace_id是否存于数据库中 @@ -117,7 +119,6 @@ def save(self): server_name=self.server_name, counts=1, status='已上报', - language=self.language, first_time=int(time.time()), latest_time=int(time.time()) ).save() diff --git a/dongtai_protocol/report/handler/report_handler_interface.py b/dongtai_protocol/report/handler/report_handler_interface.py new file mode 100644 index 000000000..119c7cc2e --- /dev/null +++ b/dongtai_protocol/report/handler/report_handler_interface.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/10/30 10:31 +# software: PyCharm +# project: webapi +import logging + +from django.db.models import Q +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ + +logger = logging.getLogger('dongtai.openapi') + + +class IReportHandler: + def __init__(self): + self._report = None + self._detail = None + self._user_id = None + self.agent_id = None + self.project_name = None + self.agent = None + + @property + def report(self): + return self._report + + @report.setter + def report(self, reports): + self._report = reports + + @property + def detail(self): + return self._detail + + @detail.setter + def detail(self, detail): + self._detail = detail + + @property + def user_id(self): + return self._user_id + + @user_id.setter + def user_id(self, user_id): + self._user_id = user_id + + def common_header(self): + self.detail = self.report.get('detail') + self.agent_id = self.detail.get('agentId') + + def has_permission(self): + self.agent = self.get_agent(agent_id=self.agent_id) + logger.info( + f"report_type : {self.report.get('type',0)} agent_id: {self.agent_id} has_permission: {'YES' if self.agent else 'No'}" + ) + return self.agent + + def parse(self): + pass + + def save(self): + pass + + def get_result(self, msg=None): + return msg if msg else '' + + def handle(self, report, user): + logger.info(_('[{}] Report resolution start').format(self.__class__.__name__)) + self.report = report + # print(self._user_id) + self.user_id = user + self.common_header() + if self.has_permission(): + self.parse() + self.save() + logger.info( + _('[{classname}] Report Analysis Completed').format( + classname=self.__class__.__name__)) + return self.get_result() + else: + logger.info( + _( + '[{classname}] report resolution failed, Agent does not exist or no right to access, report data: {report}'). 
+ format(classname=self.__class__.__name__, report=self.report)) + return 'no permission' + + def get_project_agents(self, agent): + if agent.bind_project_id != 0: + agents = IastAgent.objects.filter( + Q(project_name=self.project_name) + | Q(bind_project_id=agent.bind_project_id), + online=1, + user=self.user_id, + project_version_id=agent.project_version_id) + else: + agents = IastAgent.objects.filter(project_name=agent.project_name, user=self.user_id) + return agents + + def get_agent(self, agent_id): + return IastAgent.objects.filter(id=agent_id, online=1, user=self.user_id).first() diff --git a/dongtai_protocol/report/handler/saas_method_pool_handler.py b/dongtai_protocol/report/handler/saas_method_pool_handler.py new file mode 100644 index 000000000..5d9f82dd9 --- /dev/null +++ b/dongtai_protocol/report/handler/saas_method_pool_handler.py @@ -0,0 +1,518 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/5 下午12:36 +# software: PyCharm +# project: lingzhi-webapi +import json +import logging +import random +import time +import uuid +from hashlib import sha256,sha1 + +import requests +from django.db import transaction + +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.replay_method_pool import IastAgentMethodPoolReplay +from dongtai_common.models.replay_queue import IastReplayQueue +from dongtai_common.utils import const +from dongtai_common.models.res_header import ( + ProjectSaasMethodPoolHeader, + HeaderType, +) +from dongtai_engine.tasks import search_vul_from_method_pool, search_vul_from_replay_method_pool +from dongtai_conf import settings +from dongtai_protocol import utils +from dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler +import gzip +import base64 +from typing import Tuple +from django.core.cache import cache +logger = logging.getLogger('dongtai.openapi') + + +@ReportHandler.register(const.REPORT_VULN_SAAS_POOL) +class SaasMethodPoolHandler(IReportHandler): + def __init__(self): + super(SaasMethodPoolHandler, self).__init__() + self.async_send = settings.config.getboolean('task', 'async_send', fallback=False) + self.async_send_delay = settings.config.getint('task', 'async_send_delay', fallback=2) + self.retryable = settings.config.getboolean('task', 'retryable', fallback=False) + + if self.async_send and (ReportHandler.log_service_disabled or ReportHandler.log_service is None): + logger.warning('log service disabled or failed to connect, disable async send method pool') + self.async_send = False + else: + self.log_service = ReportHandler.log_service + + @staticmethod + def parse_headers(headers_raw): + headers = dict() + header_raw = base64.b64decode(headers_raw).decode('utf-8').split('\n') + item_length = len(header_raw) + for index in range(item_length): + _header_list = header_raw[index].split(':') + _header_name = _header_list[0] + headers[_header_name] = ':'.join(_header_list[1:]) + return headers + + def parse(self): + self.version = self.report.get('version', 'v1') + self.http_uri = self.detail.get('uri') + self.http_url = self.detail.get('url') + self.http_query_string = self.detail.get('queryString') + self.http_req_data = self.detail.get('reqBody') + self.http_req_header = self.detail.get('reqHeader') + self.http_method = self.detail.get('method') + self.http_scheme = self.detail.get('scheme') + self.http_secure = self.detail.get('secure') + self.http_protocol = 
self.detail.get('protocol') + self.http_replay = self.detail.get('replayRequest') + self.http_res_header = self.detail.get('resHeader') + self.http_res_body = self.detail.get('resBody') + self.context_path = self.detail.get('contextPath') + self.client_ip = self.detail.get('clientIp') + self.method_pool = self.report.get('detail', {}).get('pool', None) + if self.method_pool: + self.method_pool = sorted(self.method_pool, + key=lambda e: e.__getitem__('invokeId'), + reverse=True) + logger.info( + f"start record method_pool : {self.agent_id} {self.http_uri} {self.http_method}" + ) + + def save(self): + """ + 如果agent存在,保存数据 + :return: + """ + headers = SaasMethodPoolHandler.parse_headers(self.http_req_header) + save_project_header(headers.keys(), self.agent_id) + add_new_api_route(self.agent_id, self.http_uri, self.http_method) + import base64 + params_dict = get_params_dict(base64.b64decode(self.http_req_header), + self.http_req_data, + self.http_query_string) + update_api_route_deatil(self.agent_id, self.http_uri, self.http_method, + params_dict) + if self.http_replay: + # 保存数据至重放请求池 + replay_id = headers.get('dongtai-replay-id') + replay_type = headers.get('dongtai-replay-type') + relation_id = headers.get('dongtai-relation-id') + timestamp = int(time.time()) + + # fixme 直接查询replay_id是否存在,如果存在,直接覆盖 + query_set = IastAgentMethodPoolReplay.objects.values("id").filter( + replay_id=replay_id) + if query_set.exists(): + # 更新 + replay_model = query_set.first() + replay_model.update( + url=self.http_url, + uri=self.http_uri, + req_header=self.http_req_header, + req_params=self.http_query_string, + req_data=self.http_req_data, + res_header=self.http_res_header, + res_body=decode_content( + get_res_body(self.http_res_body, self.version), + get_content_encoding(self.http_res_header), + self.version), + context_path=self.context_path, + method_pool=json.dumps(self.method_pool), + clent_ip=self.client_ip, + update_time=timestamp) + method_pool_id = replay_model['id'] + else: + # 新增 + replay_model = IastAgentMethodPoolReplay.objects.create( + agent=self.agent, + url=self.http_url, + uri=self.http_uri, + http_method=self.http_method, + http_scheme=self.http_scheme, + http_protocol=self.http_protocol, + req_header=self.http_req_header, + req_params=self.http_query_string, + req_data=self.http_req_data, + res_header=self.http_res_header, + res_body=decode_content( + get_res_body(self.http_res_body, self.version), + get_content_encoding(self.http_res_header), + self.version), + context_path=self.context_path, + method_pool=json.dumps(self.method_pool), + clent_ip=self.client_ip, + replay_id=replay_id, + replay_type=replay_type, + relation_id=relation_id, + create_time=timestamp, + update_time=timestamp) + method_pool_id = replay_model.id + IastReplayQueue.objects.filter(id=replay_id).update( + state=const.SOLVED) + if method_pool_id: + logger.info(f"send replay method pool {self.agent_id} {self.http_uri} {method_pool_id} to celery ") + self.send_to_engine(method_pool_id=method_pool_id, + model='replay') + else: + pool_sign = uuid.uuid4().hex + if self.async_send: + try: + method_pool = self.to_json(pool_sign) + ok = self.log_service.send(method_pool) + if ok: + self.send_to_engine(method_pool_sign=pool_sign) + except Exception as e: + logger.warning(e, exc_info=True) + else: + current_version_agents = self.get_project_agents(self.agent) + with transaction.atomic(): + try: + update_record, method_pool = self.save_method_call( + pool_sign, current_version_agents) + except Exception as e: + logger.info( + 
f"record method failed : {self.agent_id} {self.http_uri} {self.http_method}" + ) + logger.warning(e, exc_info=True) + try: + logger.info(f"send normal method pool {self.agent_id} {self.http_uri} {pool_sign} to celery ") + self.send_to_engine(method_pool_sign=pool_sign, + update_record=update_record) + except Exception as e: + logger.warning(e, exc_info=True) + + def to_json(self, pool_sign: str): + timestamp = int(time.time()) + pool = { + 'agent_id': self.agent_id, + 'url': self.http_url, + 'uri': self.http_uri, + 'http_method': self.http_method, + 'http_scheme': self.http_scheme, + 'http_protocol': self.http_protocol, + 'req_header': self.http_req_header, + 'req_params': self.http_query_string, + 'req_data': self.http_req_data, + 'req_header_for_search': utils.build_request_header(req_method=self.http_method, + raw_req_header=self.http_req_header, + uri=self.http_uri, + query_params=self.http_query_string, + http_protocol=self.http_protocol), + 'res_header': utils.base64_decode(self.http_res_header), + 'res_body': decode_content(get_res_body(self.http_res_body, self.version), + get_content_encoding(self.http_res_header), self.version), + 'context_path': self.context_path, + 'method_pool': json.dumps(self.method_pool), + 'pool_sign': pool_sign, + 'clent_ip': self.client_ip, + 'create_time': timestamp, + 'update_time': timestamp, + 'uri_sha1': self.sha1(self.http_uri), + 'user_id': self.agent.user_id, + 'bind_project_id': self.agent.bind_project_id, + 'project_version_id': self.agent.project_version_id, + 'language': self.agent.language, + 'agent_id': self.agent.id, + } + return json.dumps(pool) + + def save_method_call(self, pool_sign: str, + current_version_agents) -> Tuple[bool, MethodPool]: + """ + 保存方法池数据 + :param pool_sign: + :param current_version_agents: + :return: + """ + # todo need to del + # pool_sign = random.sample('zyxwvutsrqmlkjihgfedcba',5) + method_pool = MethodPool.objects.filter( + pool_sign=pool_sign, agent__in=current_version_agents).first() + update_record = True + if method_pool: + method_pool.update_time = int(time.time()) + method_pool.method_pool = json.dumps(self.method_pool) + method_pool.uri = self.http_uri + method_pool.url = self.http_url + method_pool.http_method = self.http_method + method_pool.req_header = self.http_req_header + method_pool.req_params = self.http_query_string + method_pool.req_data = self.http_req_data + method_pool.req_header_fs = utils.build_request_header( + req_method=self.http_method, + raw_req_header=self.http_req_header, + uri=self.http_uri, + query_params=self.http_query_string, + http_protocol=self.http_protocol) + method_pool.res_header = utils.base64_decode(self.http_res_header) + method_pool.res_body = decode_content( + get_res_body(self.http_res_body, self.version), + get_content_encoding(self.http_res_header), self.version) + method_pool.uri_sha1 = self.sha1(self.http_uri) + method_pool.save(update_fields=[ + 'update_time', + 'method_pool', + 'uri', + 'url', + 'http_method', + 'req_header', + 'req_params', + 'req_data', + 'req_header_fs', + 'res_header', + 'res_body', + 'uri_sha1', + ]) + else: + # 获取agent + update_record = False + timestamp = int(time.time()) + method_pool = MethodPool.objects.create( + agent=self.agent, + url=self.http_url, + uri=self.http_uri, + http_method=self.http_method, + http_scheme=self.http_scheme, + http_protocol=self.http_protocol, + req_header=self.http_req_header, + req_params=self.http_query_string, + req_data=self.http_req_data, + req_header_fs=utils.build_request_header( + 
req_method=self.http_method, + raw_req_header=self.http_req_header, + uri=self.http_uri, + query_params=self.http_query_string, + http_protocol=self.http_protocol), + res_header=utils.base64_decode(self.http_res_header), + res_body = decode_content( + get_res_body(self.http_res_body, self.version), + get_content_encoding(self.http_res_header),self.version), + context_path=self.context_path, + method_pool=json.dumps(self.method_pool), + pool_sign=pool_sign, + clent_ip=self.client_ip, + create_time=timestamp, + update_time=timestamp, + uri_sha1=self.sha1(self.http_uri), + ) + return update_record, method_pool + + def send_to_engine(self, method_pool_id="", method_pool_sign="", update_record=False, model=None): + try: + if model is None: + logger.info( + f'[+] send method_pool [{method_pool_sign}] to engine for {"update" if update_record else "new record"}') + delay = 0 + if self.async_send: + delay = self.async_send_delay + kwargs = { + 'method_pool_sign': method_pool_sign, + 'agent_id': self.agent_id, + 'retryable': self.retryable, + } + res = search_vul_from_method_pool.apply_async(kwargs=kwargs, countdown=delay) + logger.info( + f'[+] send method_pool [{method_pool_sign}] to engine for task search_vul_from_method_pool id: {res.task_id}') + else: + logger.info( + f'[+] send method_pool [{method_pool_id}] to engine for {model if model else ""}' + ) + res = search_vul_from_replay_method_pool.delay(method_pool_id) + logger.info( + f'[+] send method_pool [{method_pool_id}] to engine for task search_vul_from_replay_method_pool id: {res.task_id}' + ) + #requests.get(url=settings.REPLAY_ENGINE_URL.format(id=method_pool_id)) + except Exception as e: + logger.warning(f'[-] Failure: send method_pool [{method_pool_id}{method_pool_sign}], Error: {e}') + + def calc_hash(self): + sign_raw = '-'.join( + filter(lambda x: x, [ + getattr(self, i, '') + for i in ('http_uri', 'http_method', 'http_req_header', + 'http_req_params', 'http_req_data') + ])) + for method in self.method_pool: + sign_raw += f"{method.get('className')}.{method.get('methodName')}()->" + sign_sha256 = self.sha256(sign_raw) + return sign_sha256 + + @staticmethod + def sha1(raw): + h = sha1() + h.update(raw.encode('utf-8')) + return h.hexdigest() + + @staticmethod + def sha256(raw): + h = sha256() + h.update(raw.encode('utf-8')) + return h.hexdigest() + + +from dongtai_common.models.api_route import (IastApiRoute, IastApiMethod, + FromWhereChoices) +from django.db.utils import IntegrityError + + +def save_project_header(keys: list, agent_id: int): + uuid_key = uuid.uuid4().hex + keys = list( + filter( + lambda key: uuid_key == cache.get_or_set( + f'project_header-{agent_id}-{key}', uuid_key, 60 * 5), keys)) + objs = [ + ProjectSaasMethodPoolHeader(key=key, + agent_id=agent_id, + header_type=HeaderType.REQUEST) + for key in keys + ] + if not keys: + return + ProjectSaasMethodPoolHeader.objects.bulk_create(objs, + ignore_conflicts=True) + + +def add_new_api_route(agent_id, path, method): + logger.info(f"{agent_id}, {path}, {method}") + uuid_key = uuid.uuid4().hex + is_api_cached = uuid_key != cache.get_or_set( + f'api_route-{agent_id}-{path}-{method}', uuid_key, 60 * 5) + if is_api_cached: + logger.info( + f"found cache api_route-{agent_id}-{path}-{method} ,skip its insert" + ) + return + try: + api_method, is_create = IastApiMethod.objects.get_or_create( + method=method.upper()) + api_route, is_create = IastApiRoute.objects.get_or_create( + from_where=FromWhereChoices.FROM_METHOD_POOL, + method_id=api_method.id, + path=path, + 
agent_id=agent_id) + + except IntegrityError as e: + logger.info(e) + + +from django.http.request import QueryDict + + +def get_params_dict(req_header, req_body, req_params): + try: + from dongtai_engine.filters.utils import parse_headers_dict_from_bytes + res = parse_headers_dict_from_bytes(req_header) + req_header_keys = list( + filter(lambda x: x.upper() == 'cookie', res.keys())) + except BaseException: + req_header_keys = [] + try: + from http.cookies import SimpleCookie + cookie = SimpleCookie() + cookie.load(res['cookie']) + cookie_keys = list(cookie.keys()) + except BaseException: + cookie_keys = [] + try: + body_keys = list(json.loads(req_body).keys()) + except BaseException: + body_keys = [] + try: + query_keys = list(QueryDict(req_params).keys()) + except BaseException: + query_keys = [] + return { + 'header': req_header_keys, + "cookie": cookie_keys, + "jsonbody": body_keys, + "query": query_keys + } + + +def update_api_route_deatil(agent_id, path, method, params_dict): + annotation_dict = { + 'query': 'GET请求参数', + 'cookie': 'Cookie参数', + 'header': 'Header参数', + 'jsonbody': 'POST的json参数' + } + api_method, is_create = IastApiMethod.objects.get_or_create( + method=method.upper()) + api_route = IastApiRoute.objects.filter(agent_id=agent_id, + path=path, + method_id=api_method.id).first() + for key, value in params_dict.items(): + annotation = annotation_dict[key] + for param_name in value: + single_insert(api_route.id, param_name, annotation) + + +from dongtai_common.models.api_route import IastApiParameter + + + +def single_insert(api_route_id, param_name, annotation) -> None: + logger.info(f"{api_route_id}, {param_name}, {annotation}") + uuid_key = uuid.uuid4().hex + is_api_cached = uuid_key != cache.get_or_set( + f'api_route_param-{api_route_id}-{param_name}', uuid_key, 60 * 5) + if is_api_cached: + logger.info( + f"found cache api_route_param-{api_route_id}-{param_name}-{annotation} ,skip its insert" + ) + return + try: + param, _ = IastApiParameter.objects.get_or_create( + route_id=api_route_id, + name=param_name, + defaults={'annotation': annotation}) + except IntegrityError as e: + logger.info(e) + +def decode_content(body, content_encoding, version): + if version == 'v1': + return body + if content_encoding == 'gzip': + try: + return gzip.decompress(body).decode('utf-8') + except BaseException: + logger.warning('not gzip type but using gzip as content_encoding') + # TODO not content_encoding + if content_encoding: + logger.info('not found content_encoding :{}'.format(content_encoding)) + try: + return body.decode('utf-8') + except BaseException: + logger.info('decode_content, {}'.format(body)) + logger.info('utf-8 decode failed, use raw ') + return body.decode('raw_unicode_escape') + + +def get_content_encoding(b64_res_headers): + res_headers = utils.base64_decode(b64_res_headers) + for header in res_headers.split('\n'): + try: + k, v = [i.strip().lower() for i in header.split(':')] + if k == "content-encoding": + if 'gzip' in v: + return 'gzip' + break + except BaseException: + pass + return '' + + +def get_res_body(res_body, version): + if version == 'v1': + return res_body # bytes + elif version == 'v2': + return base64.b64decode(res_body) # bytes + logger.info('no match version now version: {}'.format(version)) + return res_body diff --git a/dongtai_protocol/report/handler/sca_handler.py b/dongtai_protocol/report/handler/sca_handler.py new file mode 100644 index 000000000..7a27ab0cc --- /dev/null +++ b/dongtai_protocol/report/handler/sca_handler.py @@ -0,0 +1,70 @@ 
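The method-pool handler above recovers the response body by combining the report version with the response Content-Encoding (get_res_body, get_content_encoding, decode_content). A minimal standalone sketch of that decode path, with fabricated sample values, might look like the following; it is an illustration only, not part of the patch:

import base64
import gzip


def recover_res_body(b64_res_headers: str, res_body: str, version: str) -> str:
    # Mirror get_content_encoding: scan the base64-decoded headers for gzip.
    headers = base64.b64decode(b64_res_headers).decode('utf-8')
    encoding = ''
    for line in headers.split('\n'):
        if line.lower().startswith('content-encoding') and 'gzip' in line.lower():
            encoding = 'gzip'
            break
    if version == 'v1':
        return res_body  # v1 reports carry the body as-is
    body = base64.b64decode(res_body)  # v2 reports carry base64-encoded bytes
    if encoding == 'gzip':
        body = gzip.decompress(body)
    return body.decode('utf-8')


# Fabricated example values for demonstration:
raw = gzip.compress(b'{"ok": true}')
print(recover_res_body(
    base64.b64encode(b'Content-Encoding: gzip').decode(),
    base64.b64encode(raw).decode(),
    'v2'))  # -> {"ok": true}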
+#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/10/23 11:55 +# software: PyCharm +# project: webapi +import json +import logging + +from dongtai_common.utils import const +from django.utils.translation import gettext_lazy as _ +from dongtai_web.dongtai_sca.scan.utils import update_one_sca +from dongtai_protocol.report.handler.report_handler_interface import IReportHandler +from dongtai_protocol.report.report_handler_factory import ReportHandler +import requests +from dongtai_conf import settings + +logger = logging.getLogger('dongtai.openapi') + + +@ReportHandler.register(const.REPORT_SCA) +class ScaHandler(IReportHandler): + + def parse(self): + self.package_path = self.detail.get('packagePath') + self.package_signature = self.detail.get('packageSignature') + self.package_name = self.detail.get('packageName') + self.package_algorithm = self.detail.get('packageAlgorithm') + self.package_version = self.detail.get('packageVersion', '') + + @staticmethod + def send_to_engine(agent_id, package_path, package_signature, package_name, package_algorithm, package_version): + try: + + logger.info( + f'[+] 处理SCA请求[{agent_id}, {package_path}, {package_signature}, {package_name}, {package_algorithm} {package_version}]正在下发扫描任务') + update_one_sca.delay(agent_id, package_path, package_signature, package_name, package_algorithm, package_version) + logger.info( + f'[+] 处理SCA请求[{agent_id}, {package_path}, {package_signature}, {package_name}, {package_algorithm} {package_version}]任务下发完成') + except Exception as e: + logger.info( + f'[-] Failure: sca package [{agent_id} {package_path} {package_signature} {package_name} {package_algorithm} {package_version}], Error: {e}') + + def save(self): + if all([self.agent_id, self.package_path, self.package_name]) is False: + logger.warning(_("Data is incomplete, data: {}").format(json.dumps(self.report))) + else: + # post to dongtai engine async deal + ScaHandler.send_to_engine(self.agent_id, self.package_path, self.package_signature, self.package_name, + self.package_algorithm, self.package_version) + + +@ReportHandler.register(const.REPORT_SCA + 1) +class ScaBulkHandler(ScaHandler): + def parse(self): + self.packages = self.detail.get('packages') + self.package_path = self.detail.get('packagePath') + self.package_signature = self.detail.get('packageSignature') + self.package_name = self.detail.get('packageName') + self.package_algorithm = self.detail.get('packageAlgorithm') + self.package_version = self.detail.get('packageVersion', '') + + def save(self): + for package in self.packages: + self.package_path = package.get('packagePath', None) + self.package_signature = package.get('packageSignature', None) + self.package_name = package.get('packageName', None) + self.package_algorithm = package.get('packageAlgorithm', None) + self.package_version = package.get('packageVersion', '') + super().save() diff --git a/dongtai_protocol/report/handler/utils.py b/dongtai_protocol/report/handler/utils.py new file mode 100644 index 000000000..139597f9c --- /dev/null +++ b/dongtai_protocol/report/handler/utils.py @@ -0,0 +1,2 @@ + + diff --git a/dongtai_protocol/report/log_service.py b/dongtai_protocol/report/log_service.py new file mode 100644 index 000000000..e0d52709f --- /dev/null +++ b/dongtai_protocol/report/log_service.py @@ -0,0 +1,48 @@ +import logging +import socket + +logger = logging.getLogger('dongtai.openapi') + + +class LogService: + def __init__(self, host, port): + super(LogService, self).__init__() + self.host = host + self.port = 
port + self.socket = None + + def create_socket(self): + if self.socket: + return + + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.settimeout(5) + try: + sock.connect((self.host, self.port)) + sock.setblocking(False) + self.socket = sock + return True + except OSError: + logger.error(f'failed to connect log service {self.host}:{self.port}') + self.socket = None + sock.close() + return False + + def __del__(self): + if self.socket: + self.socket.close() + self.socket = None + + def send(self, message): + try: + if not self.socket: + self.create_socket() + if self.socket: + self.socket.sendall(bytes(message + "\n", encoding='utf-8'), socket.MSG_DONTWAIT) + return True + except Exception as e: + logger.error('failed to send message to log service', exc_info=e) + if self.socket: + self.socket.close() + self.socket = None + return False diff --git a/dongtai_protocol/report/report_handler_factory.py b/dongtai_protocol/report/report_handler_factory.py new file mode 100644 index 000000000..849c43501 --- /dev/null +++ b/dongtai_protocol/report/report_handler_factory.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/10/23 12:00 +# software: PyCharm +# project: webapi +import logging, requests, json, time +from django.utils.translation import gettext_lazy as _ +from dongtai_conf import settings +from dongtai_protocol.report.log_service import LogService +from dongtai_common.models.agent import IastAgent + +logger = logging.getLogger('dongtai.openapi') +jsonlogger = logging.getLogger('jsonlogger') + +class ReportHandler: + HANDLERS = {} + log_service = None + log_service_disabled = False + + # 注册handler到当前命名空间,后续进行异步处理数据 + @staticmethod + def handler(reports, user): + """ + 处理上传的报告,如果报告的类型不存在,则忽略本次上传; + 检查用户与agent的权限 + :param reports: + :return: + """ + try: + report_type = reports.get('type') + # 根据消息类型,转发上报到指定地址 + if report_type == 1: + isCoreInstalled = reports.get("detail",{}).get("isCoreInstalled", 0) + isCoreRunning = reports.get("detail",{}).get("isCoreRunning", 0) + agentId = reports.get("detail",{}).get("agentId", 0) + # is_core_running 0 未运行,1运行中,2已卸载 + if isCoreInstalled == 0: + is_core_running = 2 + IastAgent.objects.filter( + user=user, + id=agentId).update(actual_running_status=2) + else: + if isCoreRunning == 1: + is_core_running = 1 + IastAgent.objects.filter( + user=user, + id=agentId).update(actual_running_status=1) + else: + is_core_running = 0 + IastAgent.objects.filter( + user=user, + id=agentId).update(actual_running_status=2) + + IastAgent.objects.filter(user=user,id=agentId).update(is_core_running=is_core_running) + # web hook + # req = requests.post( + # settings.AGENT_ENGINE_URL.format(user_id=user.id, report_type=report_type), + # json=reports, + # timeout=60) + class_of_handler = ReportHandler.HANDLERS.get(report_type) + if class_of_handler is None: + if report_type in [1, 81, 33, 36, 17, 18, 97, 37]: + logger.error(_('Report type {} handler does not exist').format(report_type)) + return None + #if report_type == 36: + # jsonlogger.error('report', extra=reports) + result = class_of_handler().handle(reports, user) + return result + except Exception as e: + logger.error(e, exc_info=True) + return None + + @classmethod + def register(cls, handler_name): + + def wrapper(handler): + async_send = settings.config.getboolean('task', 'async_send', fallback=False) + if not async_send: + cls.log_service_disabled = True + if cls.log_service is None and not cls.log_service_disabled: + host = 
settings.config.get('log_service', 'host') + port = settings.config.getint('log_service', 'port') + if not host or not port: + logger.error('log service must config host and post') + cls.log_service_disabled = True + srv = LogService(host, port) + if srv.create_socket(): + cls.log_service = srv + + logger.info( + _('Registration report type {} handler {}').format( + handler_name, handler.__name__)) + if handler_name not in cls.HANDLERS: + cls.HANDLERS[handler_name] = handler + return handler + + return wrapper diff --git a/apiserver/serializers/__init__.py b/dongtai_protocol/serializers/__init__.py similarity index 100% rename from apiserver/serializers/__init__.py rename to dongtai_protocol/serializers/__init__.py diff --git a/apiserver/serializers/agent_properties.py b/dongtai_protocol/serializers/agent_properties.py similarity index 84% rename from apiserver/serializers/agent_properties.py rename to dongtai_protocol/serializers/agent_properties.py index 662710386..69b7275bc 100644 --- a/apiserver/serializers/agent_properties.py +++ b/dongtai_protocol/serializers/agent_properties.py @@ -4,7 +4,7 @@ # datetime:2021/1/14 下午2:59 # software: PyCharm # project: lingzhi-agent-server -from dongtai_models.models.agent_properties import IastAgentProperties +from dongtai_common.models.agent_properties import IastAgentProperties from rest_framework import serializers diff --git a/dongtai_protocol/tests.py b/dongtai_protocol/tests.py new file mode 100644 index 000000000..591bf14e7 --- /dev/null +++ b/dongtai_protocol/tests.py @@ -0,0 +1,110 @@ +from test.apiserver.test_agent_base import AgentTestCase,gzipdata +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_method_pool import MethodPool +import gzip +import base64 +from dongtai_protocol.report.report_handler_factory import ReportHandler +import json +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from result import Ok, Err, Result +import unittest + +class AgentMethodPoolUploadTestCase(AgentTestCase): + + def test_benchmark_agent_method_pool_upload(self): + data = [] + res = download_if_not_exist( + "https://huoqi-public.oss-cn-beijing.aliyuncs.com/iast/test_data/server.log", + "/tmp/test_apiserver_server.log") + with open('/tmp/test_apiserver_server.log') as f: + for line in f: + data.append(json.loads(line)) + for report in data: + report["detail"]["agentId"] = self.agent_id + del report["message"] + res = ReportHandler.handler(report, self.user) + assert res == "" + assert MethodPool.objects.filter( + url=report['detail']["url"]).exists() + assert MethodPool.objects.filter(agent_id=self.agent_id).count() == len(data) + + +import requests +from os.path import exists + + +def download_file(url, filepath): + with requests.get(url, stream=True) as r: + r.raise_for_status() + with open(filepath, 'wb') as f: + for chunk in r.iter_content(chunk_size=8192): + f.write(chunk) + return Ok() + + +def download_if_not_exist(url: str, path: str) -> Result: + if exists(path): + return Ok() + res = download_file(url, path) + return res + + +from dongtai_protocol.report.handler.heartbeat_handler import ( + addtional_agent_ids_query_deployway_and_path, + addtional_agenti_ids_query_filepath_simhash) +from dongtai_common.models.replay_queue import IastReplayQueue +from dongtai_common.utils import const +from dongtai_protocol.report.handler.heartbeat_handler import HeartBeatHandler + +class AgentHeartBeatTestCase(AgentTestCase): + + def test_agent_replay_queryset(self): + self.agent = 
IastAgent.objects.filter(pk=self.agent_id).first() + project_agents = IastAgent.objects.values_list('id', flat=True).filter( + bind_project_id=self.agent.bind_project_id, + language=self.agent.language).union( + addtional_agenti_ids_query_filepath_simhash( + self.agent.filepathsimhash, language=self.agent.language), + addtional_agent_ids_query_deployway_and_path( + self.agent.servicetype, + self.agent.server.path, + self.agent.server.hostname, + language=self.agent.language)) + replay_queryset = IastReplayQueue.objects.values( + 'id', 'relation_id', 'uri', 'method', 'scheme', 'header', + 'params', 'body', 'replay_type').filter( + agent_id__in=project_agents, + state__in=[const.WAITING, const.SOLVING])[:200] + + def test_agent_replay_queryset_result(self): + self.agent = IastAgent.objects.filter(pk=self.agent_id).first() + handler = HeartBeatHandler() + handler.agent = self.agent + handler.anget_id = self.agent_id + handler.return_queue = 1 + res1 = handler.get_result() + res2 = handler.get_result() + res3 = handler.get_result() + set1, set2, set3 = map(get_replay_id_set, [res1, res2, res3]) + assert set3.intersection(set1) == set([]) + + +def get_replay_id_set(replay_list: list) -> set: + return set([i['id'] for i in replay_list]) + + +import base64 + + +@unittest.skip("waiting for rebuild mock data") +class AgentSaasMethodPoolParseApiTestCase(AgentTestCase): + def test_api_parse(self): + mp = MethodPool.objects.filter(pk=500483715).first() + mp.req_header + headers_bytes = base64.b64decode(mp.req_header) + from dongtai_engine.filters.utils import parse_headers_dict_from_bytes + res = parse_headers_dict_from_bytes(headers_bytes) + from http.cookies import SimpleCookie + cookie = SimpleCookie() + cookie.load(res['cookie']) + print(cookie.keys()) diff --git a/dongtai_protocol/urls.py b/dongtai_protocol/urls.py new file mode 100644 index 000000000..82ec79c9a --- /dev/null +++ b/dongtai_protocol/urls.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/12 下午6:59 +# software: PyCharm +# project: lingzhi-agent-server + +# 报告接口:上传报告 +from django.urls import include, path + +from dongtai_protocol.views.agent_download import AgentDownload +from dongtai_protocol.views.agent_register import AgentRegisterEndPoint +from dongtai_protocol.views.agent_update import AgentUpdateEndPoint +from dongtai_protocol.views.engine_auto_deploy import AutoDeployEndPoint +from dongtai_protocol.views.engine_download import EngineDownloadEndPoint +from dongtai_protocol.views.engine_heartbeat import EngineHeartBeatEndPoint +from dongtai_protocol.views.engine_status import EngineUpdateEndPoint +from dongtai_protocol.views.engine_status import EngineAction +from dongtai_protocol.views.hook_profiles import HookProfilesEndPoint +from dongtai_protocol.views.properties import PropertiesEndPoint +from dongtai_protocol.views.report_upload import ReportUploadEndPoint +from dongtai_protocol.views.health import HealthView +from dongtai_protocol.views.health_oss import OSSHealthView +from dongtai_protocol.views.agent_limit import LimitView +from dongtai_protocol.views.startuptime import (StartupTimeEndPoint, + StartupTimeGzipEndPoint) +from dongtai_protocol.views.agent_config import ( + AgentConfigView, + AgentConfigv2View, +) +from dongtai_protocol.views.except_action import AgentActionV2EndPoint + +urlpatterns = [ + path('agent/download', AgentDownload.as_view()), + path('agent/limit', LimitView.as_view()), + path('agent/startuptime', StartupTimeEndPoint.as_view()), + 
path('agent/gzipstartuptime', StartupTimeGzipEndPoint.as_view()), + # agent get destroy strategy + path('agent/threshold', AgentConfigView.as_view()), + path('agent/thresholdv2', AgentConfigv2View.as_view()), + path('deploy/auto', AutoDeployEndPoint.as_view()), + path('engine/heartbeat', EngineHeartBeatEndPoint.as_view()), + path('engine/download', EngineDownloadEndPoint.as_view()), + path('agent/register', AgentRegisterEndPoint.as_view()), + path('agent/update', AgentUpdateEndPoint.as_view()), + path('engine/update', EngineUpdateEndPoint.as_view()), + path('engine/update/', EngineUpdateEndPoint.as_view()), + path('profiles', HookProfilesEndPoint.as_view()), + path('properties', PropertiesEndPoint.as_view()), + path('report/upload', ReportUploadEndPoint.as_view()), + path('engine/action', EngineAction.as_view()), + # todo 增加重放请求获取接口,用于后续逻辑漏洞/漏洞验证等功能,暂时先不实现 + path('health', HealthView.as_view()), + path('oss/health', OSSHealthView.as_view()), + path('except_action', + AgentActionV2EndPoint.as_view({'get': 'except_running_status'})), + path('actual_action', + AgentActionV2EndPoint.as_view({'post': 'actual_running_status'})), +] + +urlpatterns = [path('api/v1/', include(urlpatterns), name='OpenAPI'), ] diff --git a/dongtai_protocol/utils.py b/dongtai_protocol/utils.py new file mode 100644 index 000000000..73ab60258 --- /dev/null +++ b/dongtai_protocol/utils.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/6/1 上午9:53 +# project: dongtai-openapi + +# -*- coding: utf-8 -*- +import base64 +import logging + +import oss2 +from oss2.exceptions import NoSuchKey +from oss2.exceptions import RequestError + +from dongtai_conf import settings + +logger = logging.getLogger('dongtai.openapi') + + +class OssDownloader(object): + BUCKET_URL = 'https://oss-cn-beijing.aliyuncs.com' + BUCKET_NAME = 'dongtai' + + @staticmethod + def download_file_to_path(bucket_url, + bucket_name, + object_name, + local_file, + access_key='', + access_key_secret='', + anonymous=True): + """ + + :param access_key: + :param access_key_secret: + :param bucket_url: + :param bucket_name: + :param object_name: + :param local_file: + :return: + """ + try: + if anonymous: + auth = oss2.AnonymousAuth() + else: + auth = oss2.Auth(access_key, access_key_secret) + bucket = oss2.Bucket(auth, bucket_url, bucket_name) + bucket.get_object_to_file(object_name, local_file) + return True + except NoSuchKey as e: + # NoSuchKey表示oss云端文件不存在,通知管理员 + logger.error(f'oss download failure, reason: remote file not found, filename: {object_name}') + return False + except Exception as e: + logger.error(f'oss download failure, reason: {e}') + return False + + @staticmethod + def download_file(object_name, local_file): + return OssDownloader.download_file_to_path( # access_key=settings.ACCESS_KEY, + #access_key_secret=settings.ACCESS_KEY_SECRET, + bucket_url=OssDownloader.BUCKET_URL, + bucket_name=OssDownloader.BUCKET_NAME, + object_name=object_name, + local_file=local_file) + + +def base64_decode(raw): + try: + return base64.b64decode(raw).decode('utf-8').strip() + except Exception as decode_error: + logger.error(f'base64 decode error, raw: {raw}\nreason:{decode_error}') + return "" + + +def build_request_header(req_method, raw_req_header, uri, query_params, http_protocol): + decode_req_header = base64_decode(raw_req_header) + return f"{req_method} {uri + ('?' 
+ query_params if query_params else '')} {http_protocol}\n{decode_req_header}" + + +STATUSMAP = {True: 1, False: 0} + + +def updateossstatus(): + from dongtai_protocol.views.agent_download import JavaAgentDownload, PythonAgentDownload + from dongtai_protocol.views.engine_download import EngineDownloadEndPoint, PACKAGE_NAME_LIST + try: + status_, _ = checkossstatus() + if not status_: + return False, None + import shutil + shutil.rmtree('/tmp') + OssDownloader.download_file( + JavaAgentDownload.REMOTE_AGENT_FILE, + local_file=JavaAgentDownload.LOCAL_AGENT_FILE) + OssDownloader.download_file( + object_name=PythonAgentDownload.REMOTE_AGENT_FILE, + local_file=PythonAgentDownload.LOCAL_AGENT_FILE) + for package_name in PACKAGE_NAME_LIST: + EngineDownloadEndPoint.download_agent_jar( + EngineDownloadEndPoint.REMOTE_AGENT_FILE.format( + package_name=package_name), + EngineDownloadEndPoint.LOCAL_AGENT_FILE.format( + package_name=package_name)) + downloadstatus = JavaAgentDownload.download_agent( + ) and PythonAgentDownload.download_agent() + return downloadstatus, None + except RequestError: + return False, None + except Exception as e: + logger.info("Health check oss status:{}".format(e)) + return False, None + return True, None + + +def checkossstatus(): + from dongtai_protocol.views.agent_download import JavaAgentDownload, PythonAgentDownload + from dongtai_protocol.views.engine_download import EngineDownloadEndPoint + from oss2.exceptions import AccessDenied + try: + bucket = oss2.Bucket(oss2.AnonymousAuth(), + settings.BUCKET_URL, + settings.BUCKET_NAME, + connect_timeout=4) + bucket.list_objects() + return True, None + except RequestError: + return False, None + except AccessDenied: + return True, None + except Exception as e: + logger.info("Health check oss status:{}".format(e)) + return False, None + return True, None diff --git a/apiserver/views/__init__.py b/dongtai_protocol/views/__init__.py similarity index 100% rename from apiserver/views/__init__.py rename to dongtai_protocol/views/__init__.py diff --git a/dongtai_protocol/views/agent_config.py b/dongtai_protocol/views/agent_config.py new file mode 100644 index 000000000..49fa853de --- /dev/null +++ b/dongtai_protocol/views/agent_config.py @@ -0,0 +1,167 @@ +from dongtai_protocol.decrypter import parse_data +from dongtai_common.endpoint import OpenApiEndPoint, R +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_config import IastAgentConfig +from django.db.models import Q +from drf_spectacular.utils import extend_schema +import logging +from dongtai_common.utils.systemsettings import get_circuit_break +from django.utils.translation import gettext_lazy as _ +from result import Ok, Err, Result +from dongtai_common.models.agent_config import MetricGroup + +logger = logging.getLogger('dongtai.openapi') + + +class AgentConfigView(OpenApiEndPoint): + + @extend_schema( + description='Through agent_ Id get disaster recovery strategy', + responses=R, + methods=['POST']) + def post(self, request): + try: + # agent_id = request.data.get('agentId', None) + param = parse_data(request.read()) + agent_id = param.get('agentId', None) + if agent_id is None: + return R.failure(msg="agentId is None") + except Exception as e: + logger.error(e) + return R.failure(msg="agentId is None") + if not get_circuit_break(): + return R.success(msg=_('Successfully'), data={}) + user = request.user + agent = IastAgent.objects.filter(pk=agent_id).first() + data = {} + if agent and agent.server_id: + server = agent.server + if server: 
+ config = IastAgentConfig.objects.filter( + user=user, + cluster_name__in=('', server.cluster_name), + cluster_version__in=('', server.cluster_version), + hostname__in=('', server.hostname), + ip__in=('', server.ip)).order_by('priority').first() + if config: + data = config.details + + return R.success(data=data) + + +from dongtai_common.models.agent_config import ( + IastCircuitTarget, + IastCircuitConfig, + IastCircuitMetric, + TargetType, + TargetOperator, + DealType, + MetricType, + MetricGroup, + MetricOperator, +) + +class AgentConfigv2View(OpenApiEndPoint): + + def post(self, request): + try: + param = parse_data(request.read()) + agent_id = int(param.get('agentId', None)) + if agent_id is None: + return R.failure(msg="agentId is None") + except Exception as e: + logger.error(e) + return R.failure(msg="agentId is None") + if not get_circuit_break(): + return R.success(msg=_('Successfully'), data={}) + res = get_agent_config(agent_id) + if isinstance(res, Err): + return R.success(msg=_(res.value), data={}) + agent_config = res.value + return R.success(msg=_('Successfully'), data=agent_config) + + +from django.db.models import F + + +def get_agent_filter_details(agent_id): + return IastAgent.objects.filter(pk=agent_id).values( + "bind_project__name", "user__username", "server__protocol", "token", + "server__ip", "server__path", "server__port", "language").annotate( + PROJECT_NAME=F("bind_project__name"), + ACCOUNT_NAME=F("user__username"), + PROTOCOL=F("server__protocol"), + AGENT_IP=F("server__ip"), + AGENT_NAME=F("alias"), + AGENT_PATH=F("server__path"), + PORT=F("server__port"), + AGENT_LANGUAGE=F("language"), + ).first() + + +def get_agent_config_by_scan(agent_id: int, mg: MetricGroup) -> Result: + agent_detail = get_agent_filter_details(agent_id) + queryset = IastCircuitConfig.objects.filter( + is_deleted=0, metric_group=mg, + is_enable=1).order_by('priority').only('id') + for i in queryset: + result_list = [] + for target in IastCircuitTarget.objects.filter( + circuit_config_id=i.id).all(): + result_list.append(get_filter_by_target(target)(agent_detail)) + if all(result_list): + return Ok(i.id) + return Err("config not found") + + +def get_function(opt: TargetOperator): + if opt == TargetOperator.EQUAL: + return lambda x, y: x == y + if opt == TargetOperator.NOT_EQUAL: + return lambda x, y: x != y + if opt == TargetOperator.CONTAIN: + return lambda x, y: x in y + if opt == TargetOperator.NOT_CONTAIN: + return lambda x, y: x not in y + +def get_filter_by_target(target): + targetattr = TargetType(target.target_type).name + opt_function = get_function(TargetType(target.opt)) + return lambda x:opt_function(x[targetattr], target.value) + +def get_agent_config(agent_id: int) -> Result: + data = { + "enableAutoFallback": True, + "performanceLimitRiskMaxMetricsCount": 30, + } + interval_list = [] + for mg in MetricGroup: + res = get_agent_config_by_scan(agent_id, mg) + if isinstance(res,Err): + continue + config_id = res.value + config = IastCircuitConfig.objects.filter( + pk=config_id).first() + metric_list = [] + for metric in IastCircuitMetric.objects.filter( + circuit_config_id=config.id).all(): + metric_list.append(convert_metric(metric)) + data[mg.name.lower()] = metric_list + data[mg.name.lower() + + "IsUninstall"] = True if config.deal == DealType.UNLOAD else False + interval_list.append(config.interval) + # if interval_list is [], there is mean no config found here. + # because interval is required in create config. 
+    if not interval_list:
+        return Err('No config found')
+    data["performanceLimitRiskMaxMetricsCount"] = min(interval_list)
+    return Ok(data)
+
+
+
+def convert_metric(metric):
+    return {
+        "fallbackName": MetricType(metric.metric_type).name,
+        "conditions": MetricOperator(metric.opt).name.lower(),
+        "value": metric.value
+    }
diff --git a/dongtai_protocol/views/agent_download.py b/dongtai_protocol/views/agent_download.py
new file mode 100644
index 000000000..156bf10ba
--- /dev/null
+++ b/dongtai_protocol/views/agent_download.py
@@ -0,0 +1,332 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 -*-
+# author:owefsad
+# datetime:2021/1/14 7:17 PM
+# software: PyCharm
+# project: lingzhi-agent-server
+import json
+import os, re
+import uuid, logging
+
+from django.http import FileResponse
+from dongtai_common.endpoint import UserEndPoint, R
+from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiExample
+from rest_framework.authtoken.models import Token
+from django.utils.translation import gettext_lazy as _
+
+from dongtai_protocol.api_schema import DongTaiParameter, DongTaiAuth
+from dongtai_protocol.utils import OssDownloader
+from dongtai_conf.settings import BUCKET_NAME_BASE_URL, VERSION
+
+import shutil
+import tarfile, os
+import threading
+import time
+
+logger = logging.getLogger('dongtai.openapi')
+
+class JavaAgentDownload():
+
+    def __init__(self, user_id):
+        t = threading.currentThread()
+        self.user_id = user_id
+        self.agent_file = "dongtai-agent.jar"
+        self.original_agent_path = f'/tmp/iast_cache/package'
+        self.original_agent_file = f'/tmp/iast_cache/package/{self.agent_file}'
+        self.user_target_path = f'/tmp/{os.getpid()}-{t.ident}-{user_id}'
+        self.target_path = f'/tmp/{os.getpid()}-{t.ident}-{user_id}/iast_cache/package'
+        self.remote_agent_file = BUCKET_NAME_BASE_URL + 'java/' + VERSION + '/dongtai-agent.jar'
+        if not os.path.exists(f"{self.target_path}"):
+            os.makedirs(f"{self.target_path}")
+        if not os.path.exists(self.original_agent_path):
+            os.makedirs(self.original_agent_path)
+
+    def download_agent(self):
+        if os.path.exists(self.original_agent_file):
+            return True
+        else:
+            return OssDownloader.download_file(
+                object_name=self.remote_agent_file, local_file=f"{self.original_agent_file}"
+            )
+
+    def create_config(self, base_url, agent_token, auth_token, project_name):
+        try:
+            user_file = f"{self.target_path}/{self.agent_file}"
+            if not os.path.exists(user_file):
+                shutil.copyfile(self.original_agent_file, user_file)
+
+            data = "iast.response.name=DongTai Iast\niast.server.url={url}\niast.server.token={token}\niast.allhook.enable=false\niast.dump.class.enable=false\niast.dump.class.path=/tmp/iast-class-dump/\niast.service.report.interval=30000\napp.name=DongTai\nengine.status=start\nengine.name={agent_token}\njdk.version={jdk_level}\nproject.name={project_name}\niast.proxy.enable=false\niast.proxy.host=\niast.proxy.port=\niast.server.mode=local\n"
+            with open(f'{self.user_target_path}/iast.properties', 'w') as config_file:
+                config_file.write(
+                    data.format(url=base_url, token=auth_token, agent_token=agent_token, jdk_level=1,
+                                project_name=project_name)
+                )
+            return True
+        except Exception as e:
+            logger.error(_('Agent configuration file creation failed, reason: {}').format(e))
+            return False
+
+    def replace_config(self):
+        user_file = f"{self.target_path}/{self.agent_file}"
+        # Run jar -uvf {JavaAgentDownload.LOCAL_AGENT_FILE} iast.properties to update the properties file inside the jar
+        import os
+        os.system(f'cd {self.user_target_path};jar -uvf {user_file} iast.properties')
+
+
+class 
PythonAgentDownload(): + + def __init__(self, user_id): + t = threading.currentThread() + self.user_id = user_id + self.agent_file = "dongtai_agent_python.tar.gz" + self.original_agent_file = f'/tmp/{self.agent_file}' + self.target_path = f'/tmp/{os.getpid()}-{t.ident}-{user_id}' + self.target_source_path = f'/tmp/{os.getpid()}-{t.ident}-{user_id}/dongtai_agent_python' + self.remote_agent_file = BUCKET_NAME_BASE_URL + 'python/dongtai_agent_python.tar.gz' + if not os.path.exists(self.target_path): + os.makedirs(self.target_path) + if not os.path.exists(self.target_source_path): + os.makedirs(self.target_source_path) + + def download_agent(self): + if os.path.exists(self.original_agent_file): + return True + else: + return OssDownloader.download_file( + object_name=self.remote_agent_file, local_file=f"{self.original_agent_file}" + ) + + def create_config(self, base_url, agent_token, auth_token, project_name): + try: + user_file = f"{self.target_path}/{self.agent_file}" + if not AgentDownload.is_tar_file(self.original_agent_file): + shutil.rmtree(self.original_agent_file) + return False + + if not os.path.exists(user_file): + shutil.copyfile(self.original_agent_file, user_file) + shutil.copyfile(self.original_agent_file, f"{user_file}.bak") + + agent_file = tarfile.open(user_file) + agent_file.extractall(path=self.target_path) + names = agent_file.getnames() + self.target_source_path = f"{self.target_path}/{names[0]}" + config_path = "" + for item in names: + res = re.search("config.json", item) + if res is not None: + config_path = item + break + with open(f"{self.target_path}/{config_path}", "r") as config_file: + config = json.load(config_file) + config['iast']['server']['token'] = auth_token + config['iast']['server']['url'] = base_url + config['project']['name'] = project_name + config['engine']['name'] = agent_token + with open(f"{self.target_path}/{config_path}", "w+") as config_file: + json.dump(config, config_file) + return True + except Exception as e: + print(type(e)) + print(e) + return False + + def replace_config(self): + user_file = f"{self.target_path}/{self.agent_file}" + try: + with tarfile.open(user_file, "w:gz") as tar: + tar.add(self.target_source_path, arcname=os.path.basename(self.target_source_path)) + return True + except Exception as e: + logger.error(f'replace config error: {e}') + return False + + +class PhpAgentDownload(): + + def __init__(self, user_id): + t = threading.currentThread() + self.user_id = user_id + self.agent_file = "php-agent.tar.gz" + self.original_agent_file = f'/tmp/{self.agent_file}' + self.target_path = f'/tmp/{os.getpid()}-{t.ident}-{user_id}' + self.target_source_path = f'/tmp/{os.getpid()}-{t.ident}-{user_id}/php-agent' + self.remote_agent_file = BUCKET_NAME_BASE_URL + 'php/php-agent.tar.gz' + if not os.path.exists(self.target_path): + os.makedirs(self.target_path) + if not os.path.exists(self.target_source_path): + os.makedirs(self.target_source_path) + + def download_agent(self): + if os.path.exists(self.original_agent_file): + return True + else: + return OssDownloader.download_file( + object_name=self.remote_agent_file, local_file=f"{self.original_agent_file}" + ) + + def create_config(self, base_url, agent_token, auth_token, project_name): + try: + user_file = f"{self.target_path}/{self.agent_file}" + if not AgentDownload.is_tar_file(self.original_agent_file): + shutil.rmtree(self.original_agent_file) + return False + + if not os.path.exists(user_file): + shutil.copyfile(self.original_agent_file, user_file) + 
shutil.copyfile(self.original_agent_file, f"{user_file}.bak") + + agent_file = tarfile.open(user_file) + agent_file.extractall(path=self.target_path) + agent_file.close() + + config_lines = [] + config_path = "dongtai-php-property.ini" + with open(os.path.join(self.target_source_path, config_path), 'rb') as fp: + for line in fp.readlines(): + try: + key, value = line.decode().split('=') + except ValueError as e: + continue + if key == 'iast.server.url': + print(base_url) + value = base_url + if key == 'iast.server.token': + value = auth_token + if key == 'engine.name': + value = agent_token + if key == 'project.name': + value = project_name + config_lines.append("=".join([key, value + '\n'])) + with open(os.path.join(self.target_source_path, config_path), 'w+') as fp: + fp.writelines(config_lines) + return True + except Exception as e: + logger.error(f'create config error: {e}') + return False + + def replace_config(self): + user_file = f"{self.target_path}/{self.agent_file}" + try: + with tarfile.open(user_file, "w:gz") as tar: + tar.add(self.target_source_path, arcname=os.path.basename(self.target_source_path)) + return True + except Exception as e: + logger.error(f'replace config error: {e}') + return False + +class GoAgentDownload(): + + def __init__(self, user_id): + t = threading.currentThread() + self.user_id = user_id + self.agent_file = "dongtai-go-agent-config.yaml" + self.original_agent_file = f'/tmp/{self.agent_file}' + self.target_path = f'/tmp/{os.getpid()}-{t.ident}-{user_id}' + self.target_source_path = f'/tmp/{os.getpid()}-{t.ident}-{user_id}/php-agent' + self.remote_agent_file = BUCKET_NAME_BASE_URL + 'php/php-agent.tar.gz' + if not os.path.exists(self.target_path): + os.makedirs(self.target_path) + if not os.path.exists(self.target_source_path): + os.makedirs(self.target_source_path) + + def download_agent(self): + return True + + def create_config(self, base_url, agent_token, auth_token, project_name): + with open(f"{self.target_path}/{self.agent_file}", "w") as fp: + configs = [ + f'DongtaiGoOpenapi: "{base_url}"', + f'DongtaiGoToken: "{auth_token}"', + f'DongtaiGoProjectName: "{project_name}"', + 'DongtaiGoProjectVersion: "0.1.0"', + 'DongtaiGoProjectCreate: true', + f'DongtaiGoAgentToken: "{agent_token}"', + ] + fp.writelines([config + "\n" for config in configs]) + return True + def replace_config(self): + return True + + +class AgentDownload(UserEndPoint): + """ + 当前用户详情 + """ + name = "download_iast_agent" + description = "下载洞态Agent" + + @staticmethod + def is_tar_file(file): + tmp_path = f"/tmp/.dongtai_agent_test/{time.time_ns()}" + try: + agent_file = tarfile.open(file) + agent_file.extractall(path=tmp_path) + except tarfile.ReadError: + return False + except Exception as e: + raise e + finally: + shutil.rmtree(tmp_path) + return True + + def make_download_handler(self, language, user_id): + if language == 'python': + return PythonAgentDownload(user_id) + if language == 'java': + return JavaAgentDownload(user_id) + if language == 'php': + return PhpAgentDownload(user_id) + if language == 'go': + return GoAgentDownload(user_id) + return + + @extend_schema( + parameters=[ + DongTaiParameter.OPENAPI_URL, + DongTaiParameter.PROJECT_NAME, + DongTaiParameter.LANGUAGE + ], + auth=[DongTaiAuth.TOKEN], + responses=[FileResponse], + methods=['GET'] + ) + def get(self, request): + try: + base_url = request.query_params.get('url', 'https://www.huoxian.cn') + project_name = request.query_params.get('projectName', 'Demo Project') + language = 
request.query_params.get('language') + user_token = request.query_params.get('token', None) + if not user_token: + token, success = Token.objects.get_or_create(user=request.user) + final_token = token.key + else: + final_token = user_token + agent_token = ''.join(str(uuid.uuid4()).split('-')) + + handler = self.make_download_handler(language, request.user.id) + + if handler.download_agent() is False: + return R.failure(msg="agent file download failure. please contact official staff for help.") + + if handler.create_config(base_url=base_url, agent_token=agent_token, auth_token=final_token, + project_name=project_name): + handler.replace_config() + response = FileResponse( + open(f"{handler.target_path}/{handler.agent_file}", "rb")) + response['content_type'] = 'application/octet-stream' + response[ + 'Content-Disposition'] = f"attachment; filename={handler.agent_file}" + return response + else: + return R.failure(msg="agent file not exit.") + except Exception as e: + logger.error( + _('Agent download failed, user: {}, error details: {}').format( + request.user.get_username(), e)) + return R.failure(msg="agent file not exit.") + finally: + try: + shutil.rmtree(f"{handler.target_path}") + except Exception as e: + logger.info(e) diff --git a/dongtai_protocol/views/agent_limit.py b/dongtai_protocol/views/agent_limit.py new file mode 100644 index 000000000..c2efdcfe4 --- /dev/null +++ b/dongtai_protocol/views/agent_limit.py @@ -0,0 +1,24 @@ + +import logging +import time + +from dongtai_common.models.profile import IastProfile +from dongtai_common.endpoint import OpenApiEndPoint, R + +from drf_spectacular.utils import extend_schema +from dongtai_protocol.api_schema import DongTaiParameter, DongTaiAuth + +logger = logging.getLogger('dongtai.openapi') +from django.forms.models import model_to_dict + + +class LimitView(OpenApiEndPoint): + @extend_schema(description='Agent Limit', auth=[DongTaiAuth.TOKEN]) + def get(self, request): + keys = ['cpu_limit'] + profiles = IastProfile.objects.filter(key__in=keys).all() + if profiles: + data = [model_to_dict(profile) for profile in profiles] + else: + data = [{'id': 1, 'key': 'cpu_limit', 'value': '60'}] + return R.success(data=data) diff --git a/dongtai_protocol/views/agent_register.py b/dongtai_protocol/views/agent_register.py new file mode 100644 index 000000000..8b2beb236 --- /dev/null +++ b/dongtai_protocol/views/agent_register.py @@ -0,0 +1,353 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/11/30 下午3:13 +# software: PyCharm +# project: lingzhi-webapi +import base64 +import logging +import time + +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project import IastProject +from dongtai_common.models.project_version import IastProjectVersion +from dongtai_common.models.server import IastServer +from dongtai_common.models.profile import IastProfile +from drf_spectacular.utils import extend_schema +from rest_framework.request import Request +from django.utils.translation import gettext_lazy as _ +from django.db import transaction +import time +from dongtai_common.endpoint import OpenApiEndPoint, R + +import json +from dongtai_protocol.api_schema import DongTaiAuth, DongTaiParameter +from dongtai_protocol.decrypter import parse_data + +logger = logging.getLogger('dongtai.openapi') + + +class AgentRegisterEndPoint(OpenApiEndPoint): + """ + 引擎注册接口 + """ + name = "api-v1-agent-register" + description = "引擎注册" + + @staticmethod + def register_agent(token, version, language, project_name, user, 
+ project_version): + project = IastProject.objects.values('id').filter(name=project_name, + user=user).first() + is_audit = AgentRegisterEndPoint.get_is_audit() + if project: + if project_version: + project_current_version = project_version + else: + project_current_version = IastProjectVersion.objects.filter( + project_id=project['id'], + current_version=1, + status=1 + ).first() + agent_id = AgentRegisterEndPoint.get_agent_id( + token, project_name, user, project_current_version.id) + if agent_id == -1: + agent_id = AgentRegisterEndPoint.__register_agent( + exist_project=True, + token=token, + user=user, + version=version, + project_id=project['id'], + project_name=project_name, + project_version_id=project_current_version.id, + language=language, + is_audit=is_audit + ) + else: + agent_id = AgentRegisterEndPoint.get_agent_id(token=token, project_name=project_name, user=user, + current_project_version_id=0) + if agent_id == -1: + agent_id = AgentRegisterEndPoint.__register_agent( + exist_project=False, + token=token, + user=user, + version=version, + project_id=0, + project_name=project_name, + project_version_id=0, + language=language, + is_audit=is_audit + ) + return agent_id + + @staticmethod + def get_is_audit(): + return 1 + + @staticmethod + def get_command(envs): + for env in envs: + if 'sun.java.command' in env.lower(): + return '='.join(env.split('=')[1:]) + return '' + + @staticmethod + def get_runtime(envs): + for env in envs: + if 'java.runtime.name' in env.lower(): + return '='.join(env.split('=')[1:]) + return '' + + @staticmethod + def register_server(agent_id, hostname, network, container_name, server_addr, server_port, cluster_name,cluster_version, + server_path, server_env, pid): + """ + 注册server,并关联server至agent + :param agent_id: + :param hostname: + :param network: + :param container_name: + :param server_addr: + :param server_port: + :param server_path: + :param server_env: + :param pid: + :return: + """ + agent = IastAgent.objects.filter(id=agent_id).first() + if agent is None: + return + # todo 需要根据不同的语言做兼容 + if server_env: + env = base64.b64decode(server_env).decode('utf-8') + env = env.replace('{', '').replace('}', '') + envs = env.split(',') + command = AgentRegisterEndPoint.get_command(envs) + else: + command = '' + env = '' + envs = [] + + try: + port = int(server_port) + except Exception as e: + logger.error(_('The server port does not exist, has been set to the default: 0')) + port = 0 + + server_id = agent.server_id + + server = IastServer.objects.filter(id=server_id).first() if server_id else None + if server: + server.hostname = hostname + server.network = network + server.command = command + server.ip = server_addr + server.port = port + server.pid = pid + server.env = env + server.cluster_name = cluster_name + server.cluster_version = cluster_version + server.status = 'online' + server.update_time = int(time.time()) + server.save(update_fields=['hostname', 'command', 'ip', 'port', 'env', 'status', 'update_time', 'cluster_name', 'cluster_version']) + else: + server = IastServer.objects.create( + hostname=hostname, + ip=server_addr, + port=port, + pid=pid, + network=network, + env=env, + path=server_path, + status='online', + container=container_name, + container_path=server_path, + cluster_name=cluster_name, + cluster_version=cluster_version, + command=command, + runtime=AgentRegisterEndPoint.get_runtime(envs), + create_time=int(time.time()), + update_time=int(time.time()) + ) + agent.server_id = server.id + agent.save(update_fields=['server_id']) + 
logger.info(_('Server record creation success')) + + @extend_schema( + description='Agent Register, Data is Gzip', + parameters=[ + DongTaiParameter.AGENT_NAME, + DongTaiParameter.LANGUAGE, + DongTaiParameter.VERSION, + DongTaiParameter.PROJECT_NAME, + DongTaiParameter.HOSTNAME, + DongTaiParameter.NETWORK, + DongTaiParameter.CONTAINER_NAME, + DongTaiParameter.SERVER_ADDR, + DongTaiParameter.SERVER_PORT, + DongTaiParameter.SERVER_PATH, + DongTaiParameter.SERVER_ENV, + DongTaiParameter.PID, + DongTaiParameter.AUTO_CREATE_PROJECT, + ], + responses=[ + {204: None} + ], + methods=['POST'] + ) + def post(self, request: Request): + try: + param = parse_data(request.read()) + # param = request.data + token = param.get('name') + language = param.get('language') + version = param.get('version') + project_name = param.get('projectName', 'Demo Project').strip() + if not token or not version or not project_name: + return R.failure(msg="参数错误") + hostname = param.get('hostname') + network = param.get('network') + container_name = param.get('containerName') + server_addr = param.get('serverAddr') + server_port = param.get('serverPort') + server_path = param.get('serverPath') + server_env = param.get('serverEnv') + # add by song + cluster_name = param.get('clusterName', "") + cluster_version = param.get('clusterVersion', "") + # end by song + pid = param.get('pid') + auto_create_project = param.get('autoCreateProject', 0) + user = request.user + version_name = param.get('projectVersion', 'V1.0') + version_name = version_name if version_name else 'V1.0' + with transaction.atomic(): + obj, project_created = IastProject.objects.get_or_create( + name=project_name, + user=request.user, + defaults={ + 'scan_id': 5, + 'agent_count': 0, + 'mode': '插桩模式', + 'latest_time': int(time.time()) + }) + project_version, version_created = IastProjectVersion.objects.get_or_create( + project_id=obj.id, + version_name=version_name, + defaults={ + 'user': request.user, + 'version_name': version_name, + 'status': 1, + 'description': '', + 'current_version': 0, + }) + if version_created: + count = IastProjectVersion.objects.filter( + project_id=obj.id).count() + if count == 1: + project_version.current_version = 1 + project_version.save() + if project_created: + logger.info(_('auto create project {}').format(obj.id)) + if version_created: + logger.info( + _('auto create project version {}').format( + project_version.id)) + if param.get('projectName', None) and param.get( + 'projectVersion', None): + agent_id = self.register_agent(token=token, + project_name=project_name, + language=language, + version=version, + project_version=project_version, + user=user) + else: + agent_id = self.register_agent(token=token, + project_name=project_name, + language=language, + version=version, + user=user, + project_version=None) + + self.register_server( + agent_id=agent_id, + hostname=hostname, + network=network, + container_name=container_name, + server_addr=get_ipaddress(network) + if get_ipaddress(network) else server_addr, + server_port=server_port, + server_path=server_path, + cluster_name=cluster_name, + cluster_version=cluster_version, + server_env=server_env, + pid=pid, + ) + + core_auto_start = 0 + if agent_id != -1: + agent = IastAgent.objects.filter(pk=agent_id).first() + agent.register_time = int(time.time()) + IastAgent.objects.filter(pk=agent_id).update( + register_time=int(time.time())) + agent.save() + core_auto_start = agent.is_audit + + return R.success(data={'id': agent_id, 'coreAutoStart': core_auto_start}) + except 
Exception as e: + logger.error("探针注册失败,原因:{reason}".format(reason=e), exc_info=True) + return R.failure(msg="探针注册失败") + + @staticmethod + def get_agent_id(token, project_name, user, current_project_version_id): + queryset = IastAgent.objects.values('id').filter( + token=token, + project_name=project_name, + user=user, + project_version_id=current_project_version_id + ) + agent = queryset.first() + if agent: + queryset.update(is_core_running=1, online=1, is_running=1) + return agent['id'] + return -1 + + @staticmethod + def __register_agent(exist_project, token, user, version, project_id, project_name, project_version_id, language, is_audit): + if exist_project: + IastAgent.objects.filter(token=token, online=1, user=user).update(online=0) + agent = IastAgent.objects.create( + token=token, + version=version, + latest_time=int(time.time()), + user=user, + is_running=1, + bind_project_id=project_id, + project_name=project_name, + control=0, + is_control=0, + is_core_running=1, + online=1, + project_version_id=project_version_id, + language=language, + is_audit=is_audit + ) + return agent.id + + +def get_ipaddress(network: str): + try: + dic = json.loads(network) + res = dic[0]['ip'] + for i in dic: + if i['name'].startswith('en'): + res = i['ip'] + if i.get("isAddress", 0): + res = i['ip'] + break + return res + except KeyError as e: + return '' + except Exception as e: + logger.error(e, exc_info=True) + return '' diff --git a/dongtai_protocol/views/agent_update.py b/dongtai_protocol/views/agent_update.py new file mode 100644 index 000000000..392e9c6c8 --- /dev/null +++ b/dongtai_protocol/views/agent_update.py @@ -0,0 +1,57 @@ +import logging +import time + +from dongtai_common.models.agent import IastAgent +from dongtai_common.endpoint import OpenApiEndPoint, R +from dongtai_protocol.decrypter import parse_data +from drf_spectacular.utils import extend_schema +from dongtai_common.models.server import IastServer +from django.utils.translation import gettext_lazy as _ +from urllib.parse import urlparse, urlunparse +from dongtai_web.views.project_add import is_ip +logger = logging.getLogger('dongtai.openapi') + + +class AgentUpdateEndPoint(OpenApiEndPoint): + @extend_schema( + description='Agent Update, Data is Gzip', + responses=[ + {204: None} + ], + methods=['POST']) + def post(self, request): + try: + param = parse_data(request.read()) + agent_id = int(param.get('agentId', None)) + server_addr = param.get('serverAddr', None) + server_port = int(param.get('serverPort', None)) + protocol = param.get('protocol', '') + except Exception as e: + logger.error(e, exc_info=True) + return R.failure(msg="参数错误") + logger.info(f"agent_id:{agent_id} update_fields:{param}") + ip = '' + parse_re = urlparse(server_addr) + if parse_re.hostname and is_ip(parse_re.hostname): + ip = parse_re.hostname + user = request.user + agent = IastAgent.objects.filter(id=agent_id, user=user).first() + if not agent: + return R.failure(msg="agent no register") + else: + server = IastServer.objects.filter(id=agent.server_id).first() + if not server: + return R.failure(msg="agent no register") + else: + update_fields = ["port", 'update_time'] + if protocol: + server.protocol = protocol + update_fields.append('protocol') + if ip: + server.ip = ip + update_fields.append('ip') + server.port = server_port + server.update_time = int(time.time()) + server.save(update_fields=update_fields) + logger.info(_('Server record update success')) + return R.success(msg="success update") diff --git a/apiserver/views/engine_auto_deploy.py 
b/dongtai_protocol/views/engine_auto_deploy.py similarity index 77% rename from apiserver/views/engine_auto_deploy.py rename to dongtai_protocol/views/engine_auto_deploy.py index 45fe50fc2..f5794d057 100644 --- a/apiserver/views/engine_auto_deploy.py +++ b/dongtai_protocol/views/engine_auto_deploy.py @@ -5,11 +5,12 @@ # software: PyCharm # project: webapi +import logging +from dongtai_common.endpoint import OpenApiEndPoint from django.http import StreamingHttpResponse from rest_framework.authtoken.models import Token -from rest_framework.request import Request -from apiserver.base.openapi import OpenApiEndPoint +logger = logging.getLogger("django") TEMPLAGE_DATA = """#/bin/bash PID='' @@ -88,13 +89,18 @@ class AutoDeployEndPoint(OpenApiEndPoint): name = "download_iast_agent" description = "白帽子-下载IAST 自动部署脚本" - def get(self, request: Request): + def get(self, request): """ - IAST下载 agent接口s + IAST下载 agent接口 :param request: :return: """ - url = request.query_params['url'] - token, success = Token.objects.get_or_create(user=request.user) - data = TEMPLAGE_DATA.replace("{url}", url).replace("{token}", token.key) - return StreamingHttpResponse(data) + try: + url = request.query_params['url'] + token, success = Token.objects.get_or_create(user=request.user) + data = TEMPLAGE_DATA.replace("{url}", url).replace("{token}", token.key) + return StreamingHttpResponse(data) + except Exception as e: + logger.info(e) + return StreamingHttpResponse(TEMPLAGE_DATA) + diff --git a/apiserver/views/engine_download.py b/dongtai_protocol/views/engine_download.py similarity index 55% rename from apiserver/views/engine_download.py rename to dongtai_protocol/views/engine_download.py index 3e254ea56..e81e48281 100644 --- a/apiserver/views/engine_download.py +++ b/dongtai_protocol/views/engine_download.py @@ -9,36 +9,45 @@ import os from django.http import FileResponse +from drf_spectacular.utils import extend_schema from rest_framework import status from rest_framework.request import Request -from AgentServer.base import R -from apiserver.base.openapi import OpenApiEndPoint -from apiserver.utils import OssDownloader +from dongtai_common.endpoint import OpenApiEndPoint, R +from dongtai_protocol.api_schema import DongTaiParameter +from dongtai_protocol.utils import OssDownloader +from dongtai_conf.settings import BUCKET_NAME_BASE_URL, VERSION logger = logging.getLogger("dongtai.openapi") +PACKAGE_NAME_LIST = ('dongtai-core', 'dongtai-spy', 'dongtai-api', + 'dongtai-grpc', 'dongtai-log', 'dongtai-spring-api', + 'dongtai-core-jdk6', 'dongtai-api-jdk6', + 'dongtai-spy-jdk6') + class EngineDownloadEndPoint(OpenApiEndPoint): name = "download_core_jar_package" description = "iast agent-下载IAST依赖的core、inject jar包" - LOCAL_AGENT_FILE = '/tmp/{package_name}.jar' - REMOTE_AGENT_FILE = 'agent/java/{package_name}.jar' + LOCAL_AGENT_PATH = '/tmp/iast_cache/package' + LOCAL_AGENT_FILE = '/tmp/iast_cache/package/{package_name}.jar' + REMOTE_AGENT_FILE = BUCKET_NAME_BASE_URL + 'java/'+ VERSION + '/{package_name}.jar' + @extend_schema( + description='Agent Engine Download', + parameters=[ + DongTaiParameter.ENGINE_NAME, + ], + responses=R, + methods=['GET'] + ) def get(self, request: Request): - """ - IAST下载 agent接口 - :param request: - :return: - """ - package_name = request.query_params.get('package_name') - jdk = request.query_params.get('jdk.version') - if package_name not in ('iast-core', 'iast-inject') or jdk not in ('1', '2'): + package_name = request.query_params.get('engineName') + if package_name not in PACKAGE_NAME_LIST: 
return R.failure({ "status": -1, "msg": "bad gay." }) - local_file_name = EngineDownloadEndPoint.LOCAL_AGENT_FILE.format(package_name=package_name) remote_file_name = EngineDownloadEndPoint.REMOTE_AGENT_FILE.format(package_name=package_name) logger.debug(f'download file from oss or local cache, file: {local_file_name}') @@ -48,13 +57,17 @@ def get(self, request: Request): response['content_type'] = 'application/octet-stream' response['Content-Disposition'] = f"attachment; filename={package_name}.jar" return response - except: - return R.failure(msg="file not exit.", status=status.HTTP_500_INTERNAL_SERVER_ERROR) + except Exception as e: + logger.error(e, exc_info=True) + return R.failure(msg="file not exit.", + status=status.HTTP_500_INTERNAL_SERVER_ERROR) else: return R.failure(msg="file not exit.", status=status.HTTP_500_INTERNAL_SERVER_ERROR) @staticmethod def download_agent_jar(remote_agent_file, local_agent_file): + if not os.path.exists(EngineDownloadEndPoint.LOCAL_AGENT_PATH): + os.makedirs(EngineDownloadEndPoint.LOCAL_AGENT_PATH) if os.path.exists(local_agent_file): return True else: diff --git a/apiserver/views/engine_heartbeat.py b/dongtai_protocol/views/engine_heartbeat.py similarity index 74% rename from apiserver/views/engine_heartbeat.py rename to dongtai_protocol/views/engine_heartbeat.py index 049fb1f56..0c7b9e400 100644 --- a/apiserver/views/engine_heartbeat.py +++ b/dongtai_protocol/views/engine_heartbeat.py @@ -6,10 +6,10 @@ # project: webapi import logging -from dongtai_models.models.engine_heartbeat import IastEngineHeartbeat +from dongtai_common.models.engine_heartbeat import IastEngineHeartbeat -from AgentServer.base import R -from apiserver.base.openapi import OpenApiEndPoint +from dongtai_common.endpoint import OpenApiEndPoint, R +from django.utils.translation import gettext_lazy as _ logger = logging.getLogger("dongtai.openapi") @@ -43,20 +43,22 @@ def post(self, request): methodpoolcount=data['methodPoolCount'], timestamp=data['timestamp'], ) - logger.info(f'【{client_ip}】心跳数据处理成功') + logger.info(_('[{}] Heartbeat data is successful').format(client_ip)) return R.success(data=data) except Exception as e: - logger.error(f'心跳数据处理失败,错误原因:{e}') + logger.error(_('Heartbeat data failed, error reason: {}').format(e)) return R.failure() @staticmethod def get_client_ip(request): try: - if request.META.has_key('HTTP_X_FORWARDED_FOR'): + logger.info(request.META) + if 'HTTP_X_FORWARDED_FOR' in request.META: ip = request.META['HTTP_X_FORWARDED_FOR'] else: ip = request.META['REMOTE_ADDR'] return ip except Exception as e: - logger.error(f'客户端IP获取失败,原因:{e}') + logger.error( + _('Client IP acquisition failed, reasons: {}').format(e)) return '' diff --git a/dongtai_protocol/views/engine_status.py b/dongtai_protocol/views/engine_status.py new file mode 100644 index 000000000..b6099171f --- /dev/null +++ b/dongtai_protocol/views/engine_status.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/8/4 16:47 +# software: PyCharm +# project: webapi +import logging +import time + +from dongtai_common.models.agent import IastAgent + +from dongtai_common.endpoint import OpenApiEndPoint, R +from drf_spectacular.utils import extend_schema +from django.core.cache import cache + +from dongtai_protocol.api_schema import DongTaiParameter + +logger = logging.getLogger("django") + + +class EngineUpdateEndPoint(OpenApiEndPoint): + name = "iast_engine_update_status_edit" + description = "IAST 检测引擎更新状态修改接口" + + def get(self, request, status=None): + """ 
+ IAST 检测引擎 agent接口 + :param request: + :return: + """ + agent_name = request.query_params.get('agent_name') + agent = IastAgent.objects.filter(user=request.user, token=agent_name, is_running=1).first() + if not agent: + return R.failure("agent不存在或无权限访问") + + if status: + if agent.is_control == 1: + agent.control = status + agent.is_control = 0 + agent.latest_time = int(time.time()) + agent.save() + return R.success(msg="安装完成") + else: + return R.failure(msg="引擎正在被安装或卸载,请稍后再试") + else: + if agent.control == 1 and agent.is_control == 0: + agent.is_control = 1 + agent.latest_time = int(time.time()) + agent.save() + return R.success(data=agent.control) + else: + return R.failure(msg="不需要更新或正在更新中") + + +class EngineAction(OpenApiEndPoint): + name = "iast_engine_update_status_edit" + description = "IAST 检测引擎更新状态修改接口" + + @extend_schema( + description='Check Agent Engine Control Code', + parameters=[ + DongTaiParameter.AGENT_NAME, + ], + responses=R, + methods=['GET'] + ) + def get(self, request): + agent_id = request.query_params.get('agentId') + agent = IastAgent.objects.filter(user=request.user, pk=agent_id, is_running=1).first() + if not agent: + return R.failure("agent不存在或无权限访问") + agent_status = { + 0: { + "key": "无下发指令", + "value": "notcmd", + }, + 2: { + "key": "注册启动引擎", + "value": "coreRegisterStart", + }, + 3: { + "key": "开启引擎核心", + "value": "coreStart", + }, + 4: { + "key": "关闭引擎核心", + "value": "coreStop", + }, + 5: { + "key": "卸载引擎核心", + "value": "coreUninstall", + }, + 6: { + "key": "强制开启引擎核心性能熔断", + "value": "corePerformanceForceOpen", + }, + 7: { + "key": "强制关闭引擎核心性能熔断", + "value": "corePerformanceForceClose", + }, + 8: { + "key": "Agent升级", + "value": "update", + }, + } + if agent.is_control == 0: + return R.failure(msg="暂无命令", data="notcmd") + else: + agent.is_control = 0 + agent.latest_time = int(time.time()) + if agent.control in [4, 5, 6]: + agent.is_core_running = 0 + else: + agent.is_core_running = 1 + if agent.control == 8: + if cache.get(f'agent_update_{agent_id}', False): + agent.is_control = 0 + agent.control = 2 + cache.delete(f'agent_update_{agent_id}') + else: + cache.set(f"agent_update_{agent_id}", True, 60 * 5) + agent.is_control = 1 + agent.control = 5 + agent.save(update_fields=['is_control', 'is_core_running', 'latest_time']) + result_cmd = agent_status.get(agent.control, { + "key": "无下发指令", + "value": "notcmd" + }).get("value") + # print(result_cmd) + return R.success(data=result_cmd) diff --git a/dongtai_protocol/views/except_action.py b/dongtai_protocol/views/except_action.py new file mode 100644 index 000000000..0d81a5270 --- /dev/null +++ b/dongtai_protocol/views/except_action.py @@ -0,0 +1,52 @@ +import logging +import time + +from dongtai_common.models.agent import IastAgent +from dongtai_common.endpoint import OpenApiEndPoint, R +from dongtai_protocol.decrypter import parse_data +from drf_spectacular.utils import extend_schema +from dongtai_common.models.server import IastServer +from django.utils.translation import gettext_lazy as _ +from urllib.parse import urlparse, urlunparse +from dongtai_web.views.project_add import is_ip +from rest_framework.viewsets import ViewSet +logger = logging.getLogger('dongtai.openapi') + + +class AgentActionV2EndPoint(OpenApiEndPoint,ViewSet): + + @extend_schema(description='Agent Update, Data is Gzip', + responses=[{ + 204: None + }], + methods=['POST']) + def actual_running_status(self, request): + try: + param = parse_data(request.read()) + agent_id = int(param.get('agentId', None)) + actual_running_status = 
int(param.get('actualRunningStatus', None)) + state_status = int(param.get('stateStatus', None)) + except Exception as e: + logger.error(e, exc_info=True) + return R.failure(msg="参数错误") + agent = IastAgent.objects.filter(pk=agent_id).first() + if not agent: + return R.failure(msg=_("Agent not found")) + agent.actual_running_status = actual_running_status + agent.state_status = state_status + return R.success(msg="success update") + + @extend_schema(description='Agent Update, Data is Gzip', + responses=[{ + 204: None + }], + methods=['POST']) + def except_running_status(self, request): + if 'agentId' not in request.GET.keys(): + return R.failure() + agent_id = request.GET['agentId'] + agent = IastAgent.objects.filter(pk=agent_id).first() + if not agent: + return R.failure(msg=_("Agent not found")) + data = {"exceptRunningStatus": agent.except_running_status} + return R.success(msg="success update", data=data) diff --git a/dongtai_protocol/views/health.py b/dongtai_protocol/views/health.py new file mode 100644 index 000000000..9fa3b5d5b --- /dev/null +++ b/dongtai_protocol/views/health.py @@ -0,0 +1,107 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : health +# @created : Wednesday Aug 25, 2021 16:19:13 CST +# +# @description : +###################################################################### + +import logging +from dongtai_common.endpoint import UserEndPoint, R +from drf_spectacular.utils import extend_schema + +from dongtai_protocol.utils import OssDownloader +import oss2 +from oss2.exceptions import RequestError +import requests +from requests.exceptions import ConnectionError, ConnectTimeout +import json +from dongtai_protocol.utils import checkossstatus + +logger = logging.getLogger("dongtai.openapi") + +def checkenginestaus(): + import redis + mock_data = { + "dongtai_engine": { + "status": 1 + }, + "engine_monitoring_indicators": [ + { + "key": "dongtai-replay-vul-scan", + "value": 0, + "name": "dongtai-replay-vul-scan" + }, + { + "key": "dongtai_method_pool_scan", + "value": 0, + "name": "dongtai-method-pool-scan" + }, + ], + } + # 读取数据库中的redis键,然后查找队列大小 + from dongtai_common.models.engine_monitoring_indicators import IastEnginMonitoringIndicators + try: + monitor_models = IastEnginMonitoringIndicators.objects.all() + if monitor_models.values('id').count() > 0: + from dongtai_conf import settings + redis_cli = redis.StrictRedis( + host=settings.config.get("redis", 'host'), + password=settings.config.get("redis", 'password'), + port=settings.config.get("redis", 'port'), + db=settings.config.get("redis", 'db'), + ) + + monitor_models = monitor_models.values('key', 'name', 'name_en', 'name_zh') + mock_data['engine_monitoring_indicators'] = list() + for monitor_model in monitor_models: + mock_data['engine_monitoring_indicators'].append({ + 'key': monitor_model['key'], + 'name': monitor_model['name'], + 'name_en': monitor_model['name_en'], + 'name_zh': monitor_model['name_zh'], + 'value': redis_cli.llen(monitor_model['key']) + }) + except Exception as e: + logger.info(e) + return R.success(data=mock_data) + return R.success(data=mock_data) + +def _checkenginestatus(): + try: + resp = checkenginestaus() + resp = json.loads(resp.content) + resp = resp.get("data", None) + except (ConnectionError, ConnectTimeout): + return False, None + except Exception as e: + logger.info("HealthView_checkenginestatus:{}".format(e)) + return False, None + return True, resp + +class HealthView(UserEndPoint): + @extend_schema( 
+ description='Check OpenAPI Service Status', + responses=R, + methods=['GET'] + ) + def get(self, request): + oss_status, _ = checkossstatus() + statusmap = {True: 1, False: 0} + engine_status, engine_resp = _checkenginestatus() + data = { + "dongtai_openapi": { + "status": 1 + }, + "oss": { + "status": statusmap[oss_status] + } + } + if engine_status and engine_resp is not None: + data.update(engine_resp) + else: + data.update({ + "dongtai_engine": 0, + "engine_monitoring_indicators": [] + }) + return R.success(data=data) diff --git a/dongtai_protocol/views/health_oss.py b/dongtai_protocol/views/health_oss.py new file mode 100644 index 000000000..434aa95af --- /dev/null +++ b/dongtai_protocol/views/health_oss.py @@ -0,0 +1,29 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : health_oss +# @created : Thursday Aug 26, 2021 10:37:17 CST +# +# @description : +###################################################################### + +import oss2 +from drf_spectacular.utils import extend_schema +from oss2.exceptions import RequestError +import logging +from dongtai_protocol.utils import checkossstatus, STATUSMAP +from dongtai_common.endpoint import OpenApiEndPoint, R, UserEndPoint + +logger = logging.getLogger("dongtai.openapi") + + +class OSSHealthView(UserEndPoint): + @extend_schema( + description='Check OSS Health', + responses=R, + methods=['GET'] + ) + def get(self, request): + oss_status, _ = checkossstatus() + data = {"oss": {"status": 1}} + #data = {"oss": {"status": STATUSMAP[oss_status]}} + return R.success(data=data) diff --git a/dongtai_protocol/views/hook_profiles.py b/dongtai_protocol/views/hook_profiles.py new file mode 100644 index 000000000..a6479dcbe --- /dev/null +++ b/dongtai_protocol/views/hook_profiles.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/11/24 下午9:16 +# software: PyCharm +# project: lingzhi-webapi +import logging + +from dongtai_common.models.hook_strategy import HookStrategy +from dongtai_common.models.hook_type import HookType +from drf_spectacular.utils import extend_schema +from rest_framework.request import Request +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.utils import const +from dongtai_common.endpoint import OpenApiEndPoint, R +from django.db.models import (Prefetch, OuterRef, Subquery) +# note: 当前依赖必须保留,否则无法通过hooktype反向查找策略 +from dongtai_protocol.api_schema import DongTaiParameter + +logger = logging.getLogger("django") +JAVA = 1 +LANGUAGE_DICT = {'JAVA': 1, 'PYTHON': 2, 'PHP': 3, 'G0': 4} + +class HookProfilesEndPoint(OpenApiEndPoint): + name = "api-v1-profiles" + description = "获取HOOK策略" + + @staticmethod + def get_profiles(user=None, language_id=JAVA): + profiles = list() + hook_types = HookType.objects.filter(vul_strategy__state='enable', + vul_strategy__user_id__in=set( + [1, user.id]), + language_id=language_id, + enable=const.HOOK_TYPE_ENABLE, + type__in=(3, 4)) + hook_types_a = HookType.objects.filter(language_id=language_id, + enable=const.HOOK_TYPE_ENABLE, + type__in=(1, 2)) + for hook_type in list(hook_types) + list(hook_types_a): + strategy_details = list() + + profiles.append({ + 'type': hook_type.type, + 'enable': hook_type.enable, + 'value': hook_type.value, + 'details': strategy_details + }) + strategies = hook_type.strategies.filter( + created_by__in=[1, user.id] if user else [1], + enable=const.HOOK_TYPE_ENABLE).values() + for strategy in strategies: + 
strategy_details.append({ + "source": strategy.get("source"), + "track": strategy.get("track"), + "target": strategy.get("target"), + "value": strategy.get("value"), + "inherit": strategy.get("inherit") + }) + return profiles + + @extend_schema( + description='Pull Agent Engine Hook Rule', + parameters=[ + DongTaiParameter.LANGUAGE, + ], + responses=R, + methods=['GET'] + ) + def get(self, request): + user = request.user + + language = request.query_params.get('language') + language_id = LANGUAGE_DICT.get(language, + None) if language is not None else None + language_id = JAVA if language_id is None and language is None else language_id + profiles = self.get_profiles(user, language_id) + + return R.success(data=profiles) + +# def put(self, request): +# pass +# +# def post(self): +# pass + + +if __name__ == '__main__': + strategy_count = HookStrategy.objects.count() diff --git a/apiserver/views/properties.py b/dongtai_protocol/views/properties.py similarity index 79% rename from apiserver/views/properties.py rename to dongtai_protocol/views/properties.py index e6eef8d3f..f0dae4e29 100644 --- a/apiserver/views/properties.py +++ b/dongtai_protocol/views/properties.py @@ -6,13 +6,12 @@ # project: webapi import logging -from dongtai_models.models.agent import IastAgent -from dongtai_models.models.agent_properties import IastAgentProperties +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_properties import IastAgentProperties from rest_framework.request import Request -from AgentServer.base import R -from apiserver.base.openapi import OpenApiEndPoint -from apiserver.serializers.agent_properties import AgentPropertiesSerialize +from dongtai_common.endpoint import OpenApiEndPoint, R +from dongtai_protocol.serializers.agent_properties import AgentPropertiesSerialize logger = logging.getLogger("django") diff --git a/dongtai_protocol/views/report_upload.py b/dongtai_protocol/views/report_upload.py new file mode 100644 index 000000000..4b5a17e0e --- /dev/null +++ b/dongtai_protocol/views/report_upload.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2021/1/12 下午7:45 +# software: PyCharm +# project: lingzhi-agent-server + +from dongtai_common.endpoint import OpenApiEndPoint, R +from drf_spectacular.utils import extend_schema +import time,logging +from dongtai_protocol.api_schema import DongTaiParameter +from dongtai_protocol.decrypter import parse_data +from dongtai_protocol.report.report_handler_factory import ReportHandler +from rest_framework.views import APIView +from django.http import JsonResponse + +logger = logging.getLogger('dongtai.openapi') + +class ReportUploadEndPoint(OpenApiEndPoint): + name = "api-v1-report-upload" + description = "agent上传报告" + + @extend_schema( + description='Pull Agent Engine Hook Rule', + parameters=[ + DongTaiParameter.LANGUAGE, + ], + responses=R, + methods=['GET'] + ) + def post(self, request): + try: + report = parse_data(request.read()) + data = ReportHandler.handler(report, request.user) + return R.success(msg="report upload success.", data=data) + except Exception as e: + logger.error(f"report upload failed, reason: {e}", exc_info=True) + return R.failure(msg="report upload failed") diff --git a/dongtai_protocol/views/startuptime.py b/dongtai_protocol/views/startuptime.py new file mode 100644 index 000000000..f4424bbc0 --- /dev/null +++ b/dongtai_protocol/views/startuptime.py @@ -0,0 +1,55 @@ +###################################################################### +# @author : 
bidaya0 (bidaya0@$HOSTNAME) +# @file : startuptime +# @created : 星期三 10月 20, 2021 16:07:48 CST +# +# @description : +###################################################################### + + + +import logging + +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_properties import IastAgentProperties +from rest_framework.request import Request + +from dongtai_common.endpoint import OpenApiEndPoint, R +from dongtai_protocol.serializers.agent_properties import AgentPropertiesSerialize +import time +from dongtai_protocol.api_schema import DongTaiParameter, DongTaiAuth +from drf_spectacular.utils import extend_schema +from dongtai_protocol.decrypter import parse_data +from django.http.request import QueryDict + +logger = logging.getLogger("django") + + +class StartupTimeEndPoint(OpenApiEndPoint): + name = "api-v1-startuptime" + + @extend_schema(description='Agent Limit', auth=[DongTaiAuth.TOKEN]) + def post(self, request: Request): + agent_id = request.data.get('agentId', None) + startup_time = request.data.get('startupTime', None) + agent = IastAgent.objects.filter(pk=agent_id).first() + if agent: + agent.startup_time = startup_time + agent.save(update_fields=['startup_time']) + return R.success(data=None) + logger.error('agent not found') + return R.failure(data=None) + + +class StartupTimeGzipEndPoint(StartupTimeEndPoint): + name = "api-v1-startuptime" + + @extend_schema(description='Agent Limit', auth=[DongTaiAuth.TOKEN]) + def post(self, request: Request): + try: + param = parse_data(request.read()) + request._full_data = param + return super().post(request) + except Exception as e: + logger.info(e) + return R.failure(data=None) diff --git a/dongtai_web/__init__.py b/dongtai_web/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/report/handler/__init__.py b/dongtai_web/account/__init__.py similarity index 63% rename from apiserver/report/handler/__init__.py rename to dongtai_web/account/__init__.py index db4c69f92..e1f6a6efe 100644 --- a/apiserver/report/handler/__init__.py +++ b/dongtai_web/account/__init__.py @@ -1,6 +1,5 @@ #!/usr/bin/env python #-*- coding:utf-8 -*- # author:owefsad -# datetime:2020/10/23 11:54 # software: PyCharm -# project: webapi +# project: lingzhi-webapi diff --git a/dongtai_web/admin.py b/dongtai_web/admin.py new file mode 100644 index 000000000..846f6b406 --- /dev/null +++ b/dongtai_web/admin.py @@ -0,0 +1 @@ +# Register your models here. 
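The `agent/startuptime` route added in dongtai_protocol/urls.py and handled by StartupTimeEndPoint above can be exercised with a plain token-authenticated POST. The snippet below is a minimal illustrative sketch, not part of this diff: the server URL, API token, agent id, and the millisecond unit for startupTime are all assumptions made for the example.

# Illustrative only; placeholder values, not part of the PR.
import time
import requests

SERVER_URL = "http://localhost:8000"   # assumption: local dev server from manage_run_server.sh
API_TOKEN = "<your-token>"             # assumption: DRF token issued to the agent's user
AGENT_ID = 1                           # assumption: id returned by agent/register

resp = requests.post(
    f"{SERVER_URL}/api/v1/agent/startuptime",
    headers={"Authorization": f"Token {API_TOKEN}"},
    # startupTime unit is not specified in the diff; milliseconds assumed here
    json={"agentId": AGENT_ID, "startupTime": int(time.time() * 1000)},
)
print(resp.status_code, resp.json())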
diff --git a/dongtai_web/aggr_vul/aggr_vul_list.py b/dongtai_web/aggr_vul/aggr_vul_list.py new file mode 100644 index 000000000..5f950c0ac --- /dev/null +++ b/dongtai_web/aggr_vul/aggr_vul_list.py @@ -0,0 +1,446 @@ +# 按类型获取 组件漏洞 应用漏洞列表 +from elasticsearch_dsl import Q, Search +from dongtai_common.models.asset_vul import IastAssetVulnerabilityDocument +from dongtai_common.common.utils import make_hash +from dongtai_conf import settings +from django.core.cache import cache +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.models.project import IastProject +from dongtai_common.models.program_language import IastProgramLanguage +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_common.models.strategy import IastStrategyModel +from elasticsearch_dsl import A +from elasticsearch import Elasticsearch +import json +import time +import logging +from dongtai_common.endpoint import R +from django.forms import model_to_dict +from dongtai_common.endpoint import UserEndPoint + +from dongtai_web.utils import extend_schema_with_envcheck +from dongtai_web.serializers.aggregation import AggregationArgsSerializer +from rest_framework.serializers import ValidationError +from django.utils.translation import gettext_lazy as _ +from dongtai_web.aggregation.aggregation_common import getAuthUserInfo, turnIntListOfStr, getAuthBaseQuery, auth_user_list_str +import pymysql +from dongtai_web.serializers.vul import VulSerializer +from dongtai_common.models.asset_vul import IastAssetVul, IastVulAssetRelation, IastAssetVulType, IastAssetVulTypeRelation +from dongtai_common.models import AGGREGATION_ORDER, LANGUAGE_ID_DICT, SHARE_CONFIG_DICT, APP_LEVEL_RISK, LICENSE_RISK, \ + SCA_AVAILABILITY_DICT +from dongtai_conf.settings import ELASTICSEARCH_STATE +from typing import List + + +logger = logging.getLogger("django") +INT_LIMIT: int = 2**64 - 1 + + +def convert_cwe(cwe: [List, str]) -> str: + if isinstance(cwe, list): + if len(cwe) > 0: + return cwe[0].replace("CWE-", "") + return "" + elif isinstance(cwe, str): + return cwe.replace("CWE-", "") + return "" + + +def get_cve_from_cve_nums(cve_nums: dict) -> str: + cwe = cve_nums.get("cwe", []) + return convert_cwe(cwe) + + +class GetAggregationVulList(UserEndPoint): + name = "api-v1-aggregation-vul-list" + description = _("New application") + + @extend_schema_with_envcheck( + request=AggregationArgsSerializer, + tags=[_('VulList')], + summary=_('Vul List Select'), + description=_( + "select sca vul and app vul by keywords" + ), + ) + # 组件漏洞 列表 + def post(self, request): + ser = AggregationArgsSerializer(data=request.data) + keywords = "" + join_table = "" + query_condition = " where rel.is_del=0 " + try: + if ser.is_valid(True): + page_size = ser.validated_data['page_size'] + page = ser.validated_data['page'] + begin_num = (page - 1) * page_size + end_num = page * page_size + # should refact into serilizer + if begin_num > INT_LIMIT or end_num > INT_LIMIT: + return R.failure() + keywords = ser.validated_data.get("keywords", "") + es_query = {} + if keywords: + keywords = pymysql.converters.escape_string(keywords) + keywords = "+" + keywords + es_query['search_keyword'] = ser.validated_data.get( + "keywords", "") + order_type = AGGREGATION_ORDER.get( + str(ser.validated_data['order_type']), "vul.level_id") + order_type_desc = "desc" if ser.validated_data[ + 'order_type_desc'] else "asc" + es_dict = { + "1": "level_id", + "2": "create_time", + "3": "vul_update_time" + } + order_type_es = es_dict.get( + 
str(ser.validated_data['order_type']), "level_id") + es_query['order'] = {order_type_es: {"order": order_type_desc}} + # 从项目列表进入 绑定项目id + if ser.validated_data.get("bind_project_id", 0): + query_condition = query_condition + \ + " and asset.project_id={} ".format(str(ser.validated_data.get("bind_project_id"))) + es_query['bind_project_id'] = ser.validated_data.get( + "bind_project_id") + # 项目版本号 + if ser.validated_data.get("project_version_id", 0): + query_condition = query_condition + \ + " and asset.project_version_id={} ".format(str(ser.validated_data.get("project_version_id"))) + es_query['project_version_id'] = ser.validated_data.get( + "project_version_id") + # 按项目筛选 + if ser.validated_data.get("project_id_str", ""): + project_str = turnIntListOfStr( + ser.validated_data.get( + "project_id_str", ""), "asset.project_id") + query_condition = query_condition + project_str + es_query['project_ids'] = turnIntListOfStr( + ser.validated_data.get("project_id_str")) + # 按语言筛选 + if ser.validated_data.get("language_str", ""): + language_str = ser.validated_data.get("language_str", "") + type_list = language_str.split(",") + # 安全校验,强制转int + type_list = list(map(int, type_list)) + type_int_list = list(map(str, type_list)) + lang_str = [] + for one_type in type_int_list: + lang_str.append( + "'" + + LANGUAGE_ID_DICT.get( + one_type, + "") + "'") + type_int_str = ",".join(lang_str) + language_str_change = " and {} in ({}) ".format( + "vul.package_language", type_int_str) + query_condition = query_condition + language_str_change + language_id_list = turnIntListOfStr( + ser.validated_data.get("language_str", "")) + language_arr = [] + for lang in language_id_list: + language_arr.append(LANGUAGE_ID_DICT.get(str(lang))) + es_query['language_ids'] = language_arr + # 漏洞类型筛选 弃用 + if ser.validated_data.get("hook_type_id_str", ""): + vul_type_str = turnIntListOfStr(ser.validated_data.get( + "hook_type_id_str", ""), "typeR.asset_vul_type_id") + query_condition = query_condition + vul_type_str + join_table = join_table + \ + "left JOIN iast_asset_vul_type_relation as typeR on vul.id=typeR.asset_vul_id " + # 漏洞等级筛选 + if ser.validated_data.get("level_id_str", ""): + status_str = turnIntListOfStr( + ser.validated_data.get( + "level_id_str", ""), "vul.level_id") + query_condition = query_condition + status_str + es_query['level_ids'] = turnIntListOfStr( + ser.validated_data.get("level_id_str")) + # 可利用性 + if ser.validated_data.get("availability_str", ""): + availability_arr = turnIntListOfStr( + ser.validated_data.get("availability_str", "")) + # there is a bug, and it has been fix in validator. + # in fact, a more reasonable approch is use serializer to + # handle the cover in prepose. 
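
# Editor's note (hypothetical helper, not the project's turnIntListOfStr): the filters
# above are assembled as raw SQL fragments after forcing every id through int(), which is
# what keeps the string formatting safe. A standalone sketch of that "validate, then build
# an IN (...) fragment" step, written with placeholders so the values could also be bound
# as query parameters:
from typing import List, Tuple


def int_in_clause(column: str, id_str: str) -> Tuple[str, List[int]]:
    """Turn '1,2,3' into (' and col in (%s,%s,%s) ', [1, 2, 3])."""
    ids = [int(part) for part in id_str.split(",") if part.strip()]
    if not ids:
        return "", []
    placeholders = ",".join(["%s"] * len(ids))
    return " and {} in ({}) ".format(column, placeholders), ids


clause, params = int_in_clause("asset.project_id", "3,7,11")
assert clause == " and asset.project_id in (%s,%s,%s) "
assert params == [3, 7, 11]
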
+ if 3 in availability_arr: + query_condition = query_condition + " and vul.have_article=0 and vul.have_poc=0 " + else: + if 1 in availability_arr: + query_condition = query_condition + " and vul.have_poc=1 " + if 2 in availability_arr: + query_condition = query_condition + " and vul.have_article=1 " + es_query['availability_ids'] = turnIntListOfStr( + ser.validated_data.get("availability_str")) + + except ValidationError as e: + return R.failure(data=e.detail) + user_auth_info = auth_user_list_str( + user=request.user, user_table="asset") + query_condition = query_condition + \ + user_auth_info.get("user_condition_str") + + if keywords: + query_base = "SELECT DISTINCT(vul.id),vul.*, " \ + " MATCH( `vul`.`vul_name`,`vul`.`aql`,`vul`.`vul_serial` ) AGAINST ( %s IN NATURAL LANGUAGE MODE ) AS `score`" \ + " from iast_asset_vul_relation as rel " \ + "left JOIN iast_asset_vul as vul on rel.asset_vul_id=vul.id " \ + "left JOIN iast_asset as asset on rel.asset_id=asset.id " + join_table + query_condition + + else: + query_base = "SELECT DISTINCT(vul.id),vul.* from iast_asset_vul_relation as rel " \ + "left JOIN iast_asset_vul as vul on rel.asset_vul_id=vul.id " \ + "left JOIN iast_asset as asset on rel.asset_id=asset.id " + join_table + query_condition + + # mysql 全文索引下,count不准确,等于全部数量 + new_order = order_type + " " + order_type_desc + if order_type == "vul.level_id": + if order_type_desc == "desc": + new_order = new_order + ", vul.update_time desc" + else: + new_order = new_order + ", vul.update_time_desc" + + if keywords: + all_vul = IastAssetVul.objects.raw( + query_base + " order by score desc, %s limit %s,%s; " % + (new_order, begin_num, end_num), [keywords]) + else: + all_vul = IastAssetVul.objects.raw( + query_base + " order by %s limit %s,%s; " % + (new_order, begin_num, end_num)) + if ELASTICSEARCH_STATE: + all_vul = get_vul_list_from_elastic_search( + request.user.id, + page_size=ser.validated_data['page_size'], + page=ser.validated_data['page'], + **es_query) + content_list = [] + + if all_vul: + vul_ids = [] + # print(all_vul.query.__str__()) + for item in all_vul: + # 拼写 漏洞类型 + # 拼写 漏洞编号 + availability_arr = [] + if item.have_poc: + availability_arr.append(SCA_AVAILABILITY_DICT.get("1")) + if item.have_article: + availability_arr.append(SCA_AVAILABILITY_DICT.get("2")) + if not availability_arr: + availability_arr.append(SCA_AVAILABILITY_DICT.get("3")) + availability_str = ",".join(availability_arr) + cur_data = { + "id": item.id, + "vul_name": item.vul_name, + "create_time": item.create_time, + "level_id": item.level_id, + "level_name": APP_LEVEL_RISK.get(str(item.level_id), ""), + "license": item.license, + "license_level": item.license_level, + "license_risk_name": LICENSE_RISK.get(str(item.license_level), ""), + "vul_cve_nums": item.vul_cve_nums, + "package_name": item.package_name, + "package_safe_version": item.package_safe_version, + "package_latest_version": item.package_latest_version, + "package_language": item.package_language, + # "type_id": item.type_id, + "availability_str": availability_str, + # "type_name": item.type_name, + } + cwe = get_cve_from_cve_nums(cur_data["vul_cve_nums"]) + if cwe: + cur_data['vul_cve_nums']['cwe_num'] = cwe + vul_ids.append(item.id) + content_list.append(cur_data) + # 追加 用户 权限 + base_relation = IastVulAssetRelation.objects.filter( + asset_vul_id__in=vul_ids, + is_del=0, + asset__user_id__in=user_auth_info['user_list'], + asset__project_id__gt=0) + # base_relation = getAuthUserInfo(request.user,base_relation) + pro_info = 
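
# Editor's note: the raw queries above page with "limit %s,%s" fed begin_num and end_num.
# MySQL's LIMIT clause takes (offset, row_count), so for page-based paging the second
# value is normally the page size rather than page * page_size -- worth double-checking
# here. The usual arithmetic in isolation (illustrative helper only):
def limit_clause(page: int, page_size: int) -> str:
    """Return a MySQL LIMIT fragment for 1-based page numbers."""
    offset = (page - 1) * page_size
    return "limit {},{}".format(offset, page_size)


assert limit_clause(1, 20) == "limit 0,20"
assert limit_clause(3, 20) == "limit 40,20"   # rows 41..60
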
base_relation.values( + "asset_vul_id", "asset__project_id", "asset__project_name", + "asset__project_version__version_name", + "asset__agent__server__container").distinct() + pro_arr = {} + for item in pro_info: + vul_id = item['asset_vul_id'] + item['server_type'] = VulSerializer.split_container_name( + item['asset__agent__server__container']) + del item['asset_vul_id'] + if pro_arr.get(vul_id, []): + pro_arr[vul_id].append(item) + else: + pro_arr[vul_id] = [item] + # 根据vul_id获取对应的漏洞类型 一对多 + type_info = IastAssetVulTypeRelation.objects.filter( + asset_vul_id__in=vul_ids).values( + "asset_vul_id", "asset_vul_type__name") + type_arr = {} + for item in type_info: + if not type_arr.get(item['asset_vul_id'], []): + type_arr[item['asset_vul_id']] = [ + item['asset_vul_type__name']] + elif item['asset_vul_type__name'] not in type_arr[item['asset_vul_id']]: + type_arr[item['asset_vul_id']].append( + item['asset_vul_type__name']) + for row in content_list: + row["pro_info"] = pro_arr.get(row['id'], []) + row['type_name'] = ",".join(type_arr.get(row['id'], [])) + return R.success(data={ + 'messages': content_list, + 'page': { + "page_size": page_size, + "cur_page": page + } + }, ) + + +def get_vul_list_from_elastic_search(user_id, + project_ids=[], + project_version_ids=[], + level_ids=[], + language_ids=[], + availability_ids=[], + search_keyword="", + page=1, + page_size=10, + bind_project_id=0, + project_version_id=0, + order={}): + user_id_list = [user_id] + auth_user_info = auth_user_list_str(user_id=user_id) + user_id_list = auth_user_info['user_list'] + must_query = [ + Q('terms', asset_user_id=user_id_list), + Q('terms', asset_vul_relation_is_del=[0]), + Q('range', asset_project_id={'gt': 0}), + ] + order_list = [ + 'update_time', '-asset_vul_relation_id', "asset_vul_id" + ] + if order: + order_list.insert(0, order) + if bind_project_id: + must_query.append(Q('terms', asset_project_id=[bind_project_id])) + if project_version_id: + must_query.append( + Q('terms', asset_project_version_id=[project_version_id])) + if project_ids: + must_query.append(Q('terms', asset_project_id=project_ids)) + if project_version_ids: + must_query.append( + Q('terms', asset_project_version_id=project_version_ids)) + if level_ids: + must_query.append(Q('terms', level_id=level_ids)) + if language_ids: + must_query.append( + Q('terms', **{"package_language.keyword": language_ids})) + if availability_ids: + sub_bool_query = [] + for availability in availability_ids: + if availability == 3: + sub_bool_query.append(Q('terms', have_article=[0])) + sub_bool_query.append(Q('terms', have_poc=[0])) + elif availability == 1: + sub_bool_query.append(Q('terms', have_poc=[1])) + elif availability == 2: + sub_bool_query.append(Q('terms', have_article=[1])) + must_query.append(Q('bool', should=sub_bool_query)) + + if search_keyword: + must_query.append( + Q('multi_match', + query=search_keyword, + fields=["vul_name", "vul_serial", "aql"])) + hashkey = make_hash([ + user_id, project_ids, project_version_ids, level_ids, + language_ids, search_keyword, page_size, bind_project_id, + project_version_id + ]) + after_table = cache.get(hashkey, {}) + after_key = after_table.get(page, None) + extra_dict = {} + if after_key: + sub_after_must_query = [] + sub_after_should_query = [] + for info, value in zip(order_list, after_key): + field = '' + opt = '' + if isinstance(info, dict): + field = list(info.keys())[0] + if info[field]['order'] == 'desc': + opt = 'lt' + else: + opt = 'gt' + if isinstance(info, str): + if info.startswith('-'): + 
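
# Editor's note (illustration only): pro_arr and type_arr above group flat queryset rows
# into per-vulnerability lists keyed by asset_vul_id. The same grouping in a compact,
# standalone form with collections.defaultdict and made-up rows:
from collections import defaultdict

rows = [
    {"asset_vul_id": 1, "asset__project_name": "shop"},
    {"asset_vul_id": 1, "asset__project_name": "billing"},
    {"asset_vul_id": 2, "asset__project_name": "shop"},
]

projects_by_vul = defaultdict(list)
for row in rows:
    vul_id = row.pop("asset_vul_id")
    projects_by_vul[vul_id].append(row)

assert [p["asset__project_name"] for p in projects_by_vul[1]] == ["shop", "billing"]
assert len(projects_by_vul[2]) == 1
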
field = info[1::] + opt = 'lt' + else: + field = info + opt = 'gt' + if info == "asset_vul_id": + sub_after_must_query.append( + Q('range', **{field: {opt: value}})) + else: + sub_after_should_query.append( + Q('range', **{field: { + opt: value + }})) + must_query.append( + Q('bool', must=sub_after_must_query, + should=sub_after_should_query, minimum_should_match=1)) + #extra_dict['search_after'] = after_key + a = Q('bool', + must=must_query) + res = IastAssetVulnerabilityDocument.search().query(a).extra( + collapse={ + "field": "asset_vul_id" + }).extra(**extra_dict).sort(*order_list)[:page_size].using( + Elasticsearch(settings.ELASTICSEARCH_DSL['default']['hosts'])) + resp = res.execute() + vuls = [i._d_ for i in list(resp)] + if resp.hits: + afterkey = resp.hits[-1].meta['sort'] + after_table[page + 1] = afterkey + cache.set(hashkey, after_table) + keymaps = {"asset_vul_id": "id"} + for i in vuls: + for k, v in keymaps.items(): + i[v] = i[k] + del i[k] + from collections import namedtuple + import json + namedtuple_vuls = [] + if vuls: + keys = [ + 'vul_cve_nums', 'asset_project_version_id', 'license_level', + 'cve_code', 'asset_id', 'asset_project_id', 'vul_publish_time', + 'update_time_desc', 'package_language', 'have_poc', 'search_title', + 'package_safe_version', 'asset_user_id', 'package_version', + 'vul_update_time', 'update_time', 'asset_vul_relation_id', + 'vul_name', 'have_article', 'level_id', 'vul_detail', + 'asset_agent_id', 'package_name', 'vul_serial', 'create_time', + 'package_hash', 'license', 'cve_id', 'aql', + 'package_latest_version', 'asset_vul_relation_is_del', 'id' + ] + # filter(lambda x: x != '@timestamp', vuls[0].keys()) + AssetVul = namedtuple('AssetVul', keys) + for i in vuls: + i['vul_cve_nums'] = json.loads(i['vul_cve_nums']) + if '@timestamp' in i.keys(): + del i['@timestamp'] + for key in keys: + if key not in i.keys(): + i[key] = None + i['id'] = i['id'][0] + dic = {} + for k, v in i.items(): + if k in keys: + dic[k] = v + asset_vul = AssetVul(**dic) + namedtuple_vuls.append(asset_vul) + return namedtuple_vuls diff --git a/dongtai_web/aggr_vul/aggr_vul_summary.py b/dongtai_web/aggr_vul/aggr_vul_summary.py new file mode 100644 index 000000000..f9f9ccd78 --- /dev/null +++ b/dongtai_web/aggr_vul/aggr_vul_summary.py @@ -0,0 +1,327 @@ +from dongtai_conf.settings import ELASTICSEARCH_STATE +import copy + +from dongtai_common.endpoint import UserEndPoint +from dongtai_web.utils import extend_schema_with_envcheck +from dongtai_web.serializers.aggregation import AggregationArgsSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_common.endpoint import R +from dongtai_web.aggregation.aggregation_common import auth_user_list_str +from dongtai_common.models import LANGUAGE_DICT +from rest_framework.serializers import ValidationError +from django.db import connection +from dongtai_common.common.utils import cached_decorator +from dongtai_common.models import APP_LEVEL_RISK + + +def get_annotate_sca_common_data(user_id: int, pro_condition: str): + return get_annotate_sca_base_data(user_id, pro_condition) + +# @cached_decorator(random_range=(2 * 60 * 60, 2 * 60 * 60), use_celery_update=True) + + +def get_annotate_sca_cache_data(user_id: int, pro_condition: str): + return get_annotate_sca_base_data(user_id, pro_condition) + + +def get_annotate_sca_base_data(user_id: int, pro_condition: str): + base_summary = { + "level": [], + "availability": { + "have_poc": { + "name": "存在利用代码", + "num": 0, + "id": 1 + }, + "have_article": { + "name": 
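
# Editor's note: instead of offset paging, the Elasticsearch path above remembers the sort
# key of the last hit on each page and turns it into range filters for the next page -- a
# hand-rolled search_after keyed by a hash of the query. A stdlib-only sketch of that
# cursor bookkeeping, with a plain dict standing in for the Django cache:
cursor_table = {}          # page number -> sort key that ended the previous page


def remember_cursor(page: int, last_sort_key) -> None:
    """Store the sort key that ended `page` so page + 1 can resume after it."""
    cursor_table[page + 1] = last_sort_key


def cursor_for(page: int):
    """Return the 'resume after' key for `page`, or None for a fresh first page."""
    return cursor_table.get(page)


remember_cursor(1, (1650000000, 42))          # page 1 ended at this (time, id) sort value
assert cursor_for(2) == (1650000000, 42)      # page 2 resumes after that key
assert cursor_for(1) is None                  # page 1 never needs a cursor
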
"存在分析文章", + "num": 0, + "id": 2 + }, + "no_availability": { + "name": "无利用信息", + "num": 0, + "id": 3 + }, + }, + "hook_type": [], + "language": [], + "project": [] + } + # auth_condition = getAuthBaseQuery(user_id=user_id, table_str="asset") + user_auth_info = auth_user_list_str(user_id=user_id, user_table="asset") + query_condition = " where rel.is_del=0 and asset.project_id>0 " + \ + user_auth_info.get("user_condition_str") + pro_condition + base_join = "left JOIN iast_asset_vul_relation as rel on rel.asset_vul_id=vul.id " \ + "left JOIN iast_asset as asset on rel.asset_id=asset.id " + # level_join = "left JOIN iast_vul_level as level on level.id=vul.level_id " + + with connection.cursor() as cursor: + count_level_query = "SELECT vul.level_id,count( DISTINCT(vul.id )) from iast_asset_vul as vul " \ + + base_join + query_condition + " group by vul.level_id " + result_summary = base_summary + + # level_summary = IastVulLevel.objects.raw(count_level_query) + cursor.execute(count_level_query) + level_summary = cursor.fetchall() + if level_summary: + for item in level_summary: + level_id, count = item + result_summary['level'].append({ + "name": APP_LEVEL_RISK.get(str(level_id), "None"), + "num": count, + "id": level_id + }) + + # 存在利用代码 + count_poc_query = "SELECT count(DISTINCT( vul.id )) as have_poc_count from iast_asset_vul as vul " \ + + base_join + query_condition + " and vul.have_poc=1 " + cursor.execute(count_poc_query) + poc_summary = cursor.fetchone() + if poc_summary: + result_summary['availability']['have_poc']['num'] = poc_summary[0] + # 存在分析文章 + count_article_query = "SELECT count(DISTINCT( vul.id )) as have_article_count from iast_asset_vul as vul " \ + + base_join + query_condition + " and vul.have_article=1 " + cursor.execute(count_article_query) + article_summary = cursor.fetchone() + if article_summary: + result_summary['availability']['have_article']['num'] = article_summary[0] + # 无利用信息 + count_no_availability_query = "SELECT count(DISTINCT( vul.id )) as no_availability from iast_asset_vul as vul " \ + + base_join + query_condition + " and vul.have_article=0 and vul.have_poc=0 " + cursor.execute(count_no_availability_query) + no_availability_summary = cursor.fetchone() + if no_availability_summary: + result_summary['availability']['no_availability']['num'] = no_availability_summary[0] + + count_language_query = "SELECT vul.package_language, count( DISTINCT(vul.id )) AS count_package_language from iast_asset_vul as vul " \ + + base_join + query_condition + " group by vul.package_language " + cursor.execute(count_language_query) + language_summary = cursor.fetchall() + lang_arr = copy.copy(LANGUAGE_DICT) + lang_key = lang_arr.keys() + if language_summary: + for item in language_summary: + package_language, count_package_language = item + result_summary['language'].append({ + "id": lang_arr.get(str(package_language)), + "num": count_package_language, + "name": package_language + }) + if package_language in lang_key: + del lang_arr[package_language] + if lang_arr: + for item in lang_arr.keys(): + result_summary["language"].append( + { + "id": LANGUAGE_DICT.get(item), + "num": 0, + "name": item + } + ) + + # 漏洞类型 统计 + vul_type_join = "left JOIN iast_asset_vul_type_relation as typeR on vul.id=typeR.asset_vul_id " \ + "left JOIN iast_asset_vul_type as typeInfo on typeInfo.id=typeR.asset_vul_type_id " + count_vul_type_query = "SELECT typeR.asset_vul_type_id as vul_type_id, count( DISTINCT(vul.id )) AS count_vul_type, " \ + "typeInfo.name as type_name from iast_asset_vul as vul " 
\ + + base_join + vul_type_join + query_condition + " group by typeR.asset_vul_type_id " + cursor.execute(count_vul_type_query) + type_summary = cursor.fetchall() + if type_summary: + for item in type_summary: + vul_type_id, count_vul_type, type_name = item + result_summary['hook_type'].append({ + "id": vul_type_id, + "num": count_vul_type, + "name": type_name + }) + # 归属项目 统计 + count_project_query = "SELECT asset.project_id, count( DISTINCT(vul.id )), " \ + " asset.project_name from iast_asset_vul as vul " \ + + base_join + query_condition + " and asset.project_id>0 group by asset.project_id,asset.project_name " + cursor.execute(count_project_query) + project_summary = cursor.fetchall() + if project_summary: + for item in project_summary: + project_id, count_project, project_name = item + result_summary['project'].append({ + "id": project_id, + "num": count_project, + "name": project_name + }) + + return result_summary + + +def get_annotate_data_es( + user_id, + bind_project_id=None, + project_version_id=None): + from dongtai_common.models.vulnerablity import IastVulnerabilityDocument + from elasticsearch_dsl import Q, Search + from elasticsearch import Elasticsearch + from elasticsearch_dsl import A + from dongtai_common.models.strategy import IastStrategyModel + from dongtai_common.models.vulnerablity import IastVulnerabilityStatus + from dongtai_common.models.program_language import IastProgramLanguage + from dongtai_common.models.project import IastProject + from dongtai_common.models.vul_level import IastVulLevel + from dongtai_common.models.asset_vul import IastAssetVulnerabilityDocument + from dongtai_conf import settings + from dongtai_web.utils import dict_transfrom + user_id_list = [user_id] + auth_user_info = auth_user_list_str(user_id=user_id) + user_id_list = auth_user_info['user_list'] + must_query = [ + Q('terms', asset_user_id=user_id_list), + Q('terms', asset_vul_relation_is_del=[0]), + Q('range', asset_project_id={'gt': 0}), + ] + if bind_project_id: + must_query.append(Q('terms', asset_project_id=[bind_project_id])) + if project_version_id: + must_query.append( + Q('terms', asset_project_version_id=[project_version_id])) + search = IastAssetVulnerabilityDocument.search().query( + Q('bool', must=must_query))[:0] + buckets = { + 'level': A('terms', field='level_id', size=2147483647), + 'project': A('terms', field='asset_project_id', size=2147483647), + "language": A('terms', + field='package_language.keyword', + size=2147483647) + } + for k, v in buckets.items(): + search.aggs.bucket(k, v).bucket("distinct_asset_vul", + A("cardinality", field="asset_vul_id")) + search.aggs.bucket('poc', A('terms', field='have_poc', + size=2147483647)).bucket( + 'article', + A('terms', + field='have_article', + size=2147483647)) + res = search.using(Elasticsearch( + settings.ELASTICSEARCH_DSL['default']['hosts'])).execute() + dic = {} + for key in buckets.keys(): + origin_buckets = res.aggs[key].to_dict()['buckets'] + for i in origin_buckets: + i['id'] = i['key'] + del i['key'] + i['num'] = i['distinct_asset_vul']["value"] + del i['distinct_asset_vul'] + del i['doc_count'] + if key == 'language': + for i in origin_buckets: + i['name'] = i['id'] + del i['id'] + language_names = [i['name'] for i in origin_buckets] + for i in origin_buckets: + i['id'] = LANGUAGE_DICT.get(i['name']) + for language_key in LANGUAGE_DICT.keys(): + if language_key not in language_names: + origin_buckets.append({ + 'id': LANGUAGE_DICT[language_key], + 'name': language_key, + 'num': 0, + }) + if key == 'project': + 
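
# Editor's note (simplified illustration): each terms bucket coming back from the
# aggregation is reshaped from Elasticsearch's {"key", "doc_count"} form into the API's
# {"id", "num", "name"} form, and languages with no hits are back-filled with num=0. The
# SCA summary actually takes the count from the cardinality sub-aggregation; doc_count is
# used below only to keep the sketch short, and LANGUAGE_IDS stands in for LANGUAGE_DICT:
LANGUAGE_IDS = {"JAVA": 1, "PYTHON": 2, "PHP": 3, "GO": 4}


def reshape_language_buckets(buckets):
    out = [{"name": b["key"], "num": b["doc_count"], "id": LANGUAGE_IDS.get(b["key"])}
           for b in buckets]
    seen = {entry["name"] for entry in out}
    for name, lang_id in LANGUAGE_IDS.items():
        if name not in seen:                  # language with zero vulnerabilities
            out.append({"name": name, "num": 0, "id": lang_id})
    return out


names = {e["name"]: e["num"] for e in reshape_language_buckets([{"key": "JAVA", "doc_count": 7}])}
assert names == {"JAVA": 7, "PYTHON": 0, "PHP": 0, "GO": 0}
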
project_ids = [i['id'] for i in origin_buckets] + project = IastProject.objects.filter(pk__in=project_ids).values( + 'id', 'name').all() + project_dic = dict_transfrom(project, 'id') + for i in origin_buckets: + if project_dic.get(i['id'], None): + i['name'] = project_dic[i['id']]['name'] + else: + del i + if key == 'level': + level_ids = [i['id'] for i in origin_buckets] + level = IastVulLevel.objects.filter(pk__in=level_ids).values( + 'id', 'name_value').all() + level_dic = dict_transfrom(level, 'id') + for i in origin_buckets: + i['name'] = level_dic[i['id']]['name_value'] + dic[key] = list(origin_buckets) + have_article_count = 0 + have_poc_count = 0 + no_usable_count = 0 + for i in res.aggs['poc'].to_dict()['buckets']: + if i['key'] == 1: + have_poc_count = i['doc_count'] + for k in i['article']['buckets']: + if k['key'] == 1: + have_article_count += k['doc_count'] + if i['key'] == 0: + for k in i['article']['buckets']: + if k['key'] == 1: + have_article_count += k['doc_count'] + if k['key'] == 0: + no_usable_count = k['doc_count'] + + dic["availability"] = { + "have_poc": { + "name": "存在利用代码", + "num": have_poc_count, + "id": 1 + }, + "have_article": { + "name": "存在分析文章", + "num": have_article_count, + "id": 2 + }, + "no_availability": { + "name": "无利用信息", + "num": no_usable_count, + "id": 3 + }, + } + return dic + + +class GetScaSummary(UserEndPoint): + name = "api-v1-aggregation-summary" + description = _("New application") + + @extend_schema_with_envcheck( + request=AggregationArgsSerializer, + tags=[_('VulList')], + summary=_('Vul List Select'), + description=_( + "count sca vul and app vul by keywords" + ), + ) + def post(self, request): + ser = AggregationArgsSerializer(data=request.data) + pro_condition = "" + try: + if ser.is_valid(True): + # 从项目列表进入 绑定项目id + if ser.validated_data.get("bind_project_id", 0): + pro_condition = pro_condition + " and asset.project_id={} ".format( + str(ser.validated_data.get("bind_project_id"))) + # 项目版本号 + if ser.validated_data.get("project_version_id", 0): + pro_condition = pro_condition + " and asset.project_version_id={} ".format( + str(ser.validated_data.get("project_version_id"))) + except ValidationError as e: + return R.failure(data=e.detail) + + if ELASTICSEARCH_STATE: + result_summary = get_annotate_data_es( + request.user.id, ser.validated_data.get("bind_project_id", 0), + ser.validated_data.get("project_version_id", 0)) + elif pro_condition: + # 存在项目筛选条件 + result_summary = get_annotate_sca_common_data( + request.user.id, pro_condition) + else: + # 全局数据,没有项目信息 数据按用户id缓存 + result_summary = get_annotate_sca_cache_data( + request.user.id, pro_condition) + + return R.success(data={ + 'messages': result_summary + }, ) diff --git a/dongtai_web/aggr_vul/app_vul_list.py b/dongtai_web/aggr_vul/app_vul_list.py new file mode 100644 index 000000000..3883f52ed --- /dev/null +++ b/dongtai_web/aggr_vul/app_vul_list.py @@ -0,0 +1,313 @@ +from rest_framework.serializers import ValidationError +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_web.aggregation.aggregation_common import turnIntListOfStr, auth_user_list_str +from dongtai_web.serializers.vul import VulSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +import pymysql +from 
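
# Editor's note: the poc aggregation above is a two-level terms bucket (have_poc ->
# have_article) that the loop flattens into three availability counters. An equivalent,
# condensed walk over a plain nested-dict response (bucket values are made up):
poc_buckets = [
    {"key": 1, "doc_count": 5, "article": {"buckets": [{"key": 1, "doc_count": 2},
                                                       {"key": 0, "doc_count": 3}]}},
    {"key": 0, "doc_count": 9, "article": {"buckets": [{"key": 1, "doc_count": 4},
                                                       {"key": 0, "doc_count": 5}]}},
]

have_poc = have_article = no_usable = 0
for poc in poc_buckets:
    if poc["key"] == 1:
        have_poc = poc["doc_count"]
    for art in poc["article"]["buckets"]:
        if art["key"] == 1:
            have_article += art["doc_count"]
        elif poc["key"] == 0 and art["key"] == 0:
            no_usable = art["doc_count"]

assert (have_poc, have_article, no_usable) == (5, 6, 5)
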
dongtai_web.serializers.aggregation import AggregationArgsSerializer +from dongtai_common.models import AGGREGATION_ORDER, LANGUAGE_ID_DICT, APP_LEVEL_RISK, APP_VUL_ORDER +from django.db.models import F +from dongtai_common.utils.db import SearchLanguageMode +from dongtai_common.models.vulnerablity import IastVulnerabilityDocument +from elasticsearch_dsl import Q, Search +from elasticsearch import Elasticsearch +from elasticsearch_dsl import A +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_common.models.program_language import IastProgramLanguage +from dongtai_common.models.project import IastProject +from dongtai_common.models.vul_level import IastVulLevel +from django.core.cache import cache +from dongtai_conf import settings +from dongtai_common.common.utils import make_hash +from dongtai_conf.settings import ELASTICSEARCH_STATE +from dongtai_engine.elatic_search.data_correction import data_correction_interpetor + + +INT_LIMIT: int = 2**64 - 1 + + +class GetAppVulsList(UserEndPoint): + + @ extend_schema_with_envcheck( + request=AggregationArgsSerializer, + tags=[_('app VulList')], + summary=_('app List Select'), + description=_( + "select sca vul and app vul by keywords" + ), + ) + def post(self, request): + """ + :param request: + :return: + """ + end = { + "status": 201, + "msg": "success", + "data": [], + } + ser = AggregationArgsSerializer(data=request.data) + user = request.user + # 获取用户权限 + auth_user_info = auth_user_list_str(user=user) + queryset = IastVulnerabilityModel.objects.filter( + is_del=0, + agent__bind_project_id__gt=0, + agent__user_id__in=auth_user_info['user_list']) + + try: + if ser.is_valid(True): + page_size = ser.validated_data['page_size'] + page = ser.validated_data['page'] + begin_num = (page - 1) * page_size + end_num = page * page_size + # should refact into serilizer + if begin_num > INT_LIMIT or end_num > INT_LIMIT: + return R.failure() + keywords = ser.validated_data.get("keywords", "") + es_query = {} + # 从项目列表进入 绑定项目id + if ser.validated_data.get("bind_project_id", 0): + queryset = queryset.filter( + agent__bind_project_id=ser.validated_data.get("bind_project_id")) + es_query['bind_project_id'] = ser.validated_data.get( + "bind_project_id") + # 项目版本号 + if ser.validated_data.get("project_version_id", 0): + queryset = queryset.filter( + agent__project_version_id=ser.validated_data.get("project_version_id")) + es_query['project_version_id'] = ser.validated_data.get( + "project_version_id") + # 按项目筛选 + if ser.validated_data.get("project_id_str", ""): + project_id_list = turnIntListOfStr( + ser.validated_data.get("project_id_str", "")) + queryset = queryset.filter( + agent__bind_project_id__in=project_id_list) + es_query['project_ids'] = project_id_list + # 漏洞类型筛选 + if ser.validated_data.get("hook_type_id_str", ""): + vul_type_list = turnIntListOfStr( + ser.validated_data.get("hook_type_id_str", "")) + queryset = queryset.filter(strategy_id__in=vul_type_list) + es_query['strategy_ids'] = vul_type_list + # 漏洞等级筛选 + if ser.validated_data.get("level_id_str", ""): + level_id_list = turnIntListOfStr( + ser.validated_data.get("level_id_str", "")) + queryset = queryset.filter(level_id__in=level_id_list) + es_query['level_ids'] = level_id_list + # 按状态筛选 + if ser.validated_data.get("status_id_str", ""): + status_id_list = turnIntListOfStr( + ser.validated_data.get("status_id_str", "")) + queryset = queryset.filter(status_id__in=status_id_list) + es_query['status_ids'] 
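
# Editor's note (illustrative helper): both list endpoints reject page/page_size
# combinations whose offsets exceed 2**64 - 1 before they reach the database, since
# MySQL's LIMIT arguments are unsigned 64-bit integers. The guard in isolation:
INT_LIMIT = 2 ** 64 - 1


def page_window(page: int, page_size: int):
    """Return (begin, end) row offsets, or None if they would overflow BIGINT UNSIGNED."""
    begin_num = (page - 1) * page_size
    end_num = page * page_size
    if begin_num > INT_LIMIT or end_num > INT_LIMIT:
        return None
    return begin_num, end_num


assert page_window(3, 20) == (40, 60)
assert page_window(10 ** 18, 10 ** 18) is None   # rejected instead of overflowing
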
= status_id_list + # 按语言筛选 + if ser.validated_data.get("language_str", ""): + language_id_list = turnIntListOfStr( + ser.validated_data.get("language_str", "")) + language_arr = [] + for lang in language_id_list: + language_arr.append(LANGUAGE_ID_DICT.get(str(lang))) + queryset = queryset.filter( + agent__language__in=language_arr) + es_query['language_ids'] = language_arr + order_list = [] + fields = [ + "id", + "uri", + "http_method", + "top_stack", + "bottom_stack", + "level_id", + "taint_position", + "status_id", + "first_time", + "latest_time", + "strategy__vul_name", + "agent__language", + "agent__project_name", + "agent__server__container", + "agent__bind_project_id"] + if keywords: + es_query['search_keyword'] = keywords + keywords = pymysql.converters.escape_string(keywords) + order_list = ["-score"] + fields.append("score") + + queryset = queryset.annotate( + score=SearchLanguageMode( + [ + F('search_keywords'), + F('uri'), + F('vul_title'), + F('http_method'), + F('http_protocol'), + F('top_stack'), + F('bottom_stack')], + search_keyword=keywords)) + # 排序 + order_type = APP_VUL_ORDER.get( + str(ser.validated_data['order_type']), "level_id") + order_type_desc = "-" if ser.validated_data['order_type_desc'] else "" + if order_type == "level_id": + order_list.append(order_type_desc + order_type) + if ser.validated_data['order_type_desc']: + order_list.append("-latest_time") + else: + order_list.append("latest_time_desc") + else: + order_list.append(order_type_desc + order_type) + es_query['order'] = order_type_desc + order_type + if ELASTICSEARCH_STATE: + vul_data = get_vul_list_from_elastic_search( + request.user.id, + page=page, + page_size=page_size, + **es_query) + else: + vul_data = queryset.values( + + * tuple(fields)).order_by( + + * tuple(order_list))[ + begin_num:end_num] + except ValidationError as e: + return R.failure(data=e.detail) + if vul_data: + for item in vul_data: + item['level_name'] = APP_LEVEL_RISK.get( + str(item['level_id']), "") + item['server_type'] = VulSerializer.split_container_name( + item['agent__server__container']) + end['data'].append(item) + + # all Iast Vulnerability Status + status = IastVulnerabilityStatus.objects.all() + status_obj = {} + for tmp_status in status: + status_obj[tmp_status.id] = tmp_status.name + for i in end['data']: + i['status__name'] = status_obj.get(i['status_id'], "") + + return R.success(data={ + 'messages': end['data'], + 'page': { + "page_size": page_size, + "cur_page": page + } + }, ) + + +def get_vul_list_from_elastic_search(user_id, + project_ids=[], + project_version_ids=[], + hook_type_ids=[], + level_ids=[], + status_ids=[], + strategy_ids=[], + language_ids=[], + search_keyword="", + page=1, + page_size=10, + bind_project_id=0, + project_version_id=0, + order=""): + user_id_list = [user_id] + auth_user_info = auth_user_list_str(user_id=user_id) + user_id_list = auth_user_info['user_list'] + from dongtai_common.models.strategy import IastStrategyModel + from dongtai_common.models.agent import IastAgent + must_query = [ + Q('terms', user_id=user_id_list), + Q('terms', is_del=[0]), + Q('range', bind_project_id={'gt': 0}), + Q('range', strategy_id={'gt': 0}), + ] + order_list = ['_score', 'level_id', '-latest_time', '-id'] + if order: + order_list.insert(0, order) + if bind_project_id: + must_query.append(Q('terms', bind_project_id=[bind_project_id])) + if project_version_id: + must_query.append(Q('terms', project_version_id=[project_version_id])) + if project_ids: + must_query.append(Q('terms', 
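
# Editor's note (hypothetical helper mirroring the shape of the logic above, not the exact
# APP_VUL_ORDER table): the ordering is assembled as a list of Django order_by() strings --
# "-score" first when a keyword search added a relevance annotation, then the requested
# column with an optional "-" prefix, then a recency tie-breaker for level ordering. The
# literal "latest_time_desc" in the ascending branch of the diff looks like a typo for the
# plain "latest_time" used here:
def build_order_by(order_field: str, descending: bool, has_keyword: bool):
    order_list = ["-score"] if has_keyword else []
    prefix = "-" if descending else ""
    order_list.append(prefix + order_field)
    if order_field == "level_id":             # tie-break equal levels by recency
        order_list.append("-latest_time" if descending else "latest_time")
    return order_list


assert build_order_by("level_id", True, True) == ["-score", "-level_id", "-latest_time"]
assert build_order_by("first_time", False, False) == ["first_time"]
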
project_id=project_ids)) + if project_version_ids: + must_query.append(Q('terms', project_version_id=project_version_ids)) + if level_ids: + must_query.append(Q('terms', level_id=level_ids)) + if status_ids: + must_query.append(Q('terms', status_id=status_ids)) + if language_ids: + must_query.append(Q('terms', **{"language.keyword": language_ids})) + if strategy_ids: + must_query.append(Q('terms', strategy_id=strategy_ids)) + if search_keyword: + must_query.append( + Q('multi_match', + query=search_keyword, + fields=[ + "search_keywords", "uri", "vul_title", "http_protocol", + "top_stack", "bottom_stack" + ])) + a = Q('bool', + must=must_query) + hashkey = make_hash([ + user_id, project_ids, project_version_ids, hook_type_ids, level_ids, + status_ids, language_ids, search_keyword, page_size, bind_project_id, + project_version_id + ]) + if page == 1: + cache.delete(hashkey) + after_table = cache.get(hashkey, {}) + after_key = after_table.get(page, None) + extra_dict = {} + if after_key: + extra_dict['search_after'] = after_key + res = IastVulnerabilityDocument.search().query(a).extra(**extra_dict).sort( + *order_list)[:page_size].using( + Elasticsearch(settings.ELASTICSEARCH_DSL['default']['hosts'])) + resp = res.execute() + extra_datas = IastVulnerabilityModel.objects.filter( + pk__in=[i['id'] + for i in resp]).values('strategy__vul_name', 'agent__language', + 'agent__project_name', + 'agent__server__container', + 'agent__bind_project_id', 'id') + extra_data_dic = {ex_data['id']: ex_data for ex_data in extra_datas} + vuls = [i._d_ for i in list(resp)] + vul_incorrect_id = [] + agent_values = ('language', 'project_name', 'server__container', + 'bind_project_id', 'id') + strategy_values = ('vul_name', ) + for vul in vuls: + if vul['id'] not in extra_data_dic.keys(): + vul_incorrect_id.append(vul['id']) + strategy_dic = IastStrategyModel.objects.filter( + pk=vul['strategy_id']).values(*strategy_values).first() + agent_dic = IastAgent.objects.filter(pk=vul['agent_id']).values( + *agent_values).first() + if not strategy_dic: + strategy_dic = {i: '' for i in strategy_values} + if not agent_dic: + agent_dic = {i: '' for i in agent_values} + for k, v in strategy_dic.items(): + vul['strategy__' + k] = v + for k, v in agent_dic.items(): + vul['agent__' + k] = v + else: + vul.update(extra_data_dic[vul['id']]) + if vul_incorrect_id: + data_correction_interpetor.delay('vulnerablity_sync_fail') + if resp.hits: + afterkey = resp.hits[-1].meta['sort'] + after_table[page + 1] = afterkey + cache.set(hashkey, after_table) + return vuls diff --git a/dongtai_web/aggr_vul/app_vul_summary.py b/dongtai_web/aggr_vul/app_vul_summary.py new file mode 100644 index 000000000..06e19406a --- /dev/null +++ b/dongtai_web/aggr_vul/app_vul_summary.py @@ -0,0 +1,296 @@ +from dongtai_web.utils import dict_transfrom +from dongtai_engine.elatic_search.data_correction import data_correction_interpetor +from dongtai_conf.settings import ELASTICSEARCH_STATE +import copy +from rest_framework.serializers import ValidationError +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck +from dongtai_web.serializers.aggregation import AggregationArgsSerializer +from dongtai_common.models import LANGUAGE_DICT +from dongtai_web.aggregation.aggregation_common import auth_user_list_str +from django.db.models import Count +from 
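
# Editor's note (simplified stand-in, plain dicts instead of querysets): the Elasticsearch
# hits above are enriched with relational fields fetched in one bulk query; any hit whose
# id is missing from that bulk result falls back to a per-row lookup and is flagged so the
# async consistency-repair task can be queued. The merge logic on its own:
def enrich_hits(hits, extra_by_id, fallback_lookup):
    out_of_sync = []
    for hit in hits:
        extra = extra_by_id.get(hit["id"])
        if extra is None:                     # ES still has a row the DB query did not return
            out_of_sync.append(hit["id"])
            extra = fallback_lookup(hit["id"])
        hit.update(extra)
    return hits, out_of_sync


hits = [{"id": 1}, {"id": 2}]
merged, stale = enrich_hits(hits, {1: {"project_name": "shop"}}, lambda _id: {"project_name": ""})
assert merged[1]["project_name"] == "" and stale == [2]
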
dongtai_common.common.utils import cached_decorator +from django.db.models import Q +import logging +logger = logging.getLogger('dongtai-webapi') + + +def _annotate_by_query(q, value_fields, count_field): + return ( + IastVulnerabilityModel.objects.filter(q) + .values(*value_fields) + .annotate(count=Count(count_field)) + ) + +# @cached_decorator(random_range=(2 * 60 * 60, 2 * 60 * 60), +# use_celery_update=True) + + +def get_annotate_cache_data(user_id: int): + return get_annotate_data(user_id, 0, 0) + + +def get_annotate_data( + user_id: int, bind_project_id=int, project_version_id=int +) -> dict: + auth_user_info = auth_user_list_str(user_id=user_id) + cache_q = Q(is_del=0, agent__bind_project_id__gt=0, + agent__user_id__in=auth_user_info['user_list']) + + # 从项目列表进入 绑定项目id + if bind_project_id: + cache_q = cache_q & Q(agent__bind_project_id=bind_project_id) + # 项目版本号 + if project_version_id: + cache_q = cache_q & Q(agent__project_version_id=project_version_id) + # 项目统计 + pro_info = _annotate_by_query( + cache_q, + ("agent__bind_project_id", "agent__project_name"), + "agent__bind_project_id", + ) + + result_summary = { + "level": [], + "status": [], + "hook_type": [], + "language": [], + "project": [], + } + + for item in pro_info: + result_summary["project"].append( + { + "name": item["agent__project_name"], + "num": item["count"], + "id": item["agent__bind_project_id"], + } + ) + # 漏洞类型统计 + strategy_info = _annotate_by_query( + cache_q, ("strategy_id", "strategy__vul_name"), "strategy_id" + ) + for item in strategy_info: + result_summary["hook_type"].append( + { + "name": item["strategy__vul_name"], + "num": item["count"], + "id": item["strategy_id"], + } + ) + + # 漏洞等级筛选 + count_info_level = _annotate_by_query( + cache_q, ("level_id", "level__name_value"), "level_id" + ) + for item in count_info_level: + result_summary["level"].append( + { + "name": item["level__name_value"], + "num": item["count"], + "id": item["level_id"], + } + ) + + # # 按状态筛选 + status_info = _annotate_by_query( + cache_q, ("status_id", "status__name"), "status_id" + ) + for item in status_info: + result_summary["status"].append( + { + "name": item["status__name"], + "num": item["count"], + "id": item["status_id"], + } + ) + + # # 按语言筛选 + language_info = _annotate_by_query( + cache_q, ("agent__language",), "agent__language") + lang_arr = copy.copy(LANGUAGE_DICT) + lang_key = lang_arr.keys() + for item in language_info: + result_summary["language"].append( + { + "name": item["agent__language"], + "num": item["count"], + "id": lang_arr.get(item["agent__language"]), + } + ) + if item["agent__language"] in lang_key: + del lang_arr[item["agent__language"]] + if lang_arr: + for item in lang_arr.keys(): + result_summary["language"].append( + { + "name": item, + "num": 0, + "id": LANGUAGE_DICT.get(item), + } + ) + return result_summary + + +class GetAppVulsSummary(UserEndPoint): + @extend_schema_with_envcheck( + request=AggregationArgsSerializer, + tags=[_("app Vul count")], + summary=_("app List count"), + description=_("select app vul by keywords"), + ) + def post(self, request): + """ + :param request: + :return: + """ + + user = request.user + user_id = user.id + + ser = AggregationArgsSerializer(data=request.data) + bind_project_id = 0 + project_version_id = 0 + try: + if ser.is_valid(True): + if ser.validated_data.get("bind_project_id", 0): + bind_project_id = ser.validated_data.get( + "bind_project_id", 0) + if ser.validated_data.get("project_version_id", 0): + project_version_id = ser.validated_data.get( + 
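
# Editor's note (illustration with made-up rows): _annotate_by_query below is a thin
# wrapper around .values(...).annotate(count=Count(...)), i.e. a SQL GROUP BY with a count
# per group. The same reduction over plain rows, for readers less familiar with the ORM:
from collections import Counter

level_rows = [
    {"level_id": 1, "level__name_value": "HIGH"},
    {"level_id": 1, "level__name_value": "HIGH"},
    {"level_id": 3, "level__name_value": "LOW"},
]

counts = Counter((r["level_id"], r["level__name_value"]) for r in level_rows)
summary = [{"id": level_id, "name": name, "num": num}
           for (level_id, name), num in counts.items()]

assert {(s["id"], s["num"]) for s in summary} == {(1, 2), (3, 1)}
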
"project_version_id", 0) + + if ELASTICSEARCH_STATE: + result_summary = get_annotate_data_es(user_id, bind_project_id, + project_version_id) + + elif bind_project_id or project_version_id: + result_summary = get_annotate_data( + user_id, bind_project_id, project_version_id + ) + else: + # 全局下走缓存 + result_summary = get_annotate_cache_data(user_id) + except ValidationError as e: + logger.info(e) + return R.failure(data=e.detail) + + return R.success( + data={ + "messages": result_summary, + }, + ) + + +def get_annotate_data_es(user_id, bind_project_id, project_version_id): + from dongtai_common.models.vulnerablity import IastVulnerabilityDocument + from elasticsearch_dsl import Q, Search + from elasticsearch import Elasticsearch + from elasticsearch_dsl import A + from dongtai_common.models.strategy import IastStrategyModel + from dongtai_common.models.vulnerablity import IastVulnerabilityStatus + from dongtai_common.models.program_language import IastProgramLanguage + from dongtai_common.models.project import IastProject + from dongtai_common.models.vul_level import IastVulLevel + + user_id_list = [user_id] + auth_user_info = auth_user_list_str(user_id=user_id) + user_id_list = auth_user_info['user_list'] + strategy_ids = list(IastStrategyModel.objects.all().values_list('id', + flat=True)) + must_query = [ + Q('terms', user_id=user_id_list), + Q('terms', is_del=[0]), + Q('terms', is_del=[0]), + Q('range', bind_project_id={'gt': 0}), + Q('range', strategy_id={'gt': 0}), + Q('terms', strategy_id=strategy_ids), + ] + if bind_project_id: + must_query.append(Q('terms', bind_project_id=[bind_project_id])) + if project_version_id: + must_query.append(Q('terms', project_version_id=[project_version_id])) + search = IastVulnerabilityDocument.search().query( + Q('bool', must=must_query))[:0] + buckets = { + 'level': A('terms', field='level_id', size=2147483647), + 'project': A('terms', field='bind_project_id', size=2147483647), + "strategy": A('terms', field='strategy_id', size=2147483647), + 'status': A('terms', field='status_id', size=2147483647), + "language": A('terms', field='language.keyword', size=2147483647) + } + for k, v in buckets.items(): + search.aggs.bucket(k, v) + from dongtai_conf import settings + res = search.using(Elasticsearch( + settings.ELASTICSEARCH_DSL['default']['hosts'])).execute() + dic = {} + for key in buckets.keys(): + origin_buckets = res.aggs[key].to_dict()['buckets'] + for i in origin_buckets: + i['id'] = i['key'] + del i['key'] + i['num'] = i['doc_count'] + del i['doc_count'] + if key == 'strategy': + strategy_ids = [i['id'] for i in origin_buckets] + strategy = IastStrategyModel.objects.filter( + pk__in=strategy_ids).values('id', 'vul_name').all() + strategy_dic = dict_transfrom(strategy, 'id') + for i in origin_buckets: + i['name'] = strategy_dic[i['id']]['vul_name'] + key = 'hook_type' + if key == 'status': + status_ids = [i['id'] for i in origin_buckets] + status = IastVulnerabilityStatus.objects.filter( + pk__in=status_ids).values('id', 'name').all() + status_dic = dict_transfrom(status, 'id') + for i in origin_buckets: + i['name'] = status_dic[i['id']]['name'] + if key == 'language': + for i in origin_buckets: + i['name'] = i['id'] + del i['id'] + language_names = [i['name'] for i in origin_buckets] + for i in origin_buckets: + i['id'] = LANGUAGE_DICT.get(i['name']) + for language_key in LANGUAGE_DICT.keys(): + if language_key not in language_names: + origin_buckets.append({ + 'id': LANGUAGE_DICT[language_key], + 'name': language_key, + 'num': 0 + }) + if key == 
'project': + project_ids = [i['id'] for i in origin_buckets] + project = IastProject.objects.filter(pk__in=project_ids).values( + 'id', 'name').all() + project_dic = dict_transfrom(project, 'id') + missing_ids = [] + for i in origin_buckets: + if i['id'] not in project_dic: + logger.info('found data consistency incorrect start ') + data_correction_interpetor.delay("project_missing") + missing_ids.append(i['id']) + continue + else: + i['name'] = project_dic[i['id']]['name'] + origin_buckets = filter(lambda x: x['id'] not in missing_ids, + origin_buckets) + if missing_ids: + logger.info('found data consistency incorrect ') + data_correction_interpetor.delay("project_missing") + + if key == 'level': + level_ids = [i['id'] for i in origin_buckets] + level = IastVulLevel.objects.filter(pk__in=level_ids).values( + 'id', 'name_value').all() + level_dic = dict_transfrom(level, 'id') + for i in origin_buckets: + i['name'] = level_dic[i['id']]['name_value'] + origin_buckets = sorted(origin_buckets, key=lambda x: x['id']) + dic[key] = list(origin_buckets) + return dict(dic) diff --git a/dongtai_web/aggregation/__init__.py b/dongtai_web/aggregation/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/aggregation/aggregation_common.py b/dongtai_web/aggregation/aggregation_common.py new file mode 100644 index 000000000..f22d194b8 --- /dev/null +++ b/dongtai_web/aggregation/aggregation_common.py @@ -0,0 +1,209 @@ + +from dongtai_common.models.asset_vul_relation import AssetVulRelation +from dongtai_common.models.aql_info import AqlInfo +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.hook_type import HookType +from dongtai_common.models import LICENSE_RISK,SCA_AVAILABILITY_DICT +from dongtai_web.serializers.vul import VulSerializer +from dongtai_common.models import User +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.asset_aggr import AssetAggr +from django.db.models import Q + +# list id 去重 +def getUniqueList(origin_list=[]): + return list(set(origin_list)) + +# str to int list +def turnIntListOfStr(type_str,field=""): + try: + type_list = type_str.split(",") + # 安全校验,强制转int + type_list = list(map(int, type_list)) + if field: + type_int_list = list(map(str, type_list)) + type_int_str = ",".join(type_int_list) + return " and {} in ({}) ".format(field,type_int_str) + else: + return type_list + except Exception as e: + return "" + + +# 通过sca——aql读取 组件漏洞 修复版本,最新版本,开源许可证,agent_id +def getScaInfoByAql(aql_ids=None): + if aql_ids is None: + return {} + else: + aql_ids = getUniqueList(aql_ids) + sca_info = { + "aql_arr":{}, + "asset_arr":{}, + "hash_arr":{}, + "agent_ids":[] + } + aql_info = AqlInfo.objects.filter(id__in=aql_ids).values( + "id", + "safe_version","latest_version","source_license","license_risk","availability") + if aql_info: + for item in aql_info: + aql_info_id = str(item['id']) + del item['id'] + item['license_risk_name'] = LICENSE_RISK.get(str(item['license_risk']),"无风险") + item['availability_name'] = SCA_AVAILABILITY_DICT.get(str(item['availability']),"无利用信息") + sca_info["aql_arr"][aql_info_id] = item + asset_info = AssetVulRelation.objects.filter(aql_info_id__in=aql_ids).values( + "aql_info_id","id","agent_id","create_time","vul_package_id","hash") + hash_list = [] + if asset_info: + for item in asset_info: + aql_info_id = item['aql_info_id'] + if item['hash'] not in hash_list: + hash_list.append(item['hash']) + del 
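
# Editor's note (illustration only): when a project id shows up in the Elasticsearch
# aggregation but no longer exists in the relational table, the bucket is dropped from the
# response and the async data-correction task is queued. The filtering step on its own,
# with a plain dict standing in for the IastProject lookup:
project_names = {1: "shop", 3: "billing"}          # ids that still exist in the DB
buckets = [{"id": 1, "num": 4}, {"id": 2, "num": 9}, {"id": 3, "num": 1}]

missing_ids = [b["id"] for b in buckets if b["id"] not in project_names]
kept = [dict(b, name=project_names[b["id"]]) for b in buckets if b["id"] in project_names]

assert missing_ids == [2]                          # would trigger the repair task
assert [b["name"] for b in kept] == ["shop", "billing"]
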
item['aql_info_id'] + if not sca_info["asset_arr"].get(aql_info_id): + sca_info["asset_arr"][aql_info_id] = [item] + else: + sca_info["asset_arr"][aql_info_id].append(item) + + sca_info["agent_ids"].append(item['agent_id']) + # 通过hash list 读取 iast_asset_aggr id + aggr_info = AssetAggr.objects.filter(signature_value__in=hash_list).values("id","signature_value") + + if aggr_info: + for item in aggr_info: + sca_info['hash_arr'][item['signature_value']] = item['id'] + + return sca_info + +# 通过app vul ids 读取应用漏洞调用链,agent_id,漏洞状态 +def getAppVulInfoById(vul_ids=None): + if vul_ids is None: + return {} + vul_info=IastVulnerabilityModel.objects.filter(id__in=vul_ids).values( + "id", + "top_stack","bottom_stack","status_id","status__name","agent_id","latest_time") + vul_result = { + "vul_info": {}, + "agent_ids":[], + } + if vul_info: + for item in vul_info: + vul_id = item['id'] + del item['id'] + vul_result['agent_ids'].append(item['agent_id']) + vul_result['vul_info'][vul_id] = item + return vul_result + +# 通过agent_ids 获取项目id,name,version,中间件, +def getProjectInfoByAgentId(agent_ids=None): + if agent_ids is None: + return {} + else: + agent_ids = getUniqueList(agent_ids) + agent_info = IastAgent.objects.filter(id__in=agent_ids).values( + "id", + "project_name","project_version_id","project_version__version_name","bind_project_id","server__container" + ) + agent_result = {} + if agent_info: + for item in agent_info: + agent_id = item['id'] + del item['id'] + item['server_type'] = VulSerializer.split_container_name( + item['server__container']) + agent_result[agent_id] = item + return agent_result + +# 通过漏洞类型id 获取漏洞名称,等级 +def getHookTypeName(ids=None): + if ids is None: + return {} + else: + type_arr = {} + + type_info = HookType.objects.filter(id__in=ids).values("id","type","name") + if type_info: + for item in type_info: + + type_id = item['id'] + del item['id'] + type_arr[type_id] = item + return type_arr + + +# 鉴权 IastVulAssetRelation +def getAuthUserInfo(user,base_query): + # is_superuser == 2 租户管理员 is_superuser == 1 超级管理员 is_department_admin==True 部门管理员 其他为普通用户 + user_id = user.id + # 超级管理员 + if user.is_system_admin(): + base_query = base_query.filter(asset__user_id=user_id) + # 租户管理员 获取 租户id + elif user.is_talent_admin(): + talent = user.get_talent() + if not talent or talent is None: + base_query = base_query.filter(asset__user_id=user_id) + else: + base_query = base_query.filter(asset__talent_id=talent.id) + # 部门管理员 获取部门id + elif user.is_department_admin: + users = UserEndPoint.get_auth_users(user) + user_ids = list(users.values_list("id", flat=True)) + base_query = base_query.filter(asset__user_id__in=user_ids) + else: + # 普通用户,直接筛选用户id + base_query = base_query.filter(asset__user_id=user_id) + return base_query + + +def auth_user_list_str(user=None,user_id=0,user_table=""): + result = { + "user_list":[], + "user_str":"", + "user_condition_str":"" + } + if user is None: + user = User.objects.filter(id=user_id).first() + users = UserEndPoint.get_auth_users(user) + user_ids = list(users.values_list("id", flat=True)) + result['user_list'] = user_ids + user_ids_arr = list(map(str, user_ids)) + user_str = ",".join(user_ids_arr) + result['user_str'] = user_str + if user_table: + result['user_condition_str'] = " and {}.user_id in ({})".format(user_table, user_str) + + return result + + +# 鉴权 后 获取 漏洞信息 auth_condition = getAuthBaseQuery(request.user, "asset") +def getAuthBaseQuery(user=None,table_str="",user_id=0): + + # is_superuser == 2 租户管理员 is_superuser == 1 超级管理员 is_department_admin==True 
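
# Editor's note (hypothetical helper with the same shape as auth_user_list_str above):
# the caller's visibility is reduced to a list of user ids plus, when a table alias is
# given, a ready-made SQL fragment scoped to that alias. Integer ids from the ORM are what
# keep the joined string safe to embed:
def user_condition(user_ids, table_alias=""):
    id_str = ",".join(str(int(uid)) for uid in user_ids)
    condition = " and {}.user_id in ({})".format(table_alias, id_str) if table_alias else ""
    return {"user_list": list(user_ids), "user_str": id_str, "user_condition_str": condition}


info = user_condition([4, 8, 15], table_alias="asset")
assert info["user_condition_str"] == " and asset.user_id in (4,8,15)"
assert user_condition([4, 8, 15])["user_condition_str"] == ""
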
部门管理员 其他为普通用户 + if user is None: + user = User.objects.filter(id=user_id).first() + else: + user_id = user.id + + query_base = " and {}.user_id={}".format(table_str, user_id) + # 超级管理员 + if user.is_system_admin(): + query_base = "" + # 租户管理员 获取 租户id + elif user.is_talent_admin(): + talent = user.get_talent() + if not talent or talent is None: + pass + else: + query_base = " and {}.talent_id={}".format(table_str,talent.id) + # 部门管理员 获取部门id + elif user.is_department_admin: + users = UserEndPoint.get_auth_users(user) + user_ids = list(users.values_list("id", flat=True)) + user_ids = list(map(str, user_ids)) + user_str = ",".join(user_ids) + query_base = " and {}.user_id in ({})".format(table_str,user_str) + + return query_base diff --git a/dongtai_web/aggregation/aggregation_del.py b/dongtai_web/aggregation/aggregation_del.py new file mode 100644 index 000000000..81f2c9902 --- /dev/null +++ b/dongtai_web/aggregation/aggregation_del.py @@ -0,0 +1,72 @@ +# 批量删除 组件漏洞+应用漏洞 +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.asset_vul import IastVulAssetRelation +from dongtai_common.models.asset_vul_relation import AssetVulRelation +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.asset import Asset +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.db import connection +from dongtai_web.aggregation.aggregation_common import turnIntListOfStr +import logging + +logger = logging.getLogger('dongtai-dongtai_conf') + +class DelVulMany(UserEndPoint): + name = "api-v2-aggregation-list-del" + description = _("del vul list of many") + + @extend_schema_with_envcheck( + + tags=[_('VulList')], + summary=_('Vul List delete'), + description=_( + "delete many app vul and dongtai_sca vul" + ), + ) + def post(self, request): + ids = request.data.get("ids","") + ids = turnIntListOfStr(ids) + source_type = request.data.get("source_type",1) + user = request.user + if source_type == 1: + queryset = IastVulnerabilityModel.objects.filter(is_del=0) + else: + queryset = IastVulAssetRelation.objects.filter(is_del=0) + + # 超级管理员 + if user.is_system_admin(): + pass + # 租户管理员 or 部门管理员 + elif user.is_talent_admin() or user.is_department_admin: + users = self.get_auth_users(user) + user_ids = list(users.values_list('id', flat=True)) + if source_type == 1: + queryset = queryset.filter(agent__user_id__in=user_ids) + else: + queryset = queryset.filter(asset__user_id__in=user_ids) + else: + # 普通用户 + if source_type == 1: + queryset = queryset.filter(agent__user_id=user.id) + else: + queryset = queryset.filter(asset__user_id=user.id) + + if source_type==1: + # 应用漏洞删除 + queryset.filter(id__in=ids).update(is_del=1) + else: + # 组件漏洞删除 + queryset.filter(asset_vul_id__in=ids).update(is_del=1) + # with connection.cursor() as cursor: + # sca_ids = list(map(str, ids)) + # sca_ids_str = ",".join(sca_ids) + # sql = " UPDATE iast_asset_aggr as aggr left join iast_asset_vul as vul on aggr.signature_value=vul.package_hash SET aggr.is_del = 1 WHERE vul.id in ({})".format( + # sca_ids_str) + # cursor.execute(sql) + + return R.success(data={ + 'messages': "success", + + }, ) diff --git a/dongtai_web/apitimelog/__init__.py b/dongtai_web/apitimelog/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/apitimelog/admin.py b/dongtai_web/apitimelog/admin.py new file mode 100644 index 000000000..8c38f3f3d --- 
/dev/null +++ b/dongtai_web/apitimelog/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/dongtai_web/apitimelog/apps.py b/dongtai_web/apitimelog/apps.py new file mode 100644 index 000000000..7aa0970fc --- /dev/null +++ b/dongtai_web/apitimelog/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class ApitimelogConfig(AppConfig): + name = 'apitimelog' diff --git a/dongtai_web/apitimelog/middleware.py b/dongtai_web/apitimelog/middleware.py new file mode 100644 index 000000000..74f59179f --- /dev/null +++ b/dongtai_web/apitimelog/middleware.py @@ -0,0 +1,56 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : middleware +# @created : 星期一 2月 14, 2022 15:06:25 CST +# +# @description : +###################################################################### + + +import time +import json +import logging +request_logger = logging.getLogger(__name__) +REQUEST_DICT = {} +class RequestLogMiddleware: + + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + start_time = time.time() + + # Only logging "*/api/*" patterns + if "/api/" in str(request.get_full_path()): + apiurl = str(request.get_full_path()) + else: + apiurl = '' + response = self.get_response(request) + timenow = time.time() - start_time + api_info = REQUEST_DICT.get( + apiurl, { + 'max_time': 0, + 'min_time': 0, + 'average_time': 0, + 'request_count': 0 + }) + api_info['max_time'] = timenow if api_info[ + 'max_time'] == 0 or api_info['max_time'] < timenow else api_info[ + 'max_time'] + api_info['min_time'] = timenow if api_info[ + 'min_time'] == 0 or api_info['min_time'] > timenow else api_info[ + 'min_time'] + api_info['average_time'] = ( + api_info['average_time'] * api_info['request_count'] + timenow + ) / (api_info['request_count'] + 1) + api_info['request_count'] += 1 + REQUEST_DICT[apiurl] = api_info + request_logger.error(msg='{} : {}'.format(apiurl, timenow)) + return response + + def process_exception(self, request, exception): + try: + raise exception + except Exception as e: + request_logger.exception("Unhandled Exception: " + str(e)) + return exception diff --git a/dongtai_web/apitimelog/models.py b/dongtai_web/apitimelog/models.py new file mode 100644 index 000000000..71a836239 --- /dev/null +++ b/dongtai_web/apitimelog/models.py @@ -0,0 +1,3 @@ +from django.db import models + +# Create your models here. diff --git a/dongtai_web/apitimelog/tests.py b/dongtai_web/apitimelog/tests.py new file mode 100644 index 000000000..7ce503c2d --- /dev/null +++ b/dongtai_web/apitimelog/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. 
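
# Editor's note (standalone sketch of the bookkeeping used by RequestLogMiddleware above):
# one record per API path is kept, and each new request duration is folded into running
# max / min / average figures without storing every sample. Equivalent update logic:
def fold_sample(stats: dict, duration: float) -> dict:
    """stats holds max_time, min_time, average_time and request_count for one API path."""
    n = stats["request_count"]
    stats["max_time"] = duration if n == 0 else max(stats["max_time"], duration)
    stats["min_time"] = duration if n == 0 else min(stats["min_time"], duration)
    stats["average_time"] = (stats["average_time"] * n + duration) / (n + 1)
    stats["request_count"] = n + 1
    return stats


s = {"max_time": 0, "min_time": 0, "average_time": 0, "request_count": 0}
for t in (0.20, 0.10, 0.30):
    s = fold_sample(s, t)
assert (s["max_time"], s["min_time"], s["request_count"]) == (0.30, 0.10, 3)
assert abs(s["average_time"] - 0.20) < 1e-9
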
diff --git a/dongtai_web/apitimelog/urls.py b/dongtai_web/apitimelog/urls.py new file mode 100644 index 000000000..9b353074a --- /dev/null +++ b/dongtai_web/apitimelog/urls.py @@ -0,0 +1,26 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : urls +# @created : 星期三 1月 12, 2022 19:29:08 CST +# +# @description : +###################################################################### + + + +from django.conf.urls.static import static +from django.urls import include, path +import os +from dongtai_conf import settings + +urlpatterns = [ +] + +if os.getenv('REQUESTLOG', None) == 'TRUE' or os.getenv('environment', + None) in ('TEST', ): + from dongtai_web.apitimelog.views import ApiTimeLogView + urlpatterns.extend([ + path('apitimelog', ApiTimeLogView.as_view()), + ]) + +urlpatterns = [path('api/v1/', include(urlpatterns))] diff --git a/dongtai_web/apitimelog/views.py b/dongtai_web/apitimelog/views.py new file mode 100644 index 000000000..26de2a355 --- /dev/null +++ b/dongtai_web/apitimelog/views.py @@ -0,0 +1,14 @@ +from apitimelog.middleware import REQUEST_DICT +# Create your views here. + +from dongtai_common.endpoint import UserEndPoint +from django.http import JsonResponse + + +class ApiTimeLogView(UserEndPoint): + def get(self, request): + res = [] + for k,v in REQUEST_DICT.items(): + v['uri'] = k + res.append(v) + return JsonResponse(res,safe=False) diff --git a/dongtai_web/apps.py b/dongtai_web/apps.py new file mode 100644 index 000000000..692d1c80a --- /dev/null +++ b/dongtai_web/apps.py @@ -0,0 +1,20 @@ +from dongtai_common.common.utils import DongTaiAppConfigPatch +from django.apps import AppConfig + + +class IastConfig(DongTaiAppConfigPatch, AppConfig): + name = "dongtai_web" + + def ready(self): + super().ready() +# register_preheat() + from dongtai_conf.celery import app as celery_app + + + +#def register_preheat(): +# from dongtai_engine.preheat import PreHeatRegister +# +# from dongtai_web.aggr_vul.app_vul_summary import get_annotate_cache_data +# +# PreHeatRegister.register(get_annotate_cache_data) diff --git a/dongtai_web/base/__init__.py b/dongtai_web/base/__init__.py new file mode 100644 index 000000000..09d8ceb62 --- /dev/null +++ b/dongtai_web/base/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: sentry diff --git a/dongtai_web/base/agent.py b/dongtai_web/base/agent.py new file mode 100644 index 000000000..bc9231e17 --- /dev/null +++ b/dongtai_web/base/agent.py @@ -0,0 +1,329 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +import json +import re +import time + +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.program_language import IastProgramLanguage +from dongtai_common.models.project import IastProject +from dongtai_common.models.server import IastServer +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from django.utils.translation import gettext_lazy as _ +from dongtai_common.models.hook_type import HookType +from dongtai_web.base.project_version import get_project_version +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.project_version import IastProjectVersion +from django.db.models import Count + + +def get_agents_with_project(project_name, users): + """ + :param project_name: + :param users: + :return: + """ + agent_ids = [] + if project_name and 
project_name != '': + project_ids = IastProject.objects.filter( + user__in=users, + name__icontains=project_name).values_list("id", flat=True).all() + + if project_ids: + agent_ids = IastAgent.objects.filter( + bind_project_id__in=project_ids).values_list("id", + flat=True).all() + + return agent_ids + + +def get_user_project_name(auth_users): + project_models = IastProject.objects.filter(user__in=auth_users).values("id", "name") + projects_info = {} + if project_models: + for item in project_models: + projects_info[item['id']] = item['name'] + return projects_info + + +def get_user_agent_pro(auth_users, bindId): + agentInfo = IastAgent.objects.filter( + user__in=auth_users, + bind_project_id__in=bindId + ).values("id", "bind_project_id", "server_id") + result = {"pidArr": {}, "serverArr": {}, "server_ids": []} + + if agentInfo: + for item in agentInfo: + result["pidArr"][item['id']] = item['bind_project_id'] + result["serverArr"][item['id']] = item['server_id'] + result["server_ids"].append(item['server_id']) + return result + + +def get_all_server(ids): + alls = IastServer.objects.filter(id__in=ids).values("id", "container") + result = {} + if alls: + for item in alls: + result[item['id']] = item['container'] + return result + + +# todo del edit by song +def get_project_vul_count_back(users, queryset, auth_agents, project_id=None): + result = list() + project_queryset = IastProject.objects.filter(user__in=users) + project_queryset = project_queryset.values('name', 'id') + if not project_queryset: + return result + if project_id: + project_queryset = project_queryset.filter(id=project_id) + + versions = IastProjectVersion.objects.filter( + project_id__in=[project['id'] for project in project_queryset], + status=1, + current_version=1, + user__in=users).values_list('id', 'project_id').all() + versions_map = {version[1]: version[0] for version in versions} + # 需要 查询 指定项目 当前版本 绑定的agent 所对应的漏洞数量 + + for project in project_queryset: + project_id = project['id'] + version_id = versions_map.get(project_id, 0) + agent_queryset = auth_agents.filter(project_version_id=version_id, + bind_project_id=project_id) + + count = queryset.filter(agent__in=agent_queryset).count() + + if count is False: + result.append({ + "project_name": project['name'], + "count": 0, + "id": project_id + }) + else: + result.append({ + "project_name": project['name'], + "count": count, + "id": project_id + }) + + result = sorted(result, key=lambda item: item['count'], reverse=True)[:5] + return result + + +# add by song +def get_project_vul_count(users, queryset, auth_agents, project_id=None): + result = list() + project_queryset = IastProject.objects.filter(user__in=users) + project_queryset = project_queryset.values('name', 'id') + if not project_queryset: + return result + if project_id: + project_queryset = project_queryset.filter(id=project_id) + + versions = IastProjectVersion.objects.filter( + project_id__in=[project['id'] for project in project_queryset], + status=1, + current_version=1, + user__in=users).values_list('id', 'project_id').all() + versions_map = {version[1]: version[0] for version in versions} + # agent_summary = queryset.values('agent_id').annotate(agent_vul_num=Count("agent_id")) + agentIdArr = {} + for item in queryset: + agentIdArr[item["agent_id"]] = item["count"] + auth_agent_arr = auth_agents.values("project_version_id", "bind_project_id", "id") + agent_list = {} + for auth in auth_agent_arr: + version_id = versions_map.get(auth['bind_project_id'], 0) + if version_id == 
auth['project_version_id']: + if agent_list.get(auth['bind_project_id'], None) is None: + agent_list[auth['bind_project_id']] = [] + agent_list[auth['bind_project_id']].append(auth['id']) + + # 需要 查询 指定项目 当前版本 绑定的agent 所对应的漏洞数量 + for project in project_queryset: + project_id = project['id'] + count = 0 + for agent_id in agent_list.get(project_id, []): + count = count + int(agentIdArr.get(agent_id, 0)) + result.append({ + "project_name": project['name'], + "count": count, + "id": project_id + }) + + result = sorted(result, key=lambda item: item['count'], reverse=True)[:5] + return result + + +def change_dict_key(dic, keypair): + for k, v in keypair.items(): + dic[v] = dic.pop(k) + return dic + + +def get_vul_count_by_agent(agent_ids, vid, user): + queryset = IastVulnerabilityModel.objects.filter( + agent_id__in=agent_ids) + typeInfo = queryset.values().order_by("level") + if vid: + typeInfo = typeInfo.filter(id=vid) + type_summary = [] + levelCount = {} + vulDetail = {} + strategy_ids = queryset.values_list('strategy_id', + flat=True).distinct() + strategys = { + strategy['id']: strategy + for strategy in IastStrategyModel.objects.filter( + pk__in=strategy_ids).values('id', 'vul_name').all() + } + hook_type_ids = queryset.values_list('hook_type_id', + flat=True).distinct() + hooktypes = { + hooktype['id']: hooktype + for hooktype in HookType.objects.filter( + pk__in=hook_type_ids).values('id', 'name').all() + } + if typeInfo: + typeArr = {} + typeLevel = {} + for one in typeInfo: + hook_type = hooktypes.get(one['hook_type_id'], None) + hook_type_name = hook_type['name'] if hook_type else None + strategy = strategys.get(one['strategy_id'], None) + strategy_name = strategy['vul_name'] if strategy else None + type_ = list( + filter(lambda x: x is not None, [strategy_name, hook_type_name])) + one['type'] = type_[0] if type_ else '' + typeArr[one['type']] = typeArr.get(one['type'], 0) + 1 + typeLevel[one['type']] = one['level_id'] + levelCount[one['level_id']] = levelCount.get(one['level_id'], 0) + 1 + language = IastAgent.objects.filter( + pk=one['agent_id']).values_list('language', flat=True).first() + one['language'] = language if language is not None else '' + if one['type'] not in vulDetail.keys(): + vulDetail[one['type']] = [] + detailStr1 = _( + "We found that there is {1} in the {0} page, attacker can modify the value of {2} to attack:").format( + one['uri'], one['type'], one['taint_position']) + + try: + one['req_params'] = str(one['req_params']) + except Exception as e: + one['req_params'] = "" + detailStr2 = one['http_method'] + " " + one['uri'] + "?" + one['req_params'] + one['http_protocol'] + try: + fileData = one['full_stack'][-1].get("stack", "") + pattern = r'.*?\((.*?)\).*?' + resMatch = re.match(pattern, fileData) + uriArr = resMatch.group(1).split(":") + fileName = uriArr[0] + if len(uriArr) > 1: + rowStr = _("{} Line").format(str(uriArr[1])) + else: + rowStr = "" + except Exception as e: + fileName = "" + rowStr = "" + classname = "" + methodname = "" + if one['full_stack']: + try: + full_stack_arr = json.loads(one['full_stack']) + full_stack = full_stack_arr[-1] + classname = str(full_stack.get("classname", "")) + methodname = str(full_stack.get("methodname", "")) + except Exception as e: + print("======") + detailStr3 = _("In {} {} call {}. 
{} (), Incoming parameters {}").format( + str(fileName), rowStr, classname, methodname, + str(one['taint_value'])) + cur_tile = _("{} Appears in {} {}").format(one['type'], str(one['uri']), str(one['taint_position'])) + if one['param_name']: + cur_tile = cur_tile + "\"" + str(one['param_name']) + "\"" + vulDetail[one['type']].append({ + "title": cur_tile, + "type_name": one['type'], + "level_id": one['level_id'], + "first_time": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(one['first_time'])), + "latest_time": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(one['latest_time'])), + "language": one['language'], + "url": one['url'], + "detail_data": [detailStr1, detailStr2, detailStr3], + }) + typeArrKeys = typeArr.keys() + for item_type in typeArrKeys: + type_summary.append( + { + 'type_name': item_type, + 'type_count': typeArr[item_type], + 'type_level': typeLevel[item_type] + } + ) + return { + 'type_summary': type_summary, + 'levelCount': levelCount, + 'vulDetail': vulDetail + } + + +def get_hook_type_name(obj): + # hook_type = HookType.objects.filter(pk=obj['hook_type_id']).first() + # hook_type_name = hook_type.name if hook_type else None + # strategy = IastStrategyModel.objects.filter(pk=obj['strategy_id']).first() + # strategy_name = strategy.vul_name if strategy else None + # type_ = list( + # filter(lambda x: x is not None, [strategy_name, hook_type_name])) + type_ = list( + filter(lambda x: x is not None, [ + obj.get('strategy__vul_name', None), + obj.get('hook_type__name', None) + ])) + return type_[0] if type_ else '' + + +def initlanguage(): + program_language_list = IastProgramLanguage.objects.values_list( + 'name', flat=True).all() + return { + program_language.upper(): 0 + for program_language in program_language_list + } + + +# todo 默认开源许可证 +# def init_license(): +# license_list = IastProgramLanguage.objects.all() +# license_dic = { +# license.name: 0 +# for license in license_list +# } +# return license_dic + + +def get_agent_languages(agent_items): + default_language = initlanguage() + language_agents = dict() + language_items = IastAgent.objects.filter().values('id', 'language') + for language_item in language_items: + language_agents[language_item['id']] = language_item['language'] + + for item in agent_items: + agent_id = item['agent_id'] + count = item['count'] + if default_language.get(language_agents[agent_id], None): + default_language[language_agents[agent_id]] = count + default_language[language_agents[agent_id]] + else: + default_language[ + language_agents[agent_id]] = count + return [{ + 'language': _key, + 'count': _value + } for _key, _value in default_language.items()] diff --git a/dongtai_web/base/paginator.py b/dongtai_web/base/paginator.py new file mode 100644 index 000000000..5ba5b870a --- /dev/null +++ b/dongtai_web/base/paginator.py @@ -0,0 +1,26 @@ +from django.core.cache import cache + + +class ListPageMaker(): + + def parse_args(self, request): + page = int(request.query_params.get('page', 1)) + page_size = int(request.query_params.get('pageSize', 20)) + page_size = page_size if page_size < 50 else 50 + return page, page_size, request.user + + def make_key(self, request,keyName="logs"): + self.cache_key = f"{request.user.id}_total_{keyName}_id" + self.cache_key_max_id = f"{request.user.id}_max_{keyName}_id" + + def get_query_cache(self): + total = cache.get(self.cache_key) + max_id = cache.get(self.cache_key_max_id) + return total, max_id + + def set_query_cache(self, queryset): + total = queryset.values('id').count() + max_id = 
queryset.values_list('id', flat=True).order_by('-action_time')[0] + cache.set(self.cache_key, total, 60 * 60) + cache.set(self.cache_key_max_id, max_id, 60 * 60) + return total, max_id diff --git a/dongtai_web/base/project_version.py b/dongtai_web/base/project_version.py new file mode 100644 index 000000000..fc9167579 --- /dev/null +++ b/dongtai_web/base/project_version.py @@ -0,0 +1,126 @@ +import time +from django.db.models import Q +from dongtai_common.models.project_version import IastProjectVersion +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_common.models.project import IastProject +from django.db import transaction + +class VersionModifySerializer(serializers.Serializer): + version_id = serializers.CharField( + help_text=_("The version id of the project")) + version_name = serializers.CharField( + help_text=_("The version name of the project")) + description = serializers.CharField( + help_text=_("Description of the project versoin")) + project_id = serializers.IntegerField(help_text=_("The id of the project")) + current_version = serializers.IntegerField(help_text=_( + "Whether it is the current version, 1 means yes, 0 means no.")) + +@transaction.atomic +def version_modify(user, auth_users, versionData=None): + version_id = versionData.get("version_id", 0) + project_id = versionData.get("project_id", 0) + current_version = versionData.get("current_version", 0) + version_name = versionData.get("version_name", "") + description = versionData.get("description", "") + project = IastProject.objects.filter(user__in=auth_users, + id=project_id).only( + 'id', 'user').first() + if not version_name or not project: + return { + "status": "202", + "msg": _("Parameter error") + } + baseVersion = IastProjectVersion.objects.filter( + project_id=project.id, + version_name=version_name, + status=1, + ) + if version_id: + baseVersion = baseVersion.filter(~Q(id=version_id)) + existVersion = baseVersion.exists() + if existVersion: + return { + "status": "202", + "msg": _("Repeated version name") + } + if version_id: + version = IastProjectVersion.objects.filter(id=version_id, + project_id=project.id, + status=1).first() + if not version: + return { + "status": "202", + "msg": _("Version does not exist") + } + else: + version.update_time = int(time.time()) + version.version_name = version_name + version.description = description + version.save() + else: + version,created = IastProjectVersion.objects.get_or_create( + project_id=project.id, + user=project.user, + current_version=current_version, + version_name=version_name, + description = description) + version.status=1 + version.save() + return { + "status": "201", + "msg": "success", + "data": { + "version_id": version.id, + "version_name": version_name, + "description": description + } + } + + + +def get_project_version(project_id, auth_users): + versionInfo = IastProjectVersion.objects.filter( + project_id=project_id, status=1, current_version=1, user__in=auth_users + ).first() + if versionInfo: + current_project_version = { + "version_id": versionInfo.id, + "version_name": versionInfo.version_name, + "description": versionInfo.description + } + else: + current_project_version = { + "version_id": 0, + "version_name": "", + "description": "", + } + return current_project_version + + +def get_project_version_by_id(version_id): + versionInfo = IastProjectVersion.objects.filter(pk=version_id).first() + if versionInfo: + current_project_version = { + "version_id": versionInfo.id, + 
"version_name": versionInfo.version_name, + "description": versionInfo.description + } + else: + current_project_version = { + "version_id": 0, + "version_name": "", + "description": "", + } + return current_project_version + + + +class ProjectsVersionDataSerializer(serializers.Serializer): + description = serializers.CharField( + help_text=_("Description of the project")) + version_id = serializers.CharField( + help_text=_("The version id of the project")) + version_name = serializers.CharField( + help_text=_("The version name of the project")) diff --git a/dongtai_web/base/update_project_version.py b/dongtai_web/base/update_project_version.py new file mode 100644 index 000000000..f66bb042b --- /dev/null +++ b/dongtai_web/base/update_project_version.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: lingzhi-webapi +import logging, time +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.project_version import IastProjectVersion +from dongtai_common.models.project import IastProject +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ + +logger = logging.getLogger("django") + + +class UpdateProjectVersion(UserEndPoint): + name = "api-v1-project-version-check" + description = _("Detects and associates application version information") + + def get(self, request): + try: + all_project = IastProject.objects.all() + data = [] + for one in all_project: + result = IastProjectVersion.objects.filter(project_id=one.id, user_id=one.user_id, status=1).first() + if not result: + result = IastProjectVersion.objects.create( + version_name="V1.0", + project_id=one.id, + user_id=one.user_id, + current_version=1, + status=1 + ) + data.append(result.id) + IastAgent.objects.filter( + bind_project_id=one.id, + user_id=one.user_id, + project_version_id=0 + ).update( + project_version_id=result.id, + latest_time=int(time.time()) + ) + return R.success(msg=_('Detection finished'), data=data) + except Exception as e: + return R.failure(status=202, msg=_('Detection failed')) diff --git a/dongtai_web/common.py b/dongtai_web/common.py new file mode 100644 index 000000000..b9a9106e8 --- /dev/null +++ b/dongtai_web/common.py @@ -0,0 +1,8 @@ + +from enum import IntEnum + + + +class VulType(IntEnum): + APPLICATION = 1 + ASSET = 2 diff --git a/dongtai_web/dongtai_sca/__init__.py b/dongtai_web/dongtai_sca/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/dongtai_sca/apps.py b/dongtai_web/dongtai_sca/apps.py new file mode 100644 index 000000000..550152698 --- /dev/null +++ b/dongtai_web/dongtai_sca/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class ScaConfig(AppConfig): + name = 'dongtai_sca' diff --git a/dongtai_web/dongtai_sca/common/__init__.py b/dongtai_web/dongtai_sca/common/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/dongtai_sca/common/sca_vul.py b/dongtai_web/dongtai_sca/common/sca_vul.py new file mode 100644 index 000000000..ef7b69fb4 --- /dev/null +++ b/dongtai_web/dongtai_sca/common/sca_vul.py @@ -0,0 +1,73 @@ +from dongtai_common.models.asset import Asset +from dongtai_common.models.asset_vul import IastAssetVulTypeRelation +from dongtai_web.dongtai_sca.models import VulCveRelation +from dongtai_common.models.asset_vul import IastAssetVul + + +def get_ref(refs) -> list: + if not refs: + return [] + for ref in refs: + ref['source_url'] = ref['url'] + ref['url'] = ref['content'] 
+ return refs + +# 通过asset_vul获取 组件详情信息 +def GetScaVulData(asset_vul, asset_queryset): + data = {'base_info': dict(), 'poc_info': dict()} + vul_id = asset_vul.id + + data['base_info'] = {'package_name': asset_vul.aql, 'version': asset_vul.package_version, + 'safe_version': asset_vul.package_safe_version, 'language': asset_vul.package_language} + + data['base_info']['first_time'] = asset_vul.vul_publish_time + data['base_info']['last_time'] = asset_vul.vul_update_time + + data['base_info']['cwe'] = '' + data['base_info']['cnvd'] = '' + data['base_info']['cve'] = '' + data['base_info']['cnnvd'] = '' + data['base_info']['level_id'] = asset_vul.level.id + data['base_info']['level'] = asset_vul.level.name_value + vul_cve_nums = asset_vul.vul_cve_nums + if vul_cve_nums: + data['base_info']['cwe'] = vul_cve_nums['cwe'] if vul_cve_nums['cwe'] else '' + data['base_info']['cnvd'] = vul_cve_nums['cnvd'] if vul_cve_nums['cnvd'] else '' + data['base_info']['cve'] = vul_cve_nums['cve'] if vul_cve_nums['cve'] else '' + data['base_info']['cnnvd'] = vul_cve_nums['cnnvd'] if vul_cve_nums['cnnvd'] else '' + + data['base_info']['vul_title'] = asset_vul.vul_name + data['base_info']['vul_detail'] = asset_vul.vul_detail + data['base_info']['vul_cve_id'] = asset_vul.cve_code + data['base_info']['have_article'] = asset_vul.have_article + data['base_info']['have_poc'] = asset_vul.have_poc + data['base_info']['vul_type'] = '' + data['base_info']['vul_id'] = vul_id + vul_type_relation = IastAssetVulTypeRelation.objects.filter(asset_vul_id=asset_vul.id) + if vul_type_relation: + vul_types = [_i.asset_vul_type.name for _i in vul_type_relation] + data['base_info']['vul_type'] = ','.join(vul_types) + + #asset_queryset = asset_queryset.filter( + # signature_value=asset_vul.package_hash, version=asset_vul.package_version, project_id__gt=0 + #).values('project_id', 'id').all() + #if asset_queryset: + #_temp_data = {_a['project_id']: _a['id'] for _a in asset_queryset} + #asset_ids = [_temp_data[p_id] for p_id in _temp_data] + + project_list = [] + projects_data = Asset.objects.filter( + iastvulassetrelation__asset_vul_id=asset_vul.id).values( + 'project_name').distinct().all() + for project in projects_data: + project_list.append(project['project_name']) + + data['base_info']['project_names'] = project_list + + #cve_relation = VulCveRelation.objects.filter(id=asset_vul.cve_id).first() + + data['poc_info']['poc_list'] = asset_vul.poc if asset_vul.poc else [] + + references = asset_vul.references if asset_vul.references else [] + data['poc_info']['reference_link'] = get_ref(references) + return data diff --git a/dongtai_web/dongtai_sca/models.py b/dongtai_web/dongtai_sca/models.py new file mode 100644 index 000000000..6ea73ee1b --- /dev/null +++ b/dongtai_web/dongtai_sca/models.py @@ -0,0 +1,157 @@ +from django.db import models + +# Create your models here. 
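+ # These tables (db_table prefix "sca2_") back the software composition
+ # analysis feature: package metadata, known vulnerabilities with their
+ # affected/fixed version ranges, package dependencies, and license
+ # information and levels.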
+ +from django.db import models + + +class Package(models.Model): + huo_xian_product_id = models.CharField(max_length=255, blank=True, null=True) + aql = models.CharField(max_length=255, blank=True, null=True) + hash = models.CharField(max_length=255, blank=True, null=True) + ecosystem = models.CharField(max_length=50, blank=True, null=True) + name = models.CharField(max_length=255, blank=True, null=True) + version = models.CharField(max_length=255, blank=True, null=True) + license = models.CharField(max_length=50, blank=True, null=True) + language = models.CharField(max_length=50, null=False, default='') + version_publish_time = models.DateTimeField(blank=True, null=True) + + created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True) + updated_at = models.DateTimeField(auto_now=True, blank=True, null=True) + + class Meta: + db_table = 'sca2_package_v2' + + +class Vul(models.Model): + id = models.CharField(primary_key=True, max_length=50) + summary = models.CharField(max_length=255, blank=True, null=True) + details = models.TextField(blank=True, null=True) + aliases = models.JSONField(blank=True, null=True) + modified = models.DateTimeField(blank=True, null=True) + published = models.DateTimeField(blank=True, null=True) + withdrawn = models.DateTimeField(blank=True, null=True) + references = models.JSONField(null=True) + + created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True) + updated_at = models.DateTimeField(auto_now=True, blank=True, null=True) + + class Meta: + db_table = 'sca2_vul' + + +class VulPackage(models.Model): + cve = models.CharField(max_length=50, blank=True, null=True) + ecosystem = models.CharField(max_length=255, blank=True, null=True) + name = models.CharField(max_length=255, blank=True, null=True) + severity = models.CharField(max_length=32, blank=True, null=True) + introduced = models.CharField(max_length=64, blank=True, null=True) + introduced_vcode = models.CharField(max_length=64, blank=True, null=True) + final_version = models.CharField(max_length=64, blank=True, null=True) + final_vcode = models.CharField(max_length=64, blank=True, null=True) + fixed = models.CharField(max_length=64, blank=True, null=True) + fixed_vcode = models.CharField(max_length=64, blank=True, null=True) + safe_version = models.CharField(max_length=64, blank=True, null=True) + safe_vcode = models.CharField(max_length=64, blank=True, null=True) + created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True) + updated_at = models.DateTimeField(auto_now=True, blank=True, null=True) + + class Meta: + db_table = 'sca2_vul_package_v2' + + +class VulPackageRange(models.Model): + vul_package_id = models.IntegerField(blank=True, null=True) + ecosystem = models.CharField(max_length=255, blank=True, null=True) + name = models.CharField(max_length=255, blank=True, null=True) + type = models.CharField(max_length=50, blank=True, null=True) + introduced = models.CharField(max_length=50, blank=True, null=True) + introduced_vcode = models.CharField(max_length=50, blank=True, null=True) + fixed = models.CharField(max_length=50, blank=True, null=True) + fixed_vcode = models.CharField(max_length=50, blank=True, null=True) + + created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True) + updated_at = models.DateTimeField(auto_now=True, blank=True, null=True) + + class Meta: + db_table = 'sca2_vul_package_range' + + +class VulPackageVersion(models.Model): + vul_package_id = models.IntegerField(blank=True, null=True) + ecosystem = 
models.CharField(max_length=255, blank=True, null=True) + name = models.CharField(max_length=255, blank=True, null=True) + version = models.CharField(max_length=255, blank=True, null=True) + + created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True) + updated_at = models.DateTimeField(auto_now=True, blank=True, null=True) + + class Meta: + db_table = 'sca2_vul_package_version' + + +class VulCveRelation(models.Model): + cve = models.CharField(max_length=255) + cwe = models.CharField(max_length=255) + cnnvd = models.CharField(max_length=255) + cnvd = models.CharField(max_length=255) + ghsa = models.CharField(max_length=255) + vul_title = models.CharField(max_length=512) + vul_title_en = models.CharField(max_length=512) + cwe_info = models.JSONField(blank=True, null=True) + description = models.JSONField(blank=True, null=True) + poc = models.JSONField(blank=True, null=True) + fix_plan = models.JSONField(blank=True, null=True) + references = models.JSONField(blank=True, null=True) + cpe_list = models.JSONField(blank=True, null=True) + cvss2_list = models.JSONField(blank=True, null=True) + cvss3_list = models.JSONField(blank=True, null=True) + severity = models.CharField(max_length=32, null=False, default='') + publish_time = models.DateTimeField(blank=True, null=True) + update_time = models.DateTimeField(blank=True, null=True) + created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True) + updated_at = models.DateTimeField(auto_now=True, blank=True, null=True) + + class Meta: + db_table = 'sca2_cve_relation' + + +class PackageRepoDependency(models.Model): + repo_aql = models.CharField(max_length=255, null=False, default='') + dependency_aql = models.CharField(max_length=255, null=False, default='') + + class Meta: + db_table = 'sca2_package_repo_dependency' + + +class PackageDependency(models.Model): + package_name = models.CharField(max_length=255, null=False, default='') + p_version = models.CharField(max_length=64, null=False, default='') + dependency_package_name = models.CharField(max_length=255, null=False, default='') + d_version = models.CharField(max_length=64, null=False, default='') + ecosystem = models.CharField(max_length=64, null=False, default='') + + class Meta: + db_table = 'sca2_package_dependency' + + +class PackageLicenseInfo(models.Model): + license_name = models.CharField(max_length=255, blank=True, null=True) + identifier = models.CharField(max_length=64, blank=True, null=True) + license_text = models.TextField(blank=True, null=True) + create_at = models.DateTimeField(auto_now_add=True, blank=True, null=True) + update_at = models.DateTimeField(auto_now=True, blank=True, null=True) + + class Meta: + db_table = 'sca2_package_license_info' + + +class PackageLicenseLevel(models.Model): + identifier = models.CharField(max_length=64, blank=True, null=True) + level_id = models.SmallIntegerField(null=False, default=0) + level_desc = models.CharField(max_length=64, blank=True, null=True) + create_at = models.DateTimeField(auto_now_add=True, blank=True, null=True) + update_at = models.DateTimeField(auto_now=True, blank=True, null=True) + + class Meta: + db_table = 'sca2_license_level' diff --git a/dongtai_web/dongtai_sca/scan/__init__.py b/dongtai_web/dongtai_sca/scan/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/dongtai_sca/scan/cwe.py b/dongtai_web/dongtai_sca/scan/cwe.py new file mode 100644 index 000000000..7ccdb3d7c --- /dev/null +++ b/dongtai_web/dongtai_sca/scan/cwe.py @@ -0,0 +1,324 @@ +from 
collections import defaultdict + +bc = defaultdict( + lambda: "", { + "CWE-843": "使用不兼容类型访问资源(类型混淆)", + "CWE-710": "编程规范违背", + "CWE-489": "遗留的调试代码", + "CWE-334": "随机数的空间太小", + "CWE-401": "在移除最后引用时对内存的释放不恰当(内存泄露)", + "CWE-212": "敏感数据的不恰当跨边界移除", + "CWE-1285": + "Improper Validation of Specified Index, Position, or Offset in Input", + "CWE-502": "不可信数据的反序列化", + "CWE-193": "Off-by-one错误", + "CWE-789": "未经控制的内存分配", + "CWE-114": "流程控制", + "CWE-697": "不充分的比较", + "CWE-250": "带着不必要的权限执行", + "CWE-348": "使用不可信的源", + "CWE-183": "宽松定义的白名单", + "CWE-428": "未经引用的搜索路径或元素", + "CWE-754": "对因果或异常条件的不恰当检查", + "CWE-342": "从先前值可预测准确值", + "CWE-172": "编码错误", + "CWE-786": "在缓冲区起始位置之前访问内存", + "CWE-862": "授权机制缺失", + "CWE-330": "使用不充分的随机数", + "CWE-184": "不完整的黑名单", + "CWE-444": "HTTP请求的解释不一致性(HTTP请求私运)", + "CWE-799": "交互频率的控制不恰当", + "CWE-130": "长度参数不一致性处理不恰当", + "CWE-317": "在GUI中的明文存储", + "CWE-917": "表达式语言语句中使用的特殊元素转义处理不恰当(表达式语言注入)", + "CWE-426": "不可信的搜索路径", + "CWE-98": "PHP程序中Include/Require语句包含文件控制不恰当(PHP远程文件包含)", + "CWE-200": "信息暴露", + "CWE-346": "源验证错误", + "CWE-214": "通过处理环境导致的信息暴露", + "CWE-271": "特权放弃/降低错误", + "CWE-838": "输出上下文语义编码不恰当", + "CWE-118": "对可索引资源的访问不恰当(越界错误)", + "CWE-276": "缺省权限不正确", + "CWE-829": "从非可信控制范围包含功能例程", + "CWE-189": "数值错误", + "CWE-681": "数值类型间的不正确转换", + "CWE-326": "不充分的加密强度", + "CWE-202": "通过数据查询的敏感数据暴露", + "CWE-252": "未加检查的返回值", + "CWE-617": "可达断言", + "CWE-354": "完整性检查值验证不恰当", + "CWE-270": "特权上下文切换错误", + "CWE-674": "未经控制的递归", + "CWE-611": "XML外部实体引用的不恰当限制(XXE)", + "CWE-915": "动态确定对象属性修改的控制不恰当", + "CWE-280": "不充分权限或特权的处理不恰当", + "NVD-CWE-Other": "", + "CWE-194": "未预期的符号扩展", + "CWE-552": "对外部实体的文件或目录可访问", + "CWE-760": "使用可预测Salt的单向哈希算法", + "CWE-694": "使用多个具有重复标识的资源", + "CWE-77": "在命令中使用的特殊元素转义处理不恰当(命令注入)", + "CWE-1021": "不当限制渲染UI层或帧", + "CWE-311": "敏感数据加密缺失", + "CWE-170": "不恰当的空终结符", + "CWE-598": "通过GET请求中的查询字符串导致的信息暴露", + "CWE-653": "不充分的划分", + "CWE-441": "未有动机的代理或中间人(混淆代理)", + "CWE-20": "输入验证不恰当", + "CWE-601": "指向未可信站点的URL重定向(开放重定向)", + "CWE-203": "通过差异性导致的信息暴露", + "CWE-1284": "Improper Validation of Specified Quantity in Input", + "CWE-61": "UNIX符号链接跟随", + "CWE-822": "非可信指针解引用", + "CWE-620": "未经验证的口令修改", + "CWE-268": "特权链锁", + "CWE-1188": "不安全的默认资源初始化", + "CWE-242": "使用内在危险函数", + "CWE-112": "XML验证缺失", + "CWE-299": "证书撤销验证不恰当", + "CWE-113": "HTTP头部中CRLF序列转义处理不恰当(HTTP响应分割)", + "CWE-79": "在Web页面生成时对输入的转义处理不恰当(跨站脚本)", + "CWE-345": "对数据真实性的验证不充分", + "CWE-191": "整数下溢(超界折返)", + "CWE-1278": + "Missing Protection Against Hardware Reverse Engineering Using Integrated Circuit (IC) Imaging Techniques", + "CWE-349": "在可信数据中接受外来的不可信数据", + "CWE-435": "交互错误", + "CWE-119": "内存缓冲区边界内操作的限制不恰当", + "CWE-1333": "Inefficient Regular Expression Complexity", + "CWE-369": "除零错误", + "CWE-26": "路径遍历:'dir/../filename'", + "CWE-353": "缺失完整性检查支持", + "CWE-470": "使用外部可控制的输入来选择类或代码(不安全的反射)", + "CWE-404": "不恰当的资源关闭或释放", + "CWE-696": "不正确的行为次序", + "CWE-197": "数值截断错误", + "CWE-774": "不加限制或调节进行文件描述符或句柄的分配", + "CWE-125": "跨界内存读", + "CWE-693": "保护机制失效", + "CWE-277": "不安全的继承权限", + "CWE-909": "资源初始化缺失", + "CWE-923": "通信信道对预期端点的不适当限制", + "CWE-539": "通过持久性Cookie导致的信息暴露", + "CWE-590": "释放并不在堆上的内存", + "CWE-208": "通过时间差异性导致的信息暴露", + "CWE-302": "使用假设不可变数据进行的认证绕过", + "CWE-532": "通过日志文件的信息暴露", + "CWE-335": "PRNG种子错误", + "CWE-340": "可预测问题", + "CWE-185": "不正确的正则表达式", + "CWE-332": "PRNG中信息熵不充分", + "CWE-94": "对生成代码的控制不恰当(代码注入)", + "CWE-347": "密码学签名的验证不恰当", + "CWE-772": "对已超过有效生命周期的资源丧失索引", + "CWE-834": "过度迭代", + "CWE-610": "资源在另一范围的外部可控制索引", + "CWE-924": "通信信道中传输过程中消息完整性的不正确执行", + "CWE-1187": "使用未初始化的资源", + "CWE-305": "使用基本弱点进行的认证绕过", + 
"CWE-327": "使用已被攻破或存在风险的密码学算法", + "CWE-613": "不充分的会话过期机制", + "CWE-665": "初始化不恰当", + "CWE-331": "信息熵不充分", + "CWE-204": "响应差异性信息暴露", + "CWE-126": "缓冲区上溢读取", + "CWE-1286": "Improper Validation of Syntactic Correctness of Input", + "CWE-755": "对异常条件的处理不恰当", + "CWE-641": "文件和其他资源名称限制不恰当", + "CWE-96": "静态存储代码中指令转义处理不恰当(静态代码注入)", + "CWE-297": "对宿主不匹配的证书验证不恰当", + "CWE-35": "路径遍历:'.../...//'", + "CWE-313": "在文件或磁盘上的明文存储", + "CWE-778": "不充分的日志记录", + "CWE-798": "使用硬编码的凭证", + "CWE-88": "参数注入或修改", + "CWE-59": "在文件访问前对链接解析不恰当(链接跟随)", + "CWE-667": "加锁机制不恰当", + "CWE-75": "特殊命令到另一不同平面时的净化处理不恰当(特殊命令注入)", + "CWE-328": "可逆的单向哈希", + "CWE-823": "使用越界的指针偏移", + "CWE-241": "非预期数据类型处理不恰当", + "CWE-640": "忘记口令恢复机制弱", + "CWE-91": "XML注入(XPath盲注)", + "CWE-283": "未经验证的属主", + "CWE-201": "通过发送数据的信息暴露", + "CWE-115": "输入的错误解释", + "CWE-920": "功耗限制不当", + "CWE-190": "整数溢出或超界折返", + "CWE-248": "未捕获的异常", + "CWE-507": "特洛伊木马", + "CWE-89": "SQL命令中使用的特殊元素转义处理不恰当(SQL注入)", + "CWE-73": "文件名或路径的外部可控制", + "CWE-358": "不恰当实现的标准安全检查", + "CWE-93": "对CRLF序列的转义处理不恰当(CRLF注入)", + "CWE-99": "对资源描述符的控制不恰当(资源注入)", + "CWE-565": "在信任Cookie未进行验证与完整性检查", + "CWE-943": "数据查询逻辑中特殊元素的不当中和", + "CWE-87": "替代XSS语法转义处理不恰当", + "CWE-379": "在具有不安全权限的目录中创建临时文件", + "CWE-471": "对假设不可变数据的修改(MAID)", + "CWE-706": "使用不正确的解析名称或索引", + "CWE-290": "使用欺骗进行的认证绕过", + "CWE-362": "使用共享资源的并发执行不恰当同步问题(竞争条件)", + "CWE-400": "未加控制的资源消耗(资源穷尽)", + "CWE-359": "侵犯隐私", + "CWE-788": "在缓冲区结束位置之后访问内存", + "CWE-15": "系统设置或配置在外部可控制", + "CWE-24": "路径遍历:'../filedir'", + "CWE-295": "证书验证不恰当", + "CWE-662": "不恰当的同步机制", + "CWE-267": "特权定义了不安全动作", + "CWE-364": "信号处理例程中的竞争条件", + "CWE-228": "语法无效结构处理不恰当", + "CWE-824": "使用未经初始化的指针", + "CWE-776": "DTD中递归实体索引的不恰当限制(XML实体扩展)", + "CWE-688": "使用不正确变量或索引作为参数的函数调用", + "CWE-912": "隐藏功能", + "CWE-1336": + "Improper Neutralization of Special Elements Used in a Template Engine", + "CWE-922": "敏感信息的不安全存储", + "CWE-916": "使用具有不充分计算复杂性的口令哈希", + "CWE-257": "以可恢复格式存储口令", + "CWE-506": "内嵌的恶意代码", + "CWE-451": "关键信息的UI错误表达", + "CWE-134": "使用外部控制的格式字符串", + "CWE-704": "不正确的类型转换", + "CWE-1236": + "Improper Neutralization of Formula Elements in a CSV File", + "CWE-664": "在生命周期中对资源的控制不恰当", + "CWE-391": "未经检查的错误条件", + "CWE-407": "算法复杂性", + "CWE-522": "不充分的凭证保护机制", + "CWE-90": "LDAP查询中使用的特殊元素转义处理不恰当(LDAP注入)", + "CWE-457": "使用未经初始化的变量", + "CWE-240": "对不一致结构体元素处理不恰当", + "CWE-540": "通过源代码导致的信息暴露", + "CWE-603": "使用客户端的认证机制", + "CWE-323": "在加密中重用Nonce与密钥对", + "CWE-216": "容器错误", + "CWE-23": "相对路径遍历", + "CWE-497": "将系统数据暴露到未授权控制的范围", + "CWE-427": "对搜索路径元素未加控制", + "CWE-259": "使用硬编码的口令", + "CWE-256": "明文存储口令", + "CWE-402": "将私有的资源传输到一个新的空间(资源泄露)", + "CWE-42": "路径等价:'filename.' 
(尾部点号)", + "CWE-573": "调用者对规范的不恰当使用", + "CWE-338": "使用具有密码学弱点缺陷的PRNG", + "CWE-415": "双重释放", + "CWE-116": "对输出编码和转义不恰当", + "CWE-178": "大小写敏感处理不恰当", + "CWE-80": "Web页面中脚本相关HTML标签转义处理不恰当(基本跨站脚本)", + "CWE-304": "认证中关键步骤缺失", + "CWE-300": "通道可被非端点访问(中间人攻击)", + "CWE-680": "整数溢出导致缓冲区溢出", + "CWE-476": "空指针解引用", + "CWE-682": "数值计算不正确", + "CWE-670": "控制流实现总是不正确", + "CWE-350": "不恰当地信任反向DNS", + "CWE-279": "不安全的运行时授予权限", + "CWE-775": "缺失文件描述符或句柄在有效生命周期之后的释放处理", + "NVD-CWE-noinfo": "", + "CWE-307": "过多认证尝试的限制不恰当", + "CWE-672": "在过期或释放后对资源进行操作", + "CWE-472": "对假设不可变Web参数的外部可控制", + "CWE-770": "不加限制或调节的资源分配", + "CWE-551": "不正确的行为次序:在解析与净化处理之前进行授权", + "CWE-567": "在多现场上下文中未能对共享数据进行同步访问", + "CWE-440": "预期行为违背", + "CWE-129": "对数组索引的验证不恰当", + "CWE-321": "使用硬编码的密码学密钥", + "CWE-316": "在内存中的明文存储", + "CWE-399": "资源管理错误", + "CWE-707": "对消息或数据结构的处理不恰当", + "CWE-627": "动态变量执行", + "CWE-548": "通过目录枚举导致的信息暴露", + "CWE-121": "栈缓冲区溢出", + "CWE-294": "使用捕获-重放进行的认证绕过", + "CWE-757": "在会话协商时选择低安全性的算法(算法降级)", + "CWE-273": "对于放弃特权的检查不恰当", + "CWE-763": "对无效指针或索引的释放", + "CWE-285": "授权机制不恰当", + "CWE-122": "堆缓冲区溢出", + "CWE-123": "任意地址可写任意内容条件", + "CWE-649": "依赖于未经完整性检查的安全相关输入的混淆或加密", + "CWE-1076": "对预期协议的遵守不足", + "CWE-416": "释放后使用", + "CWE-523": "凭证传输未经安全保护", + "CWE-669": "在范围间的资源转移不正确", + "CWE-266": "特权授予不正确", + "CWE-639": "通过用户控制密钥绕过授权机制", + "CWE-749": "暴露危险的方法或函数", + "CWE-303": "认证算法的不正确实现", + "CWE-792": "对一个或多个特殊元素实例的过滤不完全", + "CWE-209": "通过错误消息导致的信息暴露", + "CWE-36": "绝对路径遍历", + "CWE-281": "权限预留不恰当", + "CWE-494": "下载代码缺少完整性检查", + "CWE-1004": "没有'HttpOnly'标志的敏感Cookie", + "CWE-390": "未有动作错误条件的检测", + "CWE-78": "OS命令中使用的特殊元素转义处理不恰当(OS命令注入)", + "CWE-918": "服务端请求伪造(SSRF)", + "CWE-74": "输出中的特殊元素转义处理不恰当(注入)", + "CWE-807": "在安全决策中依赖未经信任的输入", + "CWE-599": "缺失对OpenSSL证书的验证", + "CWE-312": "敏感数据的明文存储", + "CWE-120": "未进行输入大小检查的缓冲区拷贝(传统缓冲区溢出)", + "CWE-525": "通过浏览器缓存导致的信息暴露", + "CWE-124": "缓冲区下溢", + "CWE-1287": "Improper Validation of Specified Type of Input", + "CWE-1295": "Debug Messages Revealing Unnecessary Information", + "CWE-117": "日志输出的转义处理不恰当", + "CWE-261": "口令使用弱密码学算法", + "CWE-642": "对关键状态数据的外部可控制", + "CWE-425": "直接请求(强制性浏览)", + "CWE-377": "不安全的临时文件", + "CWE-436": "解释冲突", + "CWE-527": "将CVS仓库暴露给非授权控制范围", + "CWE-284": "访问控制不恰当", + "CWE-22": "对路径名的限制不恰当(路径遍历)", + "CWE-732": "关键资源的不正确权限授予", + "CWE-657": "违背安全设计原则", + "CWE-378": "创建拥有不安全权限的临时文件", + "CWE-64": "Windows快捷方式跟随(.LNK)", + "CWE-684": "特定函数功能的不正确供给", + "CWE-269": "特权管理不恰当", + "CWE-521": "弱口令要求", + "CWE-131": "缓冲区大小计算不正确", + "CWE-325": "缺少必要的密码学步骤", + "CWE-668": "将资源暴露给错误范围", + "CWE-790": "特殊元素过滤不恰当", + "CWE-459": "清理环节不完整", + "CWE-708": "不正确的属主授予", + "CWE-434": "危险类型文件的不加限制上传", + "CWE-1321": + "Improperly Controlled Modification of Object Prototype Attributes ('Prototype Pollution')", + "CWE-538": "文件和路径信息暴露", + "CWE-352": "跨站请求伪造(CSRF)", + "CWE-367": "检查时间与使用时间(TOCTOU)的竞争条件", + "CWE-835": "不可达退出条件的循环(无限循环)", + "CWE-288": "使用候选路径或通道进行的认证绕过", + "CWE-759": "使用未加Salt的单向哈希算法", + "CWE-825": "无效指针解引用", + "CWE-942": "过度许可的跨域白名单", + "CWE-287": "认证机制不恰当", + "CWE-863": "授权机制不正确", + "CWE-384": "会话固定", + "CWE-319": "敏感数据的明文传输", + "CWE-644": "对HTTP头部进行脚本语法转义处理不恰当", + "CWE-787": "跨界内存写", + "CWE-306": "关键功能的认证机制缺失", + "CWE-805": "使用不正确的长度值访问缓冲区", + "CWE-908": "对未经初始化资源的使用", + "CWE-405": "不对称的资源消耗(放大攻击)", + "CWE-913": "动态管理代码资源的控制不恰当", + "CWE-264": "权限、特权和访问控制", + "CWE-315": "在Cookie中的明文存储", + "CWE-233": "参数问题", + "CWE-385": "隐蔽时间通道" + }) + + +def get_cwe_name(cwe_id: str) -> str: + return bc[cwe_id] diff --git a/dongtai_web/dongtai_sca/scan/tests.py b/dongtai_web/dongtai_sca/scan/tests.py new file 
mode 100644 index 000000000..10a0dd7a6 --- /dev/null +++ b/dongtai_web/dongtai_sca/scan/tests.py @@ -0,0 +1,132 @@ +from .utils import get_package_vul, get_package + +from django.test import TestCase + + +class ExtenalApiTestCase(TestCase): + + def test_get_package_vul_by_aql(self): + res = get_package_vul( + aql="maven:com.fasterxml.jackson.core:jackson-databind:2.9.3:") + assert isinstance(res, list) + + def test_get_package_by_ecosystem_and_hash_java(self): + res = get_package( + ecosystem="maven", + package_hash="3490508379d065fe3fcb80042b62f630f7588606") + assert isinstance(res, list) + + def test_get_package_by_ecosystem_and_hash_go(self): + res = get_package( + ecosystem="golang", + package_hash="3c61e56652c8d48ba09390f1170cf868007e1293") + assert isinstance(res, list) + + +from test import DongTaiTestCase +from test.apiserver.test_agent_base import AgentTestCase +from .utils import update_one_sca + +from .utils import get_nearest_version, get_latest_version + + +class DongTaiVersionTestCase(TestCase): + version_list = [ + '1.0', '1.0-m4', '1.0-rc1', '1.0.1', '1.2', '1.2-rc1', '1.2-rc2', + '1.2.1', '1.2.2', '1.2.3', '1.2.4', '1.2.5', '1.2.6', '1.2.7', '1.2.8', + '1.2.9', '2.0', '2.0-m1', '2.0-m2', '2.0-m4', '2.0.1', '2.0.2', + '2.0.3', '2.0.4', '2.0.5', '2.0.6', '2.0.7', '2.0.8', '2.5', '2.5.1', + '2.5.2', '2.5.3', '2.5.4', '2.5.5', '2.5.6', '2.5.6.SEC01', + '2.5.6.SEC02', '2.5.6.SEC03', '3.0.0.RELEASE', '3.0.1.RELEASE', + '3.0.2.RELEASE', '3.0.3.RELEASE', '3.0.4.RELEASE', '3.0.5.RELEASE', + '3.0.6.RELEASE', '3.0.7.RELEASE', '3.1.0.RELEASE', '3.1.1.RELEASE', + '3.1.2.RELEASE', '3.1.3.RELEASE', '3.1.4.RELEASE', '3.2.0.RELEASE', + '3.2.1.RELEASE', '3.2.10.RELEASE', '3.2.11.RELEASE', '3.2.12.RELEASE', + '3.2.13.RELEASE', '3.2.14.RELEASE', '3.2.15.RELEASE', '3.2.16.RELEASE', + '3.2.17.RELEASE', '3.2.18.RELEASE', '3.2.2.RELEASE', '3.2.3.RELEASE', + '3.2.4.RELEASE', '3.2.5.RELEASE', '3.2.6.RELEASE', '3.2.7.RELEASE', + '3.2.8.RELEASE', '3.2.9.RELEASE', '4.0.0.RELEASE', '4.0.1.RELEASE', + '4.0.2.RELEASE', '4.0.3.RELEASE', '4.0.4.RELEASE', '4.0.5.RELEASE', + '4.0.6.RELEASE', '4.0.7.RELEASE', '4.0.8.RELEASE', '4.0.9.RELEASE', + '4.1.0.RELEASE', '4.1.1.RELEASE', '4.1.2.RELEASE', '4.1.3.RELEASE', + '4.1.4.RELEASE', '4.1.5.RELEASE', '4.1.6.RELEASE', '4.1.7.RELEASE', + '4.1.8.RELEASE', '4.1.9.RELEASE', '4.2.0.RELEASE', '4.2.1.RELEASE', + '4.2.2.RELEASE', '4.2.3.RELEASE', '4.2.4.RELEASE', '4.2.5.RELEASE', + '4.2.6.RELEASE', '4.2.7.RELEASE', '4.2.8.RELEASE', '4.2.9.RELEASE', + '4.3.0.RELEASE', '4.3.1.RELEASE', '4.3.10.RELEASE', '4.3.11.RELEASE', + '4.3.12.RELEASE', '4.3.13.RELEASE', '4.3.14.RELEASE', '4.3.15.RELEASE', + '4.3.16.RELEASE', '4.3.17.RELEASE', '4.3.18.RELEASE', '4.3.19.RELEASE', + '4.3.2.RELEASE', '4.3.20.RELEASE', '4.3.21.RELEASE', '4.3.22.RELEASE', + '4.3.23.RELEASE', '4.3.24.RELEASE', '4.3.25.RELEASE', '4.3.26.RELEASE', + '4.3.27.RELEASE', '4.3.28.RELEASE', '4.3.29.RELEASE', '4.3.3.RELEASE', + '4.3.30.RELEASE', '4.3.4.RELEASE', '4.3.5.RELEASE', '4.3.6.RELEASE', + '4.3.7.RELEASE', '4.3.8.RELEASE', '4.3.9.RELEASE', '5.0.0.RELEASE', + '5.0.1.RELEASE', '5.0.10.RELEASE', '5.0.11.RELEASE', '5.0.12.RELEASE', + '5.0.13.RELEASE', '5.0.14.RELEASE', '5.0.15.RELEASE', '5.0.16.RELEASE', + '5.0.17.RELEASE', '5.0.18.RELEASE', '5.0.19.RELEASE', '5.0.2.RELEASE', + '5.0.20.RELEASE', '5.0.3.RELEASE', '5.0.4.RELEASE', '5.0.5.RELEASE', + '5.0.6.RELEASE', '5.0.7.RELEASE', '5.0.8.RELEASE', '5.0.9.RELEASE', + '5.1.0.RELEASE', '5.1.1.RELEASE', '5.1.10.RELEASE', '5.1.11.RELEASE', + '5.1.12.RELEASE', 
'5.1.13.RELEASE', '5.1.14.RELEASE', '5.1.15.RELEASE', + '5.1.16.RELEASE', '5.1.17.RELEASE', '5.1.18.RELEASE', '5.1.19.RELEASE', + '5.1.2.RELEASE', '5.1.20.RELEASE', '5.1.3.RELEASE', '5.1.4.RELEASE', + '5.1.5.RELEASE', '5.1.6.RELEASE', '5.1.7.RELEASE', '5.1.8.RELEASE', + '5.1.9.RELEASE', '5.2.11.RELEASE', '5.2.19.RELEASE', '5.2.3.RELEASE', + '5.2.8.RELEASE', '5.3.14', '5.3.15', '5.3.16', '5.3.19' + ] + + def test_nearest_version(self): + version = '5.1.3.RELEASE' + nrversion = get_nearest_version(version, self.version_list) + assert nrversion == '5.1.3.RELEASE' + + def test_latest_version(self): + assert '5.3.19' == get_latest_version(self.version_list) + + def test_nearest_version_1(self): + version = '0.0.1' + nrversion = get_nearest_version(version, self.version_list) + assert nrversion == '1.0-rc1' + + def test_nearest_version_2(self): + version = '10.0.1' + nrversion = get_nearest_version(version, self.version_list) + assert nrversion == '' + + +from dongtai_common.models.asset import Asset + +class AgentHardencodeTestCase(AgentTestCase): + + def test_update_one_sca_java(self): + update_one_sca( + self.agent_id, + "/Users/xxx/spring-boot/2.3.2.RELEASE/org.springframework:spring-beans.jar", + "3490508379d065fe3fcb80042b62f630f7588606", + "org.springframework:spring-beans.jar", "SHA-1") + + def test_update_one_sca_golang(self): + update_one_sca(self.agent_id, "pypi:markupsafe:2.0.1:", + "a4bb5ffad5564e4a0e25955e3a40b1c6158385b2", + "org.springframework:spring-beans.jar", "SHA-1") + + def test_get_package_edge_case(self): + update_one_sca(self.agent_id, "", + "9b7860a324f4b2f2bc31bcdd99c7ee51fe32e0c8", + " org.springframework:spring-web.jar ", "SHA-1") + asset = Asset.objects.filter( + agent_id=self.agent_id, + signature_value="9b7860a324f4b2f2bc31bcdd99c7ee51fe32e0c8").first( + ) + + def test_get_package_edge_case_1(self): + update_one_sca(self.agent_id, "", + "07b6bf82cea13570b5290d6ed841283a1fcce170", + " org.springframework:spring-web.jar ", "SHA-1") + asset = Asset.objects.filter( + agent_id=self.agent_id, + signature_value="07b6bf82cea13570b5290d6ed841283a1fcce170").first( + ) + assert asset is not None + assert asset.safe_version_list is not None + assert asset.iastvulassetrelation_set.all() != [] diff --git a/dongtai_web/dongtai_sca/scan/utils.py b/dongtai_web/dongtai_sca/scan/utils.py new file mode 100644 index 000000000..622595ef1 --- /dev/null +++ b/dongtai_web/dongtai_sca/scan/utils.py @@ -0,0 +1,572 @@ +import requests +from result import Ok, Err, Result +import logging +from requests.exceptions import ConnectionError, ConnectTimeout +from requests.exceptions import RequestException +import json +from json.decoder import JSONDecodeError +from typing import Optional, Callable, Any +from typing import List, Dict, Tuple +from requests import Response +from dongtai_conf.settings import SCA_BASE_URL, SCA_TIMEOUT +from urllib.parse import urljoin +from dongtai_common.common.utils import cached_decorator +from dongtai_common.models.profile import IastProfile +from json.decoder import JSONDecodeError +from http import HTTPStatus + +logger = logging.getLogger("dongtai-webapi") + +def get_sca_token() -> str: + #profilefromdb = IastProfile.objects.filter(key='sca_token').values_list( + # 'value', flat=True).first() + #if profilefromdb: + # return profilefromdb + #return '' + from dongtai_conf.settings import SCA_TOKEN + return SCA_TOKEN + +def request_get_res_data_with_exception(data_extract_func: Callable[ + [Response], Result] = lambda x: Ok(x), + *args, + **kwargs) -> Result: + try: 
+ response: Response = requests.request(*args, **kwargs) + logger.debug(f"response content: {response.content!r}") + logger.info(f"response content url: {response.url} status_code: {response.status_code}") + res = data_extract_func(response) + if isinstance(res, Err): + return res + return Ok(res.value) + except (ConnectionError, ConnectTimeout): + return Err("ConnectionError with target server") + except JSONDecodeError: + logger.debug(f"content decode error :{response.content!r}") + logger.info(f"content decode error") + return Err("Content decode error") + except RequestException as e: + logger.error(e, exc_info=True) + return Err("Request Exception") + except Exception as e: + logger.error(e, exc_info=True) + return Err("Exception") + + +def data_transfrom(response: Response) -> Result[List[Dict], str]: + if response.status_code == HTTPStatus.FORBIDDEN: + return Err('Rate Limit Exceeded') + try: + res_data = json.loads(response.content) + return Ok(res_data['data']) + except JSONDecodeError as e: + logger.debug(e, exc_info=True) + logger.info(f'JSONDecodeError content: {response.content!r}') + return Err('Failed') + except KeyError as e: + logger.debug(e, exc_info=True) + logger.info(f'content form not match content: {response.content!r}') + return Err('Failed') + except Exception as e: + logger.error(f"unexcepted Exception : {e}", exc_info=True) + return Err('Failed') + +def data_transfrom_package_vul_v2(response: Response) -> Result[List[Dict], str]: + if response.status_code == HTTPStatus.FORBIDDEN: + return Err('Rate Limit Exceeded') + try: + res_data = json.loads(response.content) + return Ok((res_data['data'], res_data['safe_version'])) + except JSONDecodeError as e: + logger.debug(e, exc_info=True) + logger.info(f'JSONDecodeError content: {response.content!r}') + return Err('Failed') + except KeyError as e: + logger.debug(e, exc_info=True) + logger.info(f'content form not match content: {response.content!r}') + return Err('Failed') + except Exception as e: + logger.error(f"unexcepted Exception : {e}", exc_info=True) + return Err('Failed') + +@cached_decorator(random_range=(2 * 60 * 60, 2 * 60 * 60),) +def get_package_vul(aql: Optional[str] = None, + ecosystem: Optional[str] = None, + package_hash: Optional[str] = None) -> List[Dict]: + url = urljoin(SCA_BASE_URL, "/openapi/sca/v1/package_vul/") + if aql is not None: + querystring = {"aql": aql} + else: + querystring = {"ecosystem": ecosystem, "hash": package_hash} + headers = {"Token": get_sca_token()} + payload = "" + res = request_get_res_data_with_exception(data_transfrom, + "GET", + url, + data=payload, + params=querystring, + headers=headers, + timeout=SCA_TIMEOUT) + if isinstance(res, Err): + return [] + data = res.value + return data + + +@cached_decorator( + random_range=(2 * 60 * 60, 2 * 60 * 60), ) +def get_package_vul_v2(aql: Optional[str] = None, + ecosystem: Optional[str] = None, + package_hash: Optional[str] = None) -> Tuple[List[Dict],List[Dict]]: + url = urljoin(SCA_BASE_URL, "/openapi/sca/v2/package_vul/") + if aql is not None: + querystring = {"aql": aql} + else: + querystring = {"ecosystem": ecosystem, "hash": package_hash} + headers = {"Token": get_sca_token()} + payload = "" + res = request_get_res_data_with_exception(data_transfrom_package_vul_v2, + "GET", + url, + data=payload, + params=querystring, + headers=headers, + timeout=SCA_TIMEOUT) + if isinstance(res, Err): + return [], [] + data = res.value + return data + +@cached_decorator(random_range=(2 * 60 * 60, 2 * 60 * 60),) +def get_package(aql: 
Optional[str] = None, + ecosystem: Optional[str] = None, + package_hash: Optional[str] = None) -> List[Dict]: + url = urljoin(SCA_BASE_URL, "/openapi/sca/v1/package/") + if aql is not None: + querystring = {"aql": aql} + else: + querystring = {"ecosystem": ecosystem, "hash": package_hash} + headers = {"Token": get_sca_token()} + payload = "" + res = request_get_res_data_with_exception(data_transfrom, + "GET", + url, + data=payload, + params=querystring, + headers=headers, + timeout=SCA_TIMEOUT) + if isinstance(res, Err): + return [] + data = res.value + return data + + +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.asset import Asset +from dongtai_common.models.vul_level import IastVulLevel +import time +#from dongtai_web.dongtai_sca.utils import sca_scan_asset + + +def get_package_aql(name: str, ecosystem: str, version: str) -> str: + return f"{ecosystem}:{name}:{version}" + + +from celery import shared_task +from dongtai_web.dongtai_sca.models import PackageLicenseLevel +from dongtai_conf.settings import SCA_SETUP + + +def get_license_list(license_list_str: str) -> List[Dict]: + license_list = list(filter(lambda x: x, license_list_str.split(","))) + res = list( + PackageLicenseLevel.objects.filter( + identifier__in=license_list).values('identifier', 'level_id', + 'level_desc').all()) + selected_identifier = list(map(lambda x: x['identifier'], res)) + for k in license_list: + if k not in selected_identifier: + res.append({ + 'identifier': k, + "level_id": 0, + "level_desc": "允许商业集成" + }) + + if res: + return res + return [{ + 'identifier': "non-standard", + "level_id": 0, + "level_desc": "允许商业集成" + }] + +def get_highest_license(license_list: List[Dict]) -> Dict: + logger.debug(f'license_list : {license_list}') + res = sorted(license_list, key=lambda x: x['level_id'], reverse=True) + if res: + return res[0] + return { + 'identifier': "non-standard", + "level_id": 0, + "level_desc": "允许商业集成" + } + + +from hashlib import sha1 + +def sha_1(raw): + sha1_str = sha1(raw.encode("utf-8")).hexdigest() + return sha1_str + +@shared_task(queue='dongtai-sca-task') +def update_one_sca(agent_id, + package_path, + package_signature, + package_name, + package_algorithm, + package_version=''): + logger.info( + f'SCA检测开始 [{agent_id} {package_path} {package_signature} {package_name} {package_algorithm} {package_version}]' + ) + agent = IastAgent.objects.filter(id=agent_id).first() + if not package_signature: + package_signature = sha_1(package_signature) + if not SCA_SETUP: + logger.warning(f"SCA_TOKEN not setup !") + asset = Asset() + new_level = IastVulLevel.objects.get(name="info") + + # change to update_or_create + asset.package_name = package_name + asset.package_path = package_path + asset.signature_value = package_signature + asset.signature_algorithm = 'SHA-1' + asset.version = package_version + asset.level_id = new_level.id + asset.vul_count = 0 + asset.language = agent.language + if agent: + asset.agent = agent + asset.project_version_id = agent.project_version_id if agent.project_version_id else 0 + asset.project_name = agent.project_name + asset.language = agent.language + asset.project_id = -1 + if agent.bind_project_id: + asset.project_id = agent.bind_project_id + asset.user_id = -1 + if agent.user_id: + asset.user_id = agent.user_id + license_list = get_license_list("non-standard") + asset.license_list = license_list + highest_license = get_highest_license(license_list) + asset.highest_license = get_highest_license(license_list) + asset.license = 
highest_license['identifier'] + asset.dt = int(time.time()) + asset.save() + return + + if agent.language == "JAVA": + packages = get_package(ecosystem='maven', + package_hash=package_signature) + else: + packages = get_package(aql=package_name) + if not packages: + asset = Asset() + new_level = IastVulLevel.objects.get(name="info") + + # change to update_or_create + asset.package_name = package_name + asset.package_path = package_path + asset.signature_value = package_signature + asset.signature_algorithm = 'SHA-1' + asset.version = package_version + asset.level_id = new_level.id + asset.vul_count = 0 + asset.language = agent.language + if agent: + asset.agent = agent + asset.project_version_id = agent.project_version_id if agent.project_version_id else 0 + asset.project_name = agent.project_name + asset.language = agent.language + asset.project_id = -1 + if agent.bind_project_id: + asset.project_id = agent.bind_project_id + asset.user_id = -1 + if agent.user_id: + asset.user_id = agent.user_id + license_list = get_license_list("non-standard") + asset.license_list = license_list + highest_license = get_highest_license(license_list) + asset.highest_license = get_highest_license(license_list) + asset.license = highest_license['identifier'] + asset.dt = int(time.time()) + asset.save() + + for package in packages: + asset = Asset() + new_level = IastVulLevel.objects.get(name="info") + aql = get_package_aql(package['name'], package['ecosystem'], + package['version']) + + # change to update_or_create + asset.package_name = aql + asset.package_path = package_path + asset.signature_value = package['hash'] + asset.signature_algorithm = 'SHA-1' + asset.version = package['version'] + asset.level_id = new_level.id + asset.vul_count = 0 + asset.language = agent.language + if agent: + asset.agent = agent + asset.project_version_id = agent.project_version_id if agent.project_version_id else 0 + asset.project_name = agent.project_name + asset.language = agent.language + asset.project_id = -1 + if agent.bind_project_id: + asset.project_id = agent.bind_project_id + asset.user_id = -1 + if agent.user_id: + asset.user_id = agent.user_id + license_list = get_license_list( + package['license'] if package['license'] else "non-standard") + asset.license_list = license_list + highest_license = get_highest_license(license_list) + asset.highest_license = get_highest_license(license_list) + asset.license = highest_license['identifier'] + asset.dt = int(time.time()) + asset.save() + sca_scan_asset(asset.id, package['ecosystem'], package['name'], + package['version']) + + +from collections import defaultdict +from dongtai_common.models.asset_vul import IastAssetVul + + +def stat_severity(serveritys) -> defaultdict: + dic = defaultdict(int) + for serverity in serveritys: + dic[serverity] += 1 + return dic + + +from dongtai_common.models.asset import Asset +from packaging.version import _BaseVersion + + +class DongTaiScaVersion(_BaseVersion): + """ + Internal Temprorary Version Solution. + Use to compare version. 
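+ The sort key concatenates up to the first four dot-separated components,
+ padded with "0" to five entries and each left-padded with zeros to at
+ least five characters; instances are compared through that string key.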
+ """ + + def __init__(self, version: str) -> None: + version_code = "" + version_list = version.split('.')[0:4] + while len(version_list) != 5: + version_list.append("0") + for _version in version_list: + version_code += _version.zfill(5) + self._key = version_code + self._version = version + + +def get_nearest_version(version_str: str, version_str_list: List[str]) -> str: + return min(filter(lambda x: x >= DongTaiScaVersion(version_str), + map(lambda x: DongTaiScaVersion(x), version_str_list)), + default=DongTaiScaVersion(""))._version + + +def get_latest_version(version_str_list: List[str]) -> str: + return max(map(lambda x: DongTaiScaVersion(x), version_str_list), + default=DongTaiScaVersion(""))._version + + +def get_cve_numbers(cve: str = "", + cwe: list = [], + cnvd: str = "", + cnnvd: str = ""): + return {'cve': cve, 'cwe': cwe, 'cnvd': cnvd, 'cnnvd': cnnvd} + + +def get_vul_serial(title: str = "", + cve: str = "", + cwe: list = [], + cnvd: str = "", + cnnvd: str = "") -> str: + return "|".join([title, cve, cnvd, cnnvd] + cwe) + + +from collections import defaultdict + + +def get_vul_level_dict() -> defaultdict: + return defaultdict(lambda: 5, { + 'high': 1, + "critical": 1, + "medium": 2, + "low": 3 + }) + + +def get_ecosystem_language_dict() -> defaultdict: + return defaultdict(lambda: 'JAVA', { + 'maven': 'JAVA', + "pypi": 'PYTHON', + "composer": 'PHP', + "golang": 'GO' + }) + + +def get_description(descriptions: List[Dict]) -> str: + if not descriptions: + return "" + return sorted(descriptions, key=lambda x: x['language'], + reverse=True)[0]['content'] + + +def get_vul_path(base_aql: str, + vul_package_path: List[Dict] = []) -> List[str]: + return list( + map(lambda x: get_package_aql(x['name'], x['ecosystem'], x['version']), + vul_package_path)) + [base_aql] + + +from dongtai_common.models.asset_vul import (IastAssetVulTypeRelation, + IastAssetVul, + IastVulAssetRelation, + IastAssetVulType) + +from .cwe import get_cwe_name + +def get_asset_level(res: dict) -> int: + level_map = {'critical': 1, 'high': 1, 'medium': 2, 'low': 3} + for k, v in level_map.items(): + if res[k] > 0: + return v + return 4 + + +def get_detail(res: List[Dict]) -> str: + slice_first = sorted(res, key=lambda x: x['language'], reverse=True)[0:] + if slice_first: + return slice_first[0]["content"] + return "" + + +def get_title(title_zh: str, title_en: str) -> str: + title_list = list(filter(lambda x: x != "", [title_zh, title_en])) + if title_list: + return title_list[0] + return "" + + +from django.db import IntegrityError + + +def sca_scan_asset(asset_id: int, ecosystem: str, package_name: str, + version: str): + aql = get_package_aql(package_name, ecosystem, version) + package_vuls, safe_version = get_package_vul_v2(aql) + res = stat_severity(map(lambda x: x["severity"], package_vuls)) + timestamp = int(time.time()) + package_language = get_ecosystem_language_dict()[ecosystem] + Asset.objects.filter(pk=asset_id).update(level_id=get_asset_level(res)) + Asset.objects.filter(pk=asset_id).update( + **{f"vul_{k}_count": v + for k, v in res.items()}) + Asset.objects.filter(pk=asset_id).update( + **{"vul_count": sum(res.values())}) + for vul in package_vuls: + vul_dependency = get_vul_path(aql, vul['vul_package_path']) + cve_numbers = get_cve_numbers(vul['cve'], vul['cwe_info'], vul['cnvd'], + vul['cnnvd']) + nearest_fixed_version = get_nearest_version( + version, [i['version'] for i in vul['fixed']]) + vul_serial = get_vul_serial(vul['vul_title'], vul['cve'], + vul['cwe_info'], vul['cnvd'], vul['cnnvd']) + 
vul_level = get_vul_level_dict()[vul['severity']] + detail = get_detail(vul['description']) + #still need , save to asset_vul_relation + # nearest_fixed_version = get_nearest_version(version, vul['fixed']) + # save to asset latest_version + # latest_version = get_latest_version(vul['safe_version']) + + # where to place? save_version save to asset + # package_safe_version_list = vul['safe_version'] + # effected save to asset_vul_relation + package_effected_version_list = vul['effected'] + package_fixed_version_list = vul['fixed'] + + # 兼容 + # + if not IastAssetVul.objects.filter(sid=vul['sid']).exists(): + asset_vul = IastAssetVul.objects.filter( + sid__isnull=True, + cve_code=vul['cve']).order_by('update_time').first() + if asset_vul: + asset_vul.sid = vul['sid'] + asset_vul.save() + asset_vul, _ = IastAssetVul.objects.update_or_create( + sid=vul['sid'], + defaults={ + "package_name": vul['name'], + "level_id": vul_level, + "vul_name": get_title(vul['vul_title'], vul['vul_title_en']), + "vul_detail": detail, + "aql": aql, + # package_hash=vul_package_hash, #??? + "package_version": version, + #package_latest_version=latest_version, + "package_language": package_language, + "have_article": 1 if vul['references'] else 0, + "have_poc": 1 if vul['poc'] else 0, + #cve_id=cve_relation.id, + "vul_cve_nums": cve_numbers, + "vul_serial": vul_serial, + "vul_publish_time": vul['publish_time'], + "vul_update_time": vul['vul_change_time'], + "update_time": timestamp, + "update_time_desc": -timestamp, + "create_time": timestamp, + "fix_plan": vul['fix_plan'], + "poc": vul['poc'], + "descriptions": vul['description'], + "references": vul['references'], + }, + ) + asset_vul_relation, _ = IastVulAssetRelation.objects.update_or_create( + asset_vul_id=asset_vul.id, + asset_id=asset_id, + defaults={ + "create_time": timestamp, + "vul_dependency_path": vul_dependency, + "effected_version_list": package_effected_version_list, + "fixed_version_list": package_fixed_version_list, + "nearest_fixed_version": nearest_fixed_version, + "status_id": 1, + }, + ) + if len(vul['cwe_info']) == 0: + vul['cwe_info'].append('') + for cwe_id in vul['cwe_info']: + if not IastAssetVulType.objects.filter(cwe_id=cwe_id).exists(): + try: + IastAssetVulType.objects.create(cwe_id=cwe_id, + name=get_cwe_name(cwe_id)) + except IntegrityError as e: + logger.debug("unique error stack: ", exc_info=True) + logger.info( + "unique error cause by concurrency insert,ignore it") + type_: IastAssetVulType = IastAssetVulType.objects.filter( + cwe_id=cwe_id).first() + IastAssetVulTypeRelation.objects.get_or_create( + asset_vul_id=asset_vul.id, asset_vul_type_id=type_.id) + nearest_safe_version = get_nearest_version( + version, [i['version'] for i in safe_version]) + latest_safe_version = get_latest_version( + [i['version'] for i in safe_version]) + Asset.objects.filter(pk=asset_id).update( + safe_version_list=safe_version, + nearest_safe_version=nearest_safe_version, + latest_safe_version=latest_safe_version) diff --git a/dongtai_web/dongtai_sca/serializers/__init__.py b/dongtai_web/dongtai_sca/serializers/__init__.py new file mode 100644 index 000000000..0ff3192d9 --- /dev/null +++ b/dongtai_web/dongtai_sca/serializers/__init__.py @@ -0,0 +1,5 @@ +# !usr/bin/env python +# coding:utf-8 +# @author:zhaoyanwei +# @file: __init__.py.py +# @time: 2022/5/7 上午7:38 diff --git a/dongtai_web/dongtai_sca/serializers/asset_project.py b/dongtai_web/dongtai_sca/serializers/asset_project.py new file mode 100644 index 000000000..84942c0bc --- /dev/null +++ 
b/dongtai_web/dongtai_sca/serializers/asset_project.py @@ -0,0 +1,16 @@ +# !usr/bin/env python +# coding:utf-8 +# @author:zhaoyanwei +# @file: asset_project.py +# @time: 2022/5/7 上午7:39 +from rest_framework import serializers + +from dongtai_common.models.asset import Asset + + +class AssetProjectSerializer(serializers.ModelSerializer): + class Meta: + model = Asset + fields = [ + 'project_id', 'project_name', 'dependency_level', 'package_name' + ] diff --git a/dongtai_web/dongtai_sca/tasks.py b/dongtai_web/dongtai_sca/tasks.py new file mode 100644 index 000000000..8262dbfdd --- /dev/null +++ b/dongtai_web/dongtai_sca/tasks.py @@ -0,0 +1,62 @@ +# !usr/bin/env python +# coding:utf-8 +# @author:zhaoyanwei +# @file: tasks.py +# @time: 2022/5/9 下午3:45 + +from dongtai_common.models import User +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.asset import Asset +from celery.apps.worker import logger + +from dongtai_web.dongtai_sca.utils import sca_scan_asset + + +def refresh_all_asset_data(): + """ + todo 一次性任务,更新组件数据 + """ + logger.info('开始更新组件数据') + + iast_assets = Asset.objects.filter(dependency_level=0).all() + if iast_assets: + for asset in iast_assets: + try: + update_fields = [] + asset_agent = IastAgent.objects.filter(id=asset.agent_id).values("bind_project_id", + "project_name", + "user_id", + "project_version_id", + "language").first() + if asset_agent: + if asset_agent['bind_project_id'] != 0: + asset.project_id = asset_agent['bind_project_id'] + asset.project_name = asset_agent['project_name'] + asset.project_version_id = asset_agent['project_version_id'] + update_fields.extend(['project_id', 'project_name', 'project_version_id']) + if asset_agent['user_id'] != 0: + user = User.objects.filter(id=asset_agent['user_id']).first() + if user: + user_department = user.get_department() + user_talent = user.get_talent() + asset.department_id = user_department.id if user_department else -1 + asset.talent_id = user_talent.id if user_talent else -1 + asset.user_id = asset_agent['user_id'] + update_fields.append('user_id') + update_fields.append('talent_id') + update_fields.append('department_id') + update_fields.append('language') + asset.language = asset_agent['language'] + + asset.save(update_fields=update_fields) + + # 更新asset + sca_scan_asset(asset) + + except Exception as e: + logger.error(f'SCA组件数据更新出错,错误原因:{e}') + continue + + logger.info('组件更新数据处理完成') + + return True diff --git a/dongtai_web/dongtai_sca/urls.py b/dongtai_web/dongtai_sca/urls.py new file mode 100644 index 000000000..ff4c32895 --- /dev/null +++ b/dongtai_web/dongtai_sca/urls.py @@ -0,0 +1,19 @@ +from django.urls import include, path + +from dongtai_web.dongtai_sca.views.asset_projects import AssetProjects +from dongtai_web.dongtai_sca.views.package import PackageList, AssetAggrDetailAssetIds +from dongtai_web.dongtai_sca.views.package_vul import OnePackageVulList, AssetPackageVulList, AssetPackageVulDetail +from rest_framework import routers + +router = routers.DefaultRouter() + +urlpatterns = [ + path('package/', PackageList.as_view()), + path('package_vul/', OnePackageVulList.as_view()), + path('asset_projects/', AssetProjects.as_view()), + path('asset_vuls/', AssetPackageVulList.as_view()), + path('asset_vul_detail/', AssetPackageVulDetail.as_view()), + path('asset_ids/', AssetAggrDetailAssetIds.as_view()), +] + +urlpatterns = [path('sca/v1/', include(urlpatterns), name='ScaAPI'), ] diff --git a/dongtai_web/dongtai_sca/utils.py b/dongtai_web/dongtai_sca/utils.py new file mode 100644 index 
000000000..89b06722c --- /dev/null +++ b/dongtai_web/dongtai_sca/utils.py @@ -0,0 +1,413 @@ +# !usr/bin/env python +# coding:utf-8 +# @author:zhaoyanwei +# @file: utils.py +# @time: 2022/5/5 下午7:26 +import json +import logging +import random +import time + +import requests +from django.conf import settings +from django.db.models import Count, Q +from django.forms import model_to_dict + +from dongtai_common.models import User +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.asset import Asset +from dongtai_common.models.asset_aggr import AssetAggr +from dongtai_common.models.asset_vul import IastAssetVul, IastVulAssetRelation, IastAssetVulType, \ + IastAssetVulTypeRelation +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_web.vul_log.vul_log import log_asset_vul_found +from dongtai_web.dongtai_sca.models import Package, VulPackage, VulCveRelation, PackageLicenseLevel, PackageDependency +from dongtai_common.models.asset_vul import IastAssetVulnerabilityDocument +from dongtai_conf.settings import ELASTICSEARCH_STATE + +logger = logging.getLogger(__name__) + + +def sca_scan_asset(asset): + """ + 根据SCA数据库,更新SCA记录信息 + :return: + """ + agent = asset.agent + version = asset.version + asset_package = Package.objects.filter(hash=asset.signature_value, version=version).first() + update_fields = list() + try: + logger.info('[sca_scan_asset]开始检测组件:{}/{}'.format(asset.id, asset.package_name)) + if asset_package: + package_name = asset_package.aql + version = asset_package.version + + if version: + version_code = "" + version_list = asset_package.version.split('.')[0:4] + while len(version_list) != 5: + version_list.append("0") + for _version in version_list: + version_code += _version.zfill(5) + else: + version_code = "0000000000000000000000000" + + vul_list = VulPackage.objects.filter(ecosystem=asset_package.ecosystem, name=asset_package.name, + introduced=asset_package.version).all() + + if asset_package.license: + asset.license = asset_package.license + update_fields.append('license') + # 最小修复版本-安全版本 + package_ranges = VulPackage.objects.filter(ecosystem=asset_package.ecosystem, + name=asset_package.name, + safe_vcode__gte=version_code).order_by("safe_vcode").first() + if package_ranges and asset.safe_version != package_ranges.safe_version: + asset.safe_version = package_ranges.fixed + update_fields.append('safe_version') + + # 最新版本 + last_package = Package.objects.filter(Q(ecosystem=asset_package.ecosystem) + & Q(name=asset_package.name) & ~Q(version="")).order_by( + "-version_publish_time").first() + if last_package: + asset.last_version = last_package.version + update_fields.append('last_version') + + levels_dict = dict() + vul_count = 0 + levels = [] + vul_records = [] + for vul in vul_list: + if vul.cve in vul_records: + continue + vul_records.append(vul.cve) + + _level = vul.severity + vul_cve_code = vul.cve + if vul_cve_code: + cve_relation = VulCveRelation.objects.filter( + Q(cve=vul_cve_code) | Q(cnvd=vul_cve_code) | Q(cnnvd=vul_cve_code)).first() + if cve_relation: + if _level == 'note': + _level = 'info' + if _level and _level not in levels: + levels.append(_level) + if _level not in levels_dict: + levels_dict[_level] = 1 + else: + levels_dict[_level] += 1 + vul_count += 1 + # 写入IastAssetVul + _add_vul_data(asset, asset_package, cve_relation) + + if len(levels) > 0: + if 'critical' in levels: + level = 'high' + elif 'high' in levels: + level = 'high' + elif 'medium' in levels: + level = 'medium' + elif 'low' in levels: + level = 'low' + 
else: + level = 'info' + else: + level = 'info' + + new_level = IastVulLevel.objects.get(name=level) + # 更新漏洞等级和各级漏洞数 + if asset.level != new_level: + asset.level = new_level + update_fields.append('level') + + if 'critical' in levels_dict: + asset.vul_critical_count = levels_dict['critical'] + update_fields.append('vul_critical_count') + else: + asset.vul_critical_count = 0 + update_fields.append('vul_critical_count') + if 'high' in levels_dict: + asset.vul_high_count = levels_dict['high'] + update_fields.append('vul_high_count') + else: + asset.vul_high_count = 0 + update_fields.append('vul_high_count') + if 'medium' in levels_dict: + asset.vul_medium_count = levels_dict['medium'] + update_fields.append('vul_medium_count') + else: + asset.vul_medium_count = 0 + update_fields.append('vul_medium_count') + + if 'low' in levels_dict: + asset.vul_low_count = levels_dict['low'] + update_fields.append('vul_low_count') + else: + asset.vul_low_count = 0 + update_fields.append('vul_low_count') + + if 'info' in levels_dict: + asset.vul_info_count = levels_dict['info'] + update_fields.append('vul_info_count') + else: + asset.vul_info_count = 0 + update_fields.append('vul_info_count') + + if asset.vul_count != vul_count: + asset.vul_count = vul_count + update_fields.append('vul_count') + + if asset.package_name != package_name: + asset.package_name = package_name + update_fields.append('package_name') + + if asset.version != version: + asset.version = version + update_fields.append('version') + + asset.dependency_level = 1 + update_fields.append('dependency_level') + + if len(update_fields) > 0: + logger.info(f'update asset {asset.id} dependency fields: {update_fields}') + asset.save(update_fields=update_fields) + else: + logger.warning('[sca_scan_asset]检测组件在组件库不存在:{}/{}'.format(asset.id, asset.package_name)) + update_asset_aggr(asset) + except Exception as e: + # import traceback + # traceback.print_exc() + logger.info("get package_vul failed:{}".format(e)) + + +# 处理IastAssetVul +def _add_vul_data(asset, asset_package, cve_relation): + try: + level_maps = dict() + _level = cve_relation.severity + vul_title = '' + vul_detail = '' + vul_have_poc = 0 + # vul_have_article = 1 if vul_info['references'] else 0 + vul_have_article = 0 + + vul_serial = '' + vul_reference = dict() + vul_type_ids = [] + # cve_relation_id = 0 + default_cwe_info = {'cwe_id': '', 'name_chinese': '未知'} + + if cve_relation: + # cve_relation_id = cve_relation.id + vul_title = cve_relation.vul_title + if cve_relation.description: + vul_detail = cve_relation.description[0]['content'] + vul_have_poc = 1 if cve_relation.poc else 0 + vul_reference = {"cve": cve_relation.cve, "cwe": cve_relation.cwe, "cnnvd": cve_relation.cnnvd, + "cnvd": cve_relation.cnvd} + vul_serial = ' | '.join([vul_reference[_i] for _i in vul_reference]) + vul_serial = vul_title + ' | ' + vul_serial + vul_cwe_id = cve_relation.cwe.split(',') + vul_type_cwe = IastAssetVulType.objects.filter(cwe_id__in=vul_cwe_id).all() + + if not vul_type_cwe: + if cve_relation.cwe_info: + for cwe in cve_relation.cwe_info: + vul_type_cwe1 = IastAssetVulType.objects.filter(cwe_id=cwe['cwe_id']).first() + if not vul_type_cwe1: + vul_type_cwe_new = IastAssetVulType.objects.create(cwe_id=cwe['cwe_id'], + name=cwe['name_chinese']) + vul_type_id = vul_type_cwe_new.id + else: + vul_type_id = vul_type_cwe1.id + vul_type_ids.append(vul_type_id) + else: + for cwe in vul_type_cwe: + vul_type_ids.append(cwe.id) + + if not vul_type_ids: + vul_type_cwe = 
IastAssetVulType.objects.filter(cwe_id=default_cwe_info['cwe_id']).first() + if not vul_type_cwe: + vul_type_cwe_new = IastAssetVulType.objects.create(cwe_id=default_cwe_info['cwe_id'], + name=default_cwe_info['name_chinese']) + vul_type_ids.append(vul_type_cwe_new.id) + else: + vul_type_ids.append(vul_type_cwe.id) + + vul_license = asset_package.license + vul_aql = asset_package.aql + vul_package_hash = asset_package.hash + vul_package_v = asset_package.version + vul_package_safe_version = asset.safe_version + vul_package_latest_version = asset.last_version + vul_package_language = asset.language + + license_level_info = PackageLicenseLevel.objects.filter(identifier=vul_license).first() + vul_license_level = license_level_info.level_id if license_level_info else 0 + + if _level in level_maps: + vul_level = level_maps[_level] + else: + level_obj = IastVulLevel.objects.filter(name=_level).first() + if level_obj: + level_maps[_level] = level_obj.id + vul_level = level_obj.id + else: + vul_level = 1 # critical IastVulLevel查不到归到high + asset_vul = IastAssetVul.objects.filter(cve_id=cve_relation.id, aql=vul_aql, + package_hash=vul_package_hash, + package_version=vul_package_v).first() + timestamp = int(time.time()) + if not asset_vul: + asset_vul = IastAssetVul.objects.create( + package_name=asset_package.name, + level_id=vul_level, + license=vul_license, + license_level=vul_license_level, + vul_name=vul_title, + vul_detail=vul_detail, + aql=vul_aql, + package_hash=vul_package_hash, + package_version=vul_package_v, + package_safe_version=vul_package_safe_version, + package_latest_version=vul_package_latest_version, + package_language=vul_package_language, + have_article=vul_have_article, + have_poc=vul_have_poc, + cve_id=cve_relation.id, + cve_code=cve_relation.cve, + vul_cve_nums=vul_reference, + vul_serial=vul_serial, + vul_publish_time=cve_relation.publish_time, + vul_update_time=cve_relation.update_time, + update_time=timestamp, + update_time_desc=-timestamp, + create_time=timestamp + ) + _add_asset_vul_relation(asset_vul) + if vul_type_ids: + type_relation_obj = [] + for vul_type_id in vul_type_ids: + type_relation = IastAssetVulTypeRelation(asset_vul_id=asset_vul.id, + asset_vul_type_id=vul_type_id) + type_relation_obj.append(type_relation) + + IastAssetVulTypeRelation.objects.bulk_create(type_relation_obj) + + # new vul add log + log_project_name = asset.project_name if asset.project_name else '' + log_project_id = asset.project_id if asset.project_id else 0 + log_user_id = asset.user_id if asset.user_id else 0 + log_asset_vul_found(log_user_id, log_project_name, log_project_id, asset_vul.id, asset_vul.vul_name) + else: + asset_vul.update_time = timestamp + asset_vul.update_time_desc = -timestamp + asset_vul.save() + # vul log + log_project_name = asset.project_name if asset.project_name else '' + log_project_id = asset.project_id if asset.project_id else 0 + log_user_id = asset.user_id if asset.user_id else 0 + log_asset_vul_found(log_user_id, log_project_name, log_project_id, asset_vul.id, asset_vul.vul_name) + _add_asset_vul_relation(asset_vul) + + except Exception as e: + # import traceback + # traceback.print_exc() + logger.info("_add_vul_data failed:{}".format(e)) + + +def _add_asset_vul_relation(asset_vul): + vul_assets = Asset.objects.filter(version=asset_vul.package_version, + signature_value=asset_vul.package_hash).values('id').all() + asset_vul_relations = [] + timestamp = int(time.time()) + if vul_assets: + for asset_vl in vul_assets: + relation_exist = 
IastVulAssetRelation.objects.filter(asset_vul_id=asset_vul.id, + asset_id=asset_vl['id']).first() + if not relation_exist: + asset_vul_relations.append(IastVulAssetRelation(asset_vul_id=asset_vul.id, asset_id=asset_vl['id'], + create_time=timestamp, status_id=1)) + + if asset_vul_relations: + IastVulAssetRelation.objects.bulk_create(asset_vul_relations) + if ELASTICSEARCH_STATE: + asset_vul_created = IastVulAssetRelation.objects.filter( + asset_vul=asset_vul, create_time=timestamp, status_id=1).all() + IastAssetVulnerabilityDocument().update(asset_vul_created) + +def update_asset_aggr(asset): + try: + project_count = 0 + asset_aggr = AssetAggr.objects.filter(signature_value=asset.signature_value, version=asset.version).first() + project_count_query = Asset.objects.filter(project_id__gt=0, signature_value=asset.signature_value, + version=asset.version).values( + 'signature_value', 'version').annotate(project_count=Count('project_id', distinct=True)) + if project_count_query: + project_count = [_['project_count'] for _ in project_count_query][0] + + if asset_aggr: + asset_aggr.version = asset.version + asset_aggr.safe_version = asset.safe_version + asset_aggr.last_version = asset.last_version + asset_aggr.level = asset.level + asset_aggr.vul_count = asset.vul_count + asset_aggr.vul_critical_count = asset.vul_critical_count + asset_aggr.vul_high_count = asset.vul_high_count + asset_aggr.vul_medium_count = asset.vul_medium_count + asset_aggr.vul_low_count = asset.vul_low_count + asset_aggr.vul_info_count = asset.vul_info_count + asset_aggr.language = asset.language + asset_aggr.license = asset.license + asset_aggr.is_del = asset.is_del + asset_aggr.project_count = project_count + asset_aggr.save() + else: + AssetAggr.objects.create( + package_name=asset.package_name, + signature_value=asset.signature_value, + version=asset.version, + safe_version=asset.safe_version, + last_version=asset.last_version, + level=asset.level, + vul_count=asset.vul_count, + vul_critical_count=asset.vul_critical_count, + vul_high_count=asset.vul_high_count, + vul_medium_count=asset.vul_medium_count, + vul_low_count=asset.vul_low_count, + project_count=project_count, + language=asset.language, + license=asset.license, + is_del=asset.is_del) + except Exception as e: + logger.error("update_asset_aggr error {}:".format(e)) + + +def get_asset_id_by_aggr_id(aggr_id, asset_ids=None): + data_ids = [] + asset_aggr = AssetAggr.objects.filter(id=aggr_id).first() + if asset_aggr: + assets = Asset.objects.filter(signature_value=asset_aggr.signature_value, version=asset_aggr.version) + if asset_ids: + assets = assets.filter(id__in=asset_ids) + asset_datas = assets.values('id').all() + for asset in asset_datas: + data_ids.append(asset['id']) + + return data_ids + + +def get_package_name_by_aql(aql): + name = '' + + if aql: + aql_split = aql.split(':') + if len(aql_split) > 0: + del aql_split[-2] + del aql_split[-1] + del aql_split[0] + name = ':'.join(aql_split) + + return name diff --git a/dongtai_web/dongtai_sca/views/__init__.py b/dongtai_web/dongtai_sca/views/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/dongtai_sca/views/asset_projects.py b/dongtai_web/dongtai_sca/views/asset_projects.py new file mode 100644 index 000000000..72443dffe --- /dev/null +++ b/dongtai_web/dongtai_sca/views/asset_projects.py @@ -0,0 +1,216 @@ +# !usr/bin/env python +# coding:utf-8 +# @author:zhaoyanwei +# @file: asset_projects.py +# @time: 2022/5/7 上午7:18 +import logging + +from django.db.models import Count 
+from django.forms import model_to_dict +from django.utils.translation import gettext_lazy as _ + +from dongtai_common.endpoint import R, UserEndPoint +from dongtai_common.models import User +from dongtai_common.models.asset import Asset +from dongtai_common.models.asset_aggr import AssetAggr +from dongtai_common.models.asset_vul import IastAssetVul +from dongtai_common.models.project_version import IastProjectVersion +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_web.serializers.sca import ScaSerializer +from dongtai_web.dongtai_sca.serializers.asset_project import AssetProjectSerializer + +logger = logging.getLogger(__name__) + + +class AssetProjects(UserEndPoint): + name = "api-v1-sca-projects" + description = "" + + def get(self, request, aggr_id): + try: + auth_users = self.get_auth_users(request.user) + asset_queryset = self.get_auth_assets(auth_users) + asset = Asset.objects.filter(id=aggr_id).first() + #asset_aggr = AssetAggr.objects.filter( + # signature_value=asset.signature_value).first() + #if not asset_aggr: + # return R.failure(msg=_('Components do not exist or no permission to access')) + + asset_queryset = asset_queryset.filter( + signature_value=asset.signature_value, + version=asset.version, + project_id__gt=0).values('project_id', 'id').all() + if not asset_queryset: + return R.failure(msg=_( + 'Components do not exist or no permission to access')) + + _temp_data = {_a['project_id']: _a['id'] for _a in asset_queryset} + asset_ids = [_temp_data[p_id] for p_id in _temp_data] + + data = AssetProjectSerializer( + Asset.objects.filter(pk__in=asset_ids), many=True).data + + return R.success(data=data) + except Exception as e: + logger.error(e) + return R.failure(msg=_('Component projects query failed')) + + +class AssetVulProjects(UserEndPoint): + name = "api-v1-sca-vul-projects" + description = "" + + def get(self, request, vul_id): + try: + auth_users = self.get_auth_users(request.user) + asset_queryset = self.get_auth_assets(auth_users) + asset_vul = IastAssetVul.objects.filter(id=vul_id).first() + if not asset_vul: + return R.failure(msg=_('Components of the vul do not exist or no permission to access')) + + package_kw = request.query_params.get('keyword', None) + + if package_kw and package_kw.strip() != '': + asset_queryset = asset_queryset.filter(project_name__icontains=package_kw) + + # todo 是否限制只展示4层以内的项目 + asset_queryset = asset_queryset.filter( + iastvulassetrelation__asset_vul_id=vul_id, + project_id__gt=0).values( + 'project_id', + 'project_name', + 'project_version_id', + ).distinct().all() + + data = [] + asset_queryset = asset_queryset.order_by('project_id', 'project_version_id') + page = request.query_params.get('page', 1) + page_size = request.query_params.get('pageSize', 10) + page_summary, page_data = self.get_paginator(asset_queryset, page, page_size) + + if page_data: + for _data in page_data: + project_version_query = IastProjectVersion.objects.filter(project_id=_data['project_id'], + id=_data['project_version_id']).first() + if project_version_query: + project_version = project_version_query.version_name + else: + project_version = '' + asset_info = asset_queryset.filter( + project_id=_data['project_id']).order_by( + 'level_id').values('level_id').first() + level = IastVulLevel.objects.filter( + id=asset_info['level_id']).first() + #level = IastVulLevel.objects.filter(id=_data['level_id']).first() + level_name = level.name_value if level else "" + data.append( + {'project_id': _data['project_id'], 'project_name': 
_data['project_name'], 'level': level_name, + 'project_version': project_version, 'dependency_level': 0, + 'project_version_id': _data['project_version_id']}) + + return R.success(data=data, page=page_summary) + except Exception as e: + logger.error(e) + return R.failure(msg=_('Component vul projects query failed')) + + +from typing import List, Optional,Dict + + +def get_tree(dep_list: List[str]): + a = {} + len_of_list = len(dep_list) + for ind, i in enumerate(dep_list): + if a: + a = {"package_name": i, "dependency_asset": [a], "dependency_level": len_of_list - ind} + else: + a = { + "package_name": i, + "dependency_level": len_of_list - ind, + } + return a + +class ProjectsAssets(UserEndPoint): + name = "api-v1-sca-vul-project-assets" + description = "" + + def get(self, request): + + try: + auth_users = self.get_auth_users(request.user) + asset_queryset = self.get_auth_assets(auth_users) + + vul_id = request.query_params.get('vul_id', 0) + project_id = request.query_params.get('project_id', 0) + project_version_id = request.query_params.get('project_version_id', 0) + + if not project_id: + return R.failure(msg=_('Param error')) + + asset_vul = IastAssetVul.objects.filter(id=vul_id).first() + if not asset_vul: + return R.failure(msg=_('Vul not exist')) + + # 当前漏洞所在组件 + asset_queryset = asset_queryset.filter( + iastvulassetrelation__asset_vul_id=vul_id, + project_id=project_id, + project_version_id=project_version_id).values( + 'id', 'dependency_level', 'package_name', 'version', + 'level', 'parent_dependency_id', + 'iastvulassetrelation__vul_dependency_path').order_by( + 'level_id').first() + if asset_queryset['iastvulassetrelation__vul_dependency_path'] is not None: + return R.success(data=[get_tree(asset_queryset['iastvulassetrelation__vul_dependency_path'])]) + dependency_level = asset_queryset['dependency_level'] + parent_dependency_id = asset_queryset['parent_dependency_id'] + LEVEL_MAPS = dict() + if asset_queryset['level'] not in LEVEL_MAPS: + asset_level = IastVulLevel.objects.filter(id=asset_queryset['level']).values('id', + 'name_value').first() + else: + asset_level = LEVEL_MAPS[asset_queryset['level']] + asset_queryset['level_id'] = asset_level['id'] + asset_queryset['level'] = asset_level['name_value'] + asset_queryset['dependency_asset'] = [] + + resp_data = asset_queryset + asset_queryset_dependency = dict() + if dependency_level > 1 and parent_dependency_id > 0: + dependency_asset = _get_parent_dependency(asset_queryset_dependency, parent_dependency_id) + if dependency_asset: + for dependency_key in dependency_asset: + if dependency_asset[dependency_key]['level'] not in LEVEL_MAPS: + asset_level = IastVulLevel.objects.filter( + id=dependency_asset[dependency_key]['level']).values('id', 'name_value').first() + else: + asset_level = LEVEL_MAPS[asset_queryset['level']] + + dependency_asset[dependency_key]['level_id'] = asset_level['id'] + dependency_asset[dependency_key]['level'] = asset_level['name_value'] + + if dependency_key + 1 not in dependency_asset: + dependency_asset[dependency_key]['dependency_asset'] = [asset_queryset] + else: + dependency_key_n = dependency_key + 1 + dependency_asset[dependency_key]['dependency_asset'] = [dependency_asset[dependency_key_n]] + resp_data = dependency_asset[1] + + return R.success(data=[resp_data]) + except Exception as e: + logger.error(e,exc_info=True) + return R.failure(msg=_('Component vul projects query failed')) + + +def _get_parent_dependency(asset_queryset_dependency, parent_dependency_id): + parent_asset = 
Asset.objects.filter(id=parent_dependency_id).values('id', 'dependency_level', 'package_name', + 'version', 'level', + 'parent_dependency_id').first() + if parent_asset: + dependency_level = parent_asset['dependency_level'] + parent_dependency_id = parent_asset['parent_dependency_id'] + asset_queryset_dependency[dependency_level] = parent_asset + if dependency_level > 1 and parent_dependency_id > 0: + _get_parent_dependency(asset_queryset_dependency, parent_dependency_id) + + return asset_queryset_dependency diff --git a/dongtai_web/dongtai_sca/views/package.py b/dongtai_web/dongtai_sca/views/package.py new file mode 100644 index 000000000..fc7c10fe6 --- /dev/null +++ b/dongtai_web/dongtai_sca/views/package.py @@ -0,0 +1,69 @@ +import logging + +from dongtai_common.models import User +from dongtai_web.dongtai_sca.models import Package +from django.http import JsonResponse +from rest_framework import views +from django.core.paginator import Paginator +from django.forms.models import model_to_dict +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint, UserEndPoint +from django.utils.translation import gettext_lazy as _ + +from dongtai_web.dongtai_sca.utils import get_asset_id_by_aggr_id + +logger = logging.getLogger(__name__) + + +class PackageList(AnonymousAndUserEndPoint): + + def get(self, request): + filter_fields = ['hash', 'aql', 'ecosystem', 'name', 'version'] + _filter = Package.objects.filter().order_by("-updated_at") + kwargs = {} + for filter_field in filter_fields: + _val = request.GET.get(filter_field, "") + if _val != "": + kwargs[filter_field] = request.GET.get(filter_field, "") + _filter = _filter.filter(**kwargs) + + page = int(request.GET.get("page", 1)) + page_size = int(request.GET.get("page_size", 5)) + + pageinfo = Paginator(_filter, per_page=page_size) + result = { + 'data': [], + 'msg': 'success', + 'page': { + 'alltotal': pageinfo.count, + 'num_pages': pageinfo.num_pages, + 'page_size': pageinfo.per_page, + }, + 'status': 201 + } + if page == 0 or page <= pageinfo.num_pages: + rows = pageinfo.page(page).object_list + + for row in rows: + result['data'].append(model_to_dict(row)) + + return JsonResponse(result) + + +class AssetAggrDetailAssetIds(UserEndPoint): + name = "api-v1-sca-aggr-assets" + description = "" + + def get(self, request, aggr_id): + try: + auth_users = self.get_auth_users(request.user) + asset_queryset = self.get_auth_assets(auth_users) + + asset_ids = [] + for asset in asset_queryset: + asset_ids.append(asset.id) + asset_ids = get_asset_id_by_aggr_id(aggr_id, asset_ids) + + return R.success(data=asset_ids) + except Exception as e: + logger.error(e) + return R.failure(msg=_('Component asset id query failed')) diff --git a/dongtai_web/dongtai_sca/views/package_vul.py b/dongtai_web/dongtai_sca/views/package_vul.py new file mode 100644 index 000000000..7599c4e12 --- /dev/null +++ b/dongtai_web/dongtai_sca/views/package_vul.py @@ -0,0 +1,189 @@ +from dongtai_common.models import User +from dongtai_common.models.asset import Asset +from dongtai_common.models.asset_aggr import AssetAggr +from dongtai_common.models.asset_vul import IastAssetVul, IastAssetVulTypeRelation, IastVulAssetRelation +from dongtai_web.dongtai_sca.models import Package, VulPackageVersion, VulPackage, VulPackageRange, Vul, VulCveRelation, \ + PackageLicenseInfo, \ + PackageLicenseLevel +from django.http import JsonResponse +from rest_framework import views +from django.forms.models import model_to_dict +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint, 
UserEndPoint +from django.utils.translation import gettext_lazy as _ +from dongtai_web.dongtai_sca.common.sca_vul import GetScaVulData +from dongtai_common.models.asset_vul import IastVulAssetRelation +from dongtai_common.models.asset import Asset + +LEVEL_MAP = {'critical': '严重', 'high': '高危', 'medium': '中危', 'low': '低危'} + + +class OnePackageVulList(AnonymousAndUserEndPoint): + # 查找单个漏洞下,所有的修复的高版本 + def find_fixed_versions(self, vul_package_id, ecosystem, name, version): + vul_package_ranges = VulPackageRange.objects.filter( + vul_package_id=vul_package_id, + ecosystem=ecosystem, name=name, + type__in=['ECOSYSTEM', 'SEMVER'], + # introduced__lte=version, + fixed_vcode__gte=version + ).all() + fixed_versions = [] + for vul_package_range in vul_package_ranges: + fixed_versions.append(vul_package_range.fixed) + return fixed_versions + + def get(self, request): + filter_fields = ['hash', 'aql', 'ecosystem', 'name', 'version'] + _filter = Package.objects.filter() + kwargs = {} + for filter_field in filter_fields: + _val = request.GET.get(filter_field, "") + if _val != "": + kwargs[filter_field] = request.GET.get(filter_field, "") + + ecosystem = request.GET.get('ecosystem', '') + name = request.GET.get('name', '') + version = request.GET.get('version', '') + + package = _filter.filter(**kwargs).first() + print(package) + if package is not None: + ecosystem = package.ecosystem + name = package.name + version = package.version + + version_code = "" + version_list = version.split('.')[0:4] + while len(version_list) != 5: + version_list.append("0") + for _version in version_list: + version_code += _version.zfill(5) + + vul_list = [] + vul_package_ids = [] + vul_package_ranges = VulPackageRange.objects.filter( + ecosystem=ecosystem, name=name, + introduced_vcode__lte=version_code, fixed_vcode__gt=version_code + ).all()[0:1000] + + for vul_package_range in vul_package_ranges: + vul_package_ids.append(vul_package_range.vul_package_id) + + vul_package_versions = VulPackageVersion.objects.filter( + ecosystem=ecosystem, name=name, version=version + ).all()[0:1000] + for vul_package_version in vul_package_versions: + vul_package_ids.append(vul_package_version.vul_package_id) + + for vul_package_id in vul_package_ids: + vul_package = VulPackage.objects.get(pk=vul_package_id) + vul = Vul.objects.get(pk=vul_package.vul_id) + vul_list.append( + { + "vul": model_to_dict(vul), + "vul_package": model_to_dict(vul_package), + "fixed_versions": self.find_fixed_versions( + vul_package_id, + ecosystem, + name, + version_code + ) + } + ) + if package is not None: + package = model_to_dict(package) + + result = { + 'data': { + "vul_list": vul_list, + "package": package, + }, + 'msg': 'success', + 'status': 201 + } + return JsonResponse(result) + + +class AssetPackageVulList(UserEndPoint): + name = "api-v1-sca-package-vuls" + description = "" + + def get(self, request, aggr_id): + auth_users = self.get_auth_users(request.user) + #asset_queryset = self.get_auth_assets(auth_users) + asset = Asset.objects.filter(pk=aggr_id, + user__in=auth_users).first() + if not asset: + return R.failure(msg=_('Components do not exist or no permission to access')) + #asset_aggr = AssetAggr.objects.filter( + # signature_value=asset.signature_value).first() + #if not asset_aggr: + # return R.failure(msg=_('Components do not exist or no permission to access')) + + #asset_queryset_exist = asset_queryset.filter(signature_value=asset.signature_value, + # version=asset.version, dependency_level__gt=0).exists() + #if not asset_queryset_exist: + 
# return R.failure(msg=_('Components do not exist or no permission to access')) + + vul_list = [] + #auth_asset_vuls = self.get_auth_asset_vuls(asset_queryset) + #asset_vuls = IastAssetVul.objects.filter(aql=asset_aggr.package_name, + # package_hash=asset_aggr.signature_value, + # package_version=asset_aggr.version).all() + auth_asset_vuls = IastAssetVul.objects.filter( + iastvulassetrelation__asset_id=aggr_id).select_related( + 'level').prefetch_related( + 'iastassetvultyperelation_set__asset_vul_type').all() + for a_vul in auth_asset_vuls: + #vul_type_relation = IastAssetVulTypeRelation.objects.filter( + # asset_vul_id=a_vul.id) + vul_type_relation = a_vul.iastassetvultyperelation_set.all() + vul_type_str = "" + if vul_type_relation: + vul_types = [ + _i.asset_vul_type.name for _i in vul_type_relation + ] + vul_type_str = ','.join(vul_types) + try: + cve_code = a_vul.vul_cve_nums.get('cve') + except Exception as e: + logger.debug(e) + cve_code = "" + vul_list.append({ + "asset_vul_id": a_vul.id, + "vul_title": a_vul.vul_name, + "cve_id": cve_code, + "sid": a_vul.sid, + "cve_nums": a_vul.vul_cve_nums, + "vul_type": vul_type_str, + "level_id": a_vul.level.id, + "level": a_vul.level.name_value, + }) + + return R.success(data=vul_list) + + +class AssetPackageVulDetail(UserEndPoint): + name = "api-v1-sca-package-vul-detail" + description = "" + + def get(self, request, vul_id): + # 组件漏洞基础 数据读取 + asset_vul = IastAssetVul.objects.filter(id=vul_id).first() + # 用户鉴权 + auth_users = self.get_auth_users(request.user) + asset_queryset = self.get_auth_assets(auth_users) + + # 判断是否有权限 + if not asset_vul or not permission_to_read_asset_vul(auth_users, vul_id): + return R.failure( + msg=_('Vul do not exist or no permission to access')) + + data = GetScaVulData(asset_vul, asset_queryset) + + return R.success(data=data) + + +def permission_to_read_asset_vul(users, asset_vul_id: int): + return IastVulAssetRelation.objects.filter( + asset__user__in=users, asset_vul_id=asset_vul_id).exists() diff --git a/dongtai_web/github_contributors.py b/dongtai_web/github_contributors.py new file mode 100644 index 000000000..92e125171 --- /dev/null +++ b/dongtai_web/github_contributors.py @@ -0,0 +1,97 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : github_contributors +# @created : Thursday Sep 16, 2021 10:52:20 CST +# +# @description : +###################################################################### + +import requests +from urllib.parse import urljoin +import json +from django.db import models +from rest_framework import serializers +import logging +from functools import partial +import time +from django.core.cache import cache + + +URL_LIST = [ + 'https://api.github.com/repos/HXSecurity/DongTai-Doc-en/', + 'https://api.github.com/repos/HXSecurity/DongTai-Doc/', + 'https://api.github.com/repos/HXSecurity/DongTai-agent-java/', + 'https://api.github.com/repos/HXSecurity/DongTai/', + 'https://api.github.com/repos/HXSecurity/DongTai-Plugin-IDEA/', + 'https://api.github.com/repos/HXSecurity/vulhub-compose/', + 'https://api.github.com/repos/HXSecurity/DongTai-web/', + 'https://api.github.com/repos/HXSecurity/DongTai-webapi/', + 'https://api.github.com/repos/HXSecurity/DongTai-openapi/', + 'https://api.github.com/repos/HXSecurity/DongTai-engine/', + 'https://api.github.com/repos/HXSecurity/dongtai-core/', + 'https://api.github.com/repos/HXSecurity/Dongtai-Base-Image/', +] + +logger = logging.getLogger('dongtai-dongtai_conf') + + +def 
_signed_state(dic: dict, state: int): + dic['state'] = state + return dic + + +def _change_dict_key(dic: dict, from_field: str, to_field: str): + dic[to_field] = dic[from_field] + del dic[from_field] + return dic + + +def key_filiter(dic, keylist): + new_dic = {} + for key in keylist: + new_dic[key] = dic[key] + return new_dic + + +def _get_github_user(url_list=URL_LIST, suffix='pulls?state=all'): + + total_users = {} + user_count = {} + is_over_limit = False + for url in url_list: + resp = requests.get(urljoin(url, suffix)) + if resp.status_code == 403: + is_over_limit = True + break + res = json.loads(resp.content) + repo_users = list(map(lambda x: x['user'], res)) + repo_users_dic = {_['id']: _ for _ in repo_users} + for user in repo_users: + if user_count.get(user['id'], None): + user_count[user['id']] += 1 + else: + user_count[user['id']] = 1 + total_users.update(repo_users_dic) + sorted_user_list = sorted(user_count.items(), + key=lambda x: x[1], + reverse=True) + user_list = [] + for user in sorted_user_list: + user_list.append(total_users[user[0]]) + return user_list, is_over_limit + + +_get_github_issues = partial(_get_github_user, suffix='issues?state=all') +_get_github_prs = partial(_get_github_user, suffix='pulls?state=all') + + +def get_github_contributors(dic={}, update=False): + if update: + dic1 = {} + dic1['issues'], is_over_limit_pr = _get_github_issues() + dic1['prs'], is_over_limit_issue = _get_github_prs() + dic1['time'] = int(time.time()) + if cache.get('github_contributors') is None or not any( + [is_over_limit_pr, is_over_limit_issue]): + cache.set('github_contributors', dic1, 60 * 180) + return cache.get('github_contributors', default={}) diff --git a/dongtai_web/notify/__init__.py b/dongtai_web/notify/__init__.py new file mode 100644 index 000000000..45249e423 --- /dev/null +++ b/dongtai_web/notify/__init__.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi diff --git a/dongtai_web/notify/email.py b/dongtai_web/notify/email.py new file mode 100644 index 000000000..6a00270ae --- /dev/null +++ b/dongtai_web/notify/email.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi + + +from email.mime.text import MIMEText +from email.utils import formatdate, make_msgid +from smtplib import SMTP_SSL, SMTP + + +class Email: + + def __init__(self): + pass + + def login_server(self, server, port, username, pwd, ssl=True): + if port: + self.smtp = SMTP_SSL(server, port=port) if ssl else SMTP(server, + port=port) + else: + self.smtp = SMTP_SSL(server) if ssl else SMTP(server) + self.smtp.login(username, pwd) + + def logout_server(self): + self.smtp.quit() + + def __send_mail(self, from_addr, to_addrs, _subject, _content, _type=None): + msg = None + if _type: + msg = MIMEText(_text=_content, _subtype=_type, _charset='utf-8') + else: + msg = MIMEText(_content) + msg["From"] = from_addr + msg["Subject"] = _subject + msg["Date"] = formatdate(localtime=True) + msg["Message-ID"] = make_msgid() + self.smtp.sendmail(msg['From'], to_addrs, msg.as_string()) + + def sendmail(self, from_addr, to_addrs, _subject, _content, content_type=None): + self.__send_mail(from_addr, to_addrs, _subject, _content, content_type) + + def sendmail_batch(self, server, username, pwd, from_addr, to_addrs, _subject, _content, content_type=None): + self.login_server(server, username, pwd) + self.__send_mail(from_addr, to_addrs, _subject, _content, content_type) + 
self.logout_server() + + +''' +Example 1: mail_content = "This is a auto-send mail." + mail = Email("smtp.qq.com", "execute@qq.com", password, "send_user@qq.com", [ + "recv1@qq.com", "recv2@qq.com"], "Test Moudle", mail_content, "html") + mail.sendmail() + Example 2: mail_content = ' + Hi, + This is a auto - send mail.' + mail = Email("smtp.qq.com", "execute@qq.com", password, "send_user@qq.com", ["recv1@qq.com", "recv2@qq.com"], + "Test Moudle", mail_content) + mail.sendmail() +''' diff --git a/dongtai_web/notify/feishu.py b/dongtai_web/notify/feishu.py new file mode 100644 index 000000000..1dd34b811 --- /dev/null +++ b/dongtai_web/notify/feishu.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi +import requests +from django.utils.translation import gettext_lazy as _ + + +def notify(msg): + requests.post( + url='https://open.feishu.cn/open-apis/bot/hook/6b4275518b8b457784682a507bb86304', + json={"title": _("Maven official crawler"), "text": msg} + ) diff --git a/apiserver/permissions.py b/dongtai_web/permissions.py similarity index 94% rename from apiserver/permissions.py rename to dongtai_web/permissions.py index 3c755b659..8ebfbcee8 100644 --- a/apiserver/permissions.py +++ b/dongtai_web/permissions.py @@ -1,9 +1,8 @@ #!/usr/bin/env python # -*- coding:utf-8 -*- # author:owefsad -# datetime:2021/1/12 下午7:44 # software: PyCharm -# project: lingzhi-agent-server +# project: lingzhi-webapi from rest_framework import permissions diff --git a/dongtai_web/scaupload/__init__.py b/dongtai_web/scaupload/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/scaupload/admin.py b/dongtai_web/scaupload/admin.py new file mode 100644 index 000000000..8c38f3f3d --- /dev/null +++ b/dongtai_web/scaupload/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/dongtai_web/scaupload/apps.py b/dongtai_web/scaupload/apps.py new file mode 100644 index 000000000..a30c4df4e --- /dev/null +++ b/dongtai_web/scaupload/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class ScauploadConfig(AppConfig): + name = 'scaupload' diff --git a/dongtai_web/scaupload/models.py b/dongtai_web/scaupload/models.py new file mode 100644 index 000000000..71a836239 --- /dev/null +++ b/dongtai_web/scaupload/models.py @@ -0,0 +1,3 @@ +from django.db import models + +# Create your models here. diff --git a/dongtai_web/scaupload/tests.py b/dongtai_web/scaupload/tests.py new file mode 100644 index 000000000..7ce503c2d --- /dev/null +++ b/dongtai_web/scaupload/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/dongtai_web/scaupload/urls.py b/dongtai_web/scaupload/urls.py new file mode 100644 index 000000000..e9addad55 --- /dev/null +++ b/dongtai_web/scaupload/urls.py @@ -0,0 +1,47 @@ +"""webapi URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/3.0/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.conf.urls.static import static +from django.urls import include, path +from dongtai_web.scaupload.views import ( + SCADBMavenViewSet, + SCADBMavenBulkViewSet, + SCALicenseViewSet, + SCADBMavenBulkDeleteView, + SCATemplateViewSet +) +import os + +urlpatterns = [ + path('maven/bulk', + SCADBMavenBulkViewSet.as_view({ + 'get': 'list', + 'post': 'create', + })), + path('maven/bulk/delete', SCADBMavenBulkDeleteView.as_view()), + path( + 'maven/', + SCADBMavenViewSet.as_view({ + 'get': 'retrieve', + 'put': 'update', + 'delete': 'destory' + })), + path('maven', SCADBMavenViewSet.as_view({'post': 'create'})), + path('license_list', SCALicenseViewSet.as_view()), + path('maven/template/maven_sca', SCATemplateViewSet.as_view()), +] + + +urlpatterns = [path('api/v1/scadb/', include(urlpatterns))] diff --git a/dongtai_web/scaupload/utils.py b/dongtai_web/scaupload/utils.py new file mode 100644 index 000000000..2a847815d --- /dev/null +++ b/dongtai_web/scaupload/utils.py @@ -0,0 +1,36 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : utils +# @created : 星期六 1月 15, 2022 03:04:03 CST +# +# @description : +###################################################################### + + + +class ScaLibError(Exception): + pass + + + +def get_packge_from_sca_lib(**kwargs): + return get_from_sca_lib('/package/', **kwargs) + + +def get_packge_vul_from_sca_lib(**kwargs): + return get_from_sca_lib('/package_vul/', **kwargs) + + +def get_from_sca_lib(url, **kwargs): + from urllib.parse import urljoin + from dongtai_conf.settings import SCA_BASE_URL + import json + import requests + finalurl = urljoin(SCA_BASE_URL, url) + try: + resp = requests.get(url=finalurl, params=kwargs) + if resp.status_code == 200: + json = json.loads(resp.content.decode()) + except Exception as e: + raise ScaLibError('read from sca lib failure') + return json diff --git a/dongtai_web/scaupload/views.py b/dongtai_web/scaupload/views.py new file mode 100644 index 000000000..dc816dc31 --- /dev/null +++ b/dongtai_web/scaupload/views.py @@ -0,0 +1,239 @@ +from django.shortcuts import render +from dongtai_common.endpoint import UserEndPoint +from django.db.models import Q +from dongtai_common.models.sca_maven_db import ( + ScaMavenDb, + ImportFrom, +) +from rest_framework import serializers +from rest_framework import generics +from rest_framework.serializers import ValidationError +from rest_framework import viewsets +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ +from dongtai_common.permissions import TalentAdminPermission +from dongtai_common.endpoint import R +import csv +from django.http import FileResponse +from dongtai_conf.settings import BASE_DIR +import os +from dongtai_web.scaupload.utils import ( + get_packge_from_sca_lib, + ScaLibError, +) +from django.db.utils import IntegrityError + + +# Create your views here. 
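A minimal usage sketch for the `scaupload.utils` helpers defined above. It assumes `SCA_BASE_URL` in `dongtai_conf.settings` points at a service exposing the `/package/` route; the query-parameter names below are illustrative rather than a documented contract:

from dongtai_web.scaupload.utils import get_packge_from_sca_lib, ScaLibError

try:
    # kwargs are forwarded verbatim as query parameters to the /package/ path under SCA_BASE_URL
    packages = get_packge_from_sca_lib(ecosystem="maven",
                                       name="fastjson",
                                       version="1.2.24")
except ScaLibError:
    packages = None  # the request failed or the body was not valid JSON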
+ + +class ScaDBSerializer(serializers.Serializer): + name = serializers.CharField(required=False) + page_size = serializers.IntegerField(default=20, + help_text=_('Number per page')) + page = serializers.IntegerField(default=1, help_text=_('Page index')) + + +class ScaMavenDbSerializer(serializers.ModelSerializer): + class Meta: + model = ScaMavenDb + fields = '__all__' + + +class ScaMavenDbUploadSerializer(serializers.Serializer): + group_id = serializers.CharField() + atrifact_id = serializers.CharField() + version = serializers.CharField() + sha_1 = serializers.CharField() + package_name = serializers.CharField() + license = serializers.CharField(default='', required=False) + + +class ScaDeleteSerializer(serializers.Serializer): + ids = serializers.ListField(child=serializers.IntegerField()) + + +class SCADBMavenBulkViewSet(UserEndPoint, viewsets.ViewSet): + permission_classes_by_action = { + 'POST': (TalentAdminPermission,), + 'DELETE': (TalentAdminPermission,), + 'PUT': (TalentAdminPermission,), + } + + def get_permissions(self): + try: + return [permission() for permission in self.permission_classes_by_action[self.request.method]] + except KeyError: + return [permission() for permission in self.permission_classes] + + @extend_schema_with_envcheck([ScaDBSerializer], + summary=_('Get sca db bulk'), + description=_("Get sca list"), + tags=[_('SCA DB')]) + def list(self, request): + ser = ScaDBSerializer(data=request.GET) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + q = Q(import_from=ImportFrom.USER) + if ser.validated_data.get('name'): + q = Q(package_name__icontains=ser.validated_data['name']) + queryset = ScaMavenDb.objects.filter(q) + page_summary, page_data = self.get_paginator( + queryset, ser.validated_data['page'], + ser.validated_data['page_size']) + return R.success(data=ScaMavenDbSerializer(page_data, many=True).data, + page=page_summary) + + @extend_schema_with_envcheck(request=ScaMavenDbUploadSerializer, + summary=_('Get sca db bulk'), + description=_("Get sca list"), + tags=[_('SCA DB')]) + def create(self, request): + if not request.FILES.get('file'): + return R.failure(msg='file required') + stream = request.FILES['file'].read().replace(b'\xEF\xBB\xBF', b'') + decoded_files = stream.decode('utf-8').splitlines() + reader = csv.DictReader(decoded_files) + datas = [dict(row) for row in reader] + ser = ScaMavenDbUploadSerializer(data=datas, many=True) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + objs = [ScaMavenDb(**i) for i in ser.validated_data] + ScaMavenDb.objects.bulk_create(objs, ignore_conflicts=True) + return R.success() + + +class SCADBMavenBulkDeleteView(UserEndPoint): + permission_classes_by_action = { + 'POST': (TalentAdminPermission,), + 'DELETE': (TalentAdminPermission,), + 'PUT': (TalentAdminPermission,), + } + + def get_permissions(self): + try: + return [permission() for permission in self.permission_classes_by_action[self.request.method]] + except KeyError: + return [permission() for permission in self.permission_classes] + + @extend_schema_with_envcheck(request=ScaDeleteSerializer, + summary=_('Get sca db bulk'), + description=_("Get sca list"), + tags=[_('SCA DB')]) + def post(self, request): + ids = request.data.get('ids') + ScaMavenDb.objects.filter(pk__in=ids).delete() + return R.success() + + +class SCADBMavenViewSet(UserEndPoint, viewsets.ViewSet): + permission_classes_by_action = { + 'POST': (TalentAdminPermission,), + 'DELETE': 
(TalentAdminPermission,), + 'PUT': (TalentAdminPermission,), + } + + def get_permissions(self): + try: + return [permission() for permission in self.permission_classes_by_action[self.request.method]] + except KeyError: + return [permission() for permission in self.permission_classes] + + @extend_schema_with_envcheck(summary=_('Get sca db'), + description=_("Get sca list"), + tags=[_('SCA DB')]) + def retrieve(self, request, pk): + q = Q(pk=pk) + data = ScaMavenDb.objects.filter(q).first() + return R.success(data=ScaDBSerializer(data).data) + + @extend_schema_with_envcheck(request=ScaMavenDbUploadSerializer, + summary=_('Get sca db'), + description=_("Get sca list"), + tags=[_('SCA DB')]) + def create(self, request): + ser = ScaMavenDbUploadSerializer(data=request.data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + try: + ScaMavenDb.objects.create(**ser.data) + except IntegrityError as e: + return R.failure(msg='same sha_1 component exists') + return R.success() + + @extend_schema_with_envcheck(summary=_('Get sca db'), + description=_("Get sca list"), + tags=[_('SCA DB')]) + def destory(self, request, pk): + q = Q(pk=pk) + data = ScaMavenDb.objects.filter(q).delete() + return R.success() + + @extend_schema_with_envcheck(request=ScaMavenDbUploadSerializer, + summary=_('Get sca db'), + description=_("Get sca list"), + tags=[_('SCA DB')]) + def update(self, request, pk): + ser = ScaMavenDbUploadSerializer(data=request.data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + q = Q(pk=pk) + ScaMavenDb.objects.filter(q).update(**ser.data) + return R.success() + + +LICENSE_LIST = [ + 'Apache-1.0', 'Apache-1.1', 'Apache-2.0', '0BSD', 'BSD-1-Clause', + 'BSD-2-Clause-FreeBSD', 'BSD-2-Clause-NetBSD', 'BSD-2-Clause-Patent', + 'BSD-2-Clause-Views', 'BSD-2-Clause', 'BSD-3-Clause-Attribution', + 'BSD-3-Clause-Clear', 'BSD-3-Clause-LBNL', 'BSD-3-Clause-Modification', + 'BSD-3-Clause-No-Military-License', 'BSD-3-Clause-No-Nuclear-License-2014', + 'BSD-3-Clause-No-Nuclear-License', 'BSD-3-Clause-No-Nuclear-Warranty', + 'BSD-3-Clause-Open-MPI', 'BSD-3-Clause', 'BSD-4-Clause-Shortened', + 'BSD-4-Clause-UC', 'BSD-4-Clause', 'BSD-Protection', 'BSD-Source-Code', + 'AGPL-1.0-only', 'AGPL-1.0-or-later', 'AGPL-1.0', 'AGPL-3.0-only', + 'AGPL-3.0-or-later', 'AGPL-3.0', 'GPL-1.0+', 'GPL-1.0-only', + 'GPL-1.0-or-later', 'GPL-1.0', 'GPL-2.0+', 'GPL-2.0-only', + 'GPL-2.0-or-later', 'GPL-2.0-with-autoconf-exception', + 'GPL-2.0-with-bison-exception', 'GPL-2.0-with-classpath-exception', + 'GPL-2.0-with-font-exception', 'GPL-2.0-with-GCC-exception', 'GPL-2.0', + 'GPL-3.0+', 'GPL-3.0-only', 'GPL-3.0-or-later', + 'GPL-3.0-with-autoconf-exception', 'GPL-3.0-with-GCC-exception', 'GPL-3.0', + 'LGPL-2.0+', 'LGPL-2.0-only', 'LGPL-2.0-or-later', 'LGPL-2.0', 'LGPL-2.1+', + 'LGPL-2.1-only', 'LGPL-2.1-or-later', 'LGPL-2.1', 'LGPL-3.0+', + 'LGPL-3.0-only', 'LGPL-3.0-or-later', 'LGPL-3.0', 'LGPLLR', 'MIT-0', + 'MIT-advertising', 'MIT-CMU', 'MIT-enna', 'MIT-feh', 'MIT-Modern-Variant', + 'MIT-open-group', 'MIT', 'MITNFA', 'MPL-1.0', 'MPL-1.1', + 'MPL-2.0-no-copyleft-exception', 'MPL-2.0' +] + + +class SCALicenseViewSet(UserEndPoint): + @extend_schema_with_envcheck(summary=_('Get sca license list'), + description=_("Get sca list"), + tags=[_('SCA DB')]) + def get(self, request): + return R.success(data=LICENSE_LIST) + + +class SCATemplateViewSet(UserEndPoint): + @extend_schema_with_envcheck(summary=_('Get sca license list'), + 
description=_("Get sca list"), + tags=[_('SCA DB')]) + def get(self, request): + return FileResponse(open( + os.path.join(BASE_DIR, 'static/assets/template/maven_sca.csv'), + 'rb'), + filename='maven_sca.csv') diff --git a/dongtai_web/serializers/__init__.py b/dongtai_web/serializers/__init__.py new file mode 100644 index 000000000..a059c796f --- /dev/null +++ b/dongtai_web/serializers/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/22 18:28 +# software: PyCharm +# project: webapi diff --git a/dongtai_web/serializers/agent.py b/dongtai_web/serializers/agent.py new file mode 100644 index 000000000..56ddab62e --- /dev/null +++ b/dongtai_web/serializers/agent.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import time +from dongtai_common.models.heartbeat import IastHeartbeat + +from rest_framework import serializers + +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ +from dongtai_common.models.agent_method_pool import MethodPool +from collections import defaultdict + +class AgentSerializer(serializers.ModelSerializer): + USER_MAP = dict() + SERVER_MAP = dict() + system_load = serializers.SerializerMethodField() + running_status = serializers.SerializerMethodField() + server = serializers.SerializerMethodField() + owner = serializers.SerializerMethodField() + flow = serializers.SerializerMethodField() + report_queue = serializers.SerializerMethodField() + method_queue = serializers.SerializerMethodField() + replay_queue = serializers.SerializerMethodField() + alias = serializers.SerializerMethodField() + register_time = serializers.SerializerMethodField() + latest_time = serializers.SerializerMethodField() + + class Meta: + model = IastAgent + fields = [ + 'id', 'token', 'server', 'running_status', 'system_load', 'owner', + 'latest_time', 'project_name', 'is_core_running', 'language', + 'flow', 'is_control', 'report_queue', 'method_queue', + 'replay_queue', 'alias', 'register_time', 'startup_time', 'is_audit' + ] + + def get_latest_heartbeat(self, obj): + try: + latest_heartbeat = getattr(obj, 'latest_heartbeat') + except Exception as heartbeat_not_found: + latest_heartbeat = obj.heartbeats.values('dt', 'cpu').order_by('-dt').first() + setattr(obj, 'latest_heartbeat', latest_heartbeat) + return latest_heartbeat + + def get_running_status(self, obj): + mapping = defaultdict(str) + mapping.update({1: _("Online"), 0: _("Offline")}) + return mapping[obj.online] + + def get_system_load(self, obj): + """ + :param obj: + :return: + """ + heartbeat = self.get_latest_heartbeat(obj) + if heartbeat: + return heartbeat['cpu'] + else: + return _("Load data is not uploaded") + + def get_server(self, obj): + def get_server_addr(): + if obj.server_id not in self.SERVER_MAP: + if obj.server.ip and obj.server.port and obj.server.port != 0: + self.SERVER_MAP[ + obj.server_id] = f'{obj.server.ip}:{obj.server.port}' + else: + return _('No flow is detected by the probe') + return self.SERVER_MAP[obj.server_id] + + if obj.server_id: + return get_server_addr() + return _('No flow is detected by the probe') + + def get_user(self, obj): + if obj.user_id not in self.USER_MAP: + self.USER_MAP[obj.user_id] = obj.user.get_username() + return self.USER_MAP[obj.user_id] + + def get_owner(self, obj): + return self.get_user(obj) + + def get_flow(self, obj): + heartbeat = IastHeartbeat.objects.values('req_count').filter( 
+ agent=obj).first() + return heartbeat['req_count'] if heartbeat else 0 + + def get_method_queue(self, obj): + heartbeat = IastHeartbeat.objects.values('method_queue').filter( + agent_id=obj.id).order_by('-dt').first() + return heartbeat['method_queue'] if heartbeat is not None else 0 + + def get_report_queue(self, obj): + heartbeat = IastHeartbeat.objects.values('report_queue').filter( + agent_id=obj.id).order_by('-dt').first() + return heartbeat['report_queue'] if heartbeat is not None else 0 + + def get_replay_queue(self, obj): + heartbeat = IastHeartbeat.objects.values('replay_queue').filter( + agent_id=obj.id).order_by('-dt').first() + return heartbeat['replay_queue'] if heartbeat is not None else 0 + + def get_register_time(self, obj): + if obj.register_time == 0: + return obj.latest_time + return obj.register_time + + def get_alias(self, obj): + if obj.alias == '': + return obj.token + return obj.alias + + def get_latest_time(self, obj): + latest_heartbeat = obj.heartbeats.values_list( + 'dt', flat=True).order_by('-dt').first() + if latest_heartbeat: + return latest_heartbeat + return obj.latest_time + + +class ProjectEngineSerializer(serializers.ModelSerializer): + class Meta: + model = IastAgent + fields = ['id', 'token', 'is_core_running'] + + + +class AgentToggleArgsSerializer(serializers.Serializer): + id = serializers.IntegerField(help_text=_( + 'The id corresponding to the agent.')) + ids = serializers.CharField(help_text=_( + 'The id corresponding to the agent, use"," for segmentation.')) + + +class AgentInstallArgsSerializer(serializers.Serializer): + id = serializers.IntegerField(help_text=_( + 'The id corresponding to the agent.')) diff --git a/dongtai_web/serializers/agent_config.py b/dongtai_web/serializers/agent_config.py new file mode 100644 index 000000000..307660e31 --- /dev/null +++ b/dongtai_web/serializers/agent_config.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh + +import time + +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ + + +class AgentConfigSettingSerializer(serializers.Serializer): + + details = serializers.JSONField(help_text=_('The details config to the agent.'), required=True) + hostname = serializers.CharField(help_text=_('The hostname of the agent.'), max_length=100, required=False, allow_blank=True) + ip = serializers.CharField(help_text=_('The ip of the agent.'), max_length=100, required=False, allow_blank=True) + port = serializers.IntegerField(help_text=_('The port of the agent.'), required=False, default=80) + id = serializers.IntegerField(help_text=_('The port of the agent.'), required=False, default=None) + cluster_name = serializers.CharField(help_text=_('The cluster_name of the agent.'), max_length=255,required=False, allow_blank=True) + cluster_version = serializers.CharField(help_text=_('The cluster_version of the agent.'), max_length=100,required=False, allow_blank=True) + priority = serializers.IntegerField(help_text=_('The priority of the agent.'), required=True) + + +class AgentWebHookSettingSerializer(serializers.Serializer): + + id = serializers.IntegerField(help_text=_('The id of the webHook.'), required=False) + type_id = serializers.IntegerField(help_text=_('The type of the webHook.'), required=True) + headers = serializers.JSONField(help_text=_('The details config to the agent.'), required=False) + url = serializers.CharField(help_text=_('The cluster_name of the agent.'), max_length=255, required=True) + + +class 
AgentWebHookDelSerializer(serializers.Serializer): + + id = serializers.IntegerField(help_text=_('The id of the webHook.'), required=True) diff --git a/dongtai_web/serializers/aggregation.py b/dongtai_web/serializers/aggregation.py new file mode 100644 index 000000000..18667b0cc --- /dev/null +++ b/dongtai_web/serializers/aggregation.py @@ -0,0 +1,94 @@ +from abc import ABC + +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ + +from rest_framework.serializers import ValidationError + + +def intable_str(value: str): + try: + list(map(int, value.split(','))) + except ValueError: + raise serializers.ValidationError('Not int able after str split') + +class AggregationArgsSerializer(serializers.Serializer): + page_size = serializers.IntegerField(default=20, + min_value=1, + help_text=_('Number per page')) + page = serializers.IntegerField(default=1, + min_value=1, + help_text=_('Page index')) + + order_type = serializers.IntegerField(default=0, help_text=_('Order by')) + order_type_desc = serializers.IntegerField(default=0, help_text=_('Order by desc')) + + bind_project_id = serializers.IntegerField(default=0, help_text=_('bind_project_id')) + project_version_id = serializers.IntegerField(default=0, help_text=_('project_version_id')) + + level_id_str = serializers.CharField( + required=False, + max_length=12, + error_messages={ + "level_str": _("Length limit exceeded") + }, + validators=[intable_str], + ) + project_id_str = serializers.CharField( + required=False, + max_length=255, + error_messages={ + "project_id_str": _("Length limit exceeded") + }, + validators=[intable_str], + ) + + keywords = serializers.CharField( + required=False, + max_length=100, + error_messages={ + "keywords": _("Length limit exceeded") + }, + help_text=_('Keywords select') + ) + + source_type_str = serializers.CharField( + required=False, + max_length=6, + error_messages={ + "source_type_str": _("Length limit exceeded") + }, + validators=[intable_str], + ) + availability_str = serializers.CharField( + required=False, + max_length=12, + error_messages={ + "availability_str": _("Length limit exceeded") + }, + validators=[intable_str], + ) + hook_type_id_str = serializers.CharField( + required=False, + max_length=100, + error_messages={ + "hook_type_str": _("Length limit exceeded") + }, + validators=[intable_str], + ) + language_str = serializers.CharField( + required=False, + max_length=12, + error_messages={ + "language_str": _("Length limit exceeded") + }, + validators=[intable_str], + ) + status_id_str = serializers.CharField( + required=False, + max_length=12, + error_messages={ + "status_id_str": _("Length limit exceeded") + }, + validators=[intable_str], + ) diff --git a/dongtai_web/serializers/asset.py b/dongtai_web/serializers/asset.py new file mode 100644 index 000000000..3cb012457 --- /dev/null +++ b/dongtai_web/serializers/asset.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-engine +from dongtai_common.models.asset import Asset +from rest_framework import serializers + + +class AssetSerializer(serializers.ModelSerializer): + class Meta: + model = Asset + fields = ['package_name', 'vul_count', 'version'] diff --git a/dongtai_web/serializers/department.py b/dongtai_web/serializers/department.py new file mode 100644 index 000000000..e622635dc --- /dev/null +++ b/dongtai_web/serializers/department.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# 
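# Usage sketch (illustration only; assumes nothing beyond the code shown in
# dongtai_web/serializers/aggregation.py above): how the intable_str validator
# treats the comma-separated id filters such as level_id_str or project_id_str.
# The helper is copied verbatim so the sketch is self-contained.
from rest_framework import serializers


def intable_str(value: str):
    try:
        list(map(int, value.split(',')))
    except ValueError:
        raise serializers.ValidationError('Not int able after str split')


intable_str('1,2,3')      # accepted: every comma-separated segment parses as an int
try:
    intable_str('1,a,3')  # rejected: 'a' is not an integer
except serializers.ValidationError as exc:
    print(exc.detail)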
software: PyCharm +# project: lingzhi-webapi +from rest_framework import serializers + +from dongtai_common.models import User +from dongtai_common.models.department import Department + + +class DepartmentSerializer(serializers.ModelSerializer): + user_count = serializers.SerializerMethodField() + created = serializers.SerializerMethodField() + + class Meta: + model = Department + fields = ('id', 'name', 'create_time', 'update_time', 'user_count', + 'created', 'principal_id') + + def get_user_count(self, obj): + return obj.users.count() + + def get_created(self, obj): + user = User.objects.filter(id=obj.created_by).first() + return user.get_username() diff --git a/dongtai_web/serializers/errorlog.py b/dongtai_web/serializers/errorlog.py new file mode 100644 index 000000000..3b353a949 --- /dev/null +++ b/dongtai_web/serializers/errorlog.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/22 18:28 +# software: PyCharm +# project: webapi + +import time + +from rest_framework import serializers + + +class ErrorlogSerializer(serializers.Serializer): + errorlog = serializers.CharField() + state = serializers.CharField() + dt = serializers.IntegerField(default=int(time.time())) + agent_app = serializers.CharField() diff --git a/dongtai_web/serializers/heartbeat.py b/dongtai_web/serializers/heartbeat.py new file mode 100644 index 000000000..ebf8faf4d --- /dev/null +++ b/dongtai_web/serializers/heartbeat.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/22 18:29 +# software: PyCharm +# project: webapi diff --git a/dongtai_web/serializers/hook_strategy.py b/dongtai_web/serializers/hook_strategy.py new file mode 100644 index 000000000..b6f2e4f8b --- /dev/null +++ b/dongtai_web/serializers/hook_strategy.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-engine +from dongtai_common.models import User +from dongtai_common.models.hook_strategy import HookStrategy +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ +from django.utils.text import format_lazy +from dongtai_common.utils import const + + +SINK_POSITION_HELP_TEXT = _(""" +Examples in a single case: O, P<1,2,3,4,...>, R +Combination situation: O&R, O&P1, etc. 
+O represents the object itself; R represents the return value; P represents the parameter, and the number represents the position of the parameter +""") + +HOOK_TYPE_CHOICE = (const.RULE_SOURCE, const.RULE_ENTRY_POINT, + const.RULE_PROPAGATOR, const.RULE_FILTER, const.RULE_SINK) + + +class SinkSerialize(serializers.ModelSerializer): + class Meta: + model = HookStrategy + fields = ['value'] + + +class HookRuleSerializer(serializers.ModelSerializer): + USER = dict() + rule_type = serializers.SerializerMethodField( + help_text=_('The name of hook rule type.')) + rule_type_id = serializers.SerializerMethodField( + help_text=_('The id of hook rule type.')) + user = serializers.SerializerMethodField( + help_text=_('The user who created the hook rule type.')) + id = serializers.IntegerField(help_text=_('The id of strategy')) + value = serializers.CharField( + help_text=_('The value of strategy'), + max_length=255, + ) + source = serializers.CharField( + help_text=format_lazy("{}\n{}", _("Source of taint"), + SINK_POSITION_HELP_TEXT), + max_length=255, + ) + target = serializers.CharField( + help_text=format_lazy("{}\n{}", _("Target of taint"), + SINK_POSITION_HELP_TEXT), + max_length=255, + ) + inherit = serializers.CharField( + help_text= + _('Inheritance type, false-only detect current class, true-inspect subclasses, all-check current class and subclasses' + ), + max_length=255, + ) + track = serializers.CharField( + help_text= + _("Indicates whether taint tracking is required, true-required, false-not required." + ), + max_length=5, + ) + update_time = serializers.IntegerField( + help_text=_("The update time of hook strategy"), ) + enable = serializers.IntegerField(help_text=_( + "The enabled state of the hook strategy: 0-disabled, 1-enabled, -1-deleted" + ), + default=1) + + class Meta: + model = HookStrategy + fields = ['id', 'rule_type_id', 'rule_type', 'value', 'source', 'target', 'inherit', 'track', 'update_time', + 'enable', 'user'] + + def get_rule_type(self, obj): + rule_type = obj.type.first() + if rule_type: + return rule_type.name + else: + return 'Unknown' + + def get_rule_type_id(self, obj): + rule_type = obj.type.first() + if rule_type: + return rule_type.id + else: + return -1 + + def get_user(self, obj): + if obj.created_by not in self.USER: + temp_user = User.objects.filter(id=obj.created_by).first() + self.USER[obj.created_by] = temp_user.get_username() if temp_user else '' + return self.USER[obj.created_by] diff --git a/dongtai_web/serializers/hook_type_strategy.py b/dongtai_web/serializers/hook_type_strategy.py new file mode 100644 index 000000000..2f6a54e84 --- /dev/null +++ b/dongtai_web/serializers/hook_type_strategy.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-engine +from dongtai_common.models.hook_type import HookType +from rest_framework import serializers + + +class HookTypeSerialize(serializers.ModelSerializer): + class Meta: + model = HookType + fields = ['id', 'name'] diff --git a/dongtai_web/serializers/log.py b/dongtai_web/serializers/log.py new file mode 100644 index 000000000..e02484efd --- /dev/null +++ b/dongtai_web/serializers/log.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from django.contrib.admin.models import LogEntry +from rest_framework import serializers + + +class BaseLogSerializer(serializers.ModelSerializer): + user = serializers.SerializerMethodField() + + class Meta: + model 
= LogEntry + + def get_user(self, obj): + return obj.user.get_short_name() + + +class LogSerializer(BaseLogSerializer): + user = serializers.SerializerMethodField() + content_type = serializers.SerializerMethodField() + + class Meta: + model = LogEntry + fields = ['id', 'action_time', 'user', 'content_type', 'object_id', 'object_repr', 'action_flag', + 'change_message'] + + def get_user(self, obj): + return obj.user.get_short_name() + + def get_content_type(self, obj): + return obj.content_type.app_labeled_name + + +class LogExportSerializer(BaseLogSerializer): + class Meta: + model = LogEntry + fields = ['action_time', 'user', 'content_type', 'object_id', 'object_repr', 'action_flag', + 'change_message'] diff --git a/dongtai_web/serializers/login.py b/dongtai_web/serializers/login.py new file mode 100644 index 000000000..ed72e3b1b --- /dev/null +++ b/dongtai_web/serializers/login.py @@ -0,0 +1,24 @@ +#!/usr/local/env python +# -*- coding: utf-8 -*- +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ + + +class LoginSerializer(serializers.Serializer): + """ + https://www.django-rest-framework.org/api-guide/fields/ + """ + username = serializers.CharField( + required=True, + max_length=12, + error_messages={ + "username": _("Username should not be empty") + } + ) + password = serializers.CharField( + required=True, + max_length=6, + error_messages={ + "password": _("Password should not be blank") + } + ) diff --git a/dongtai_web/serializers/method_pool.py b/dongtai_web/serializers/method_pool.py new file mode 100644 index 000000000..e264d9585 --- /dev/null +++ b/dongtai_web/serializers/method_pool.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-engine + +from rest_framework import serializers + +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.asset import Asset +from dongtai_common.utils import http + +from dongtai_web.serializers.asset import AssetSerializer + + +class MethodPoolSerialize(serializers.ModelSerializer): + DEPENDENCIES = dict() + AGENTS = dict() + request = serializers.SerializerMethodField() + response = serializers.SerializerMethodField() + dependencies = serializers.SerializerMethodField() + language = serializers.SerializerMethodField() + + class Meta: + model = MethodPool + fields = ['url', 'request', 'response', 'language', 'dependencies'] + + def get_request(self, obj): + return http.build_request(obj.http_method, obj.req_header, obj.uri, obj.req_params, obj.req_data, + obj.http_protocol) + + def get_response(self, obj): + return http.build_response(obj.res_header, obj.res_body) + + def get_dependencies(self, obj): + if obj.agent_id not in self.DEPENDENCIES: + dependencies = obj.agent.dependencies.values('package_name', 'vul_count', 'version').all() + self.DEPENDENCIES[obj.agent_id] = AssetSerializer(dependencies, many=True).data + return self.DEPENDENCIES[obj.agent_id] + + def get_language(self, obj): + return obj.agent.language + + +class MethodPoolListSerialize(serializers.ModelSerializer): + DEPENDENCIES = dict() + AGENTS = dict() + rule = serializers.SerializerMethodField() + level = serializers.SerializerMethodField() + agent_name = serializers.SerializerMethodField() + language = serializers.SerializerMethodField() + + def __init__(self, rule, level, **kwargs): + super().__init__(**kwargs) + self._rule = rule + self._level = level + + class Meta: + model = MethodPool + fields = ['id', 'url', 
'req_params', 'language', 'update_time', 'rule', 'level', 'agent_name'] + + def get_rule(self, obj): + return self._rule + + def get_level(self, obj): + return self._level + + def get_agent_name(self, obj): + if obj.agent_id not in self.AGENTS: + self.AGENTS[obj.agent_id] = obj.agent.token + return self.AGENTS[obj.agent_id] + + def get_language(self, obj): + return obj.agent.language + + +if __name__ == '__main__': + d = Asset() diff --git a/dongtai_web/serializers/project.py b/dongtai_web/serializers/project.py new file mode 100644 index 000000000..40e5b46c6 --- /dev/null +++ b/dongtai_web/serializers/project.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from django.db.models import Count +from rest_framework import serializers + +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project import (IastProject, VulValidation) +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_common.utils import const +from dongtai_common.utils.systemsettings import get_vul_validate + +class ProjectSerializer(serializers.ModelSerializer): + vul_count = serializers.SerializerMethodField( + help_text="Vulnerability Count") + owner = serializers.SerializerMethodField(help_text="Project owner") + agent_count = serializers.SerializerMethodField( + help_text="Project current surviving agent") + agent_language = serializers.SerializerMethodField( + help_text="Agent language currently included in the project") + USER_MAP = {} + + class Meta: + model = IastProject + fields = [ + 'id', 'name', 'mode', 'vul_count', 'agent_count', 'owner', + 'latest_time', 'agent_language', 'vul_validation' + ] + + def get_agents(self, obj): + try: + all_agents = getattr(obj, 'project_agents') + except Exception as agent_not_found: + all_agents = IastAgent.objects.values('id').filter(bind_project_id=obj.id) + setattr(obj, 'project_agents', all_agents) + return all_agents + + def get_vul_count(self, obj): + agents = self.get_agents(obj) + vul_levels = IastVulnerabilityModel.objects.values('level').filter( + agent__in=agents, is_del=0).annotate(total=Count('level')) + for vul_level in vul_levels: + level = IastVulLevel.objects.get(id=vul_level['level']) + vul_level['name'] = level.name_value + return list(vul_levels) if vul_levels else list() + + def get_owner(self, obj): + if obj not in self.USER_MAP: + self.USER_MAP[obj] = obj.user.get_username() + return self.USER_MAP[obj] + + def get_agent_count(self, obj): + return self.get_agents(obj).filter(online=const.RUNNING).count() + + def get_agent_language(self, obj): + res = self.get_agents(obj).all().values_list( + 'language', flat=True).distinct() + return list(res) + diff --git a/dongtai_web/serializers/sca.py b/dongtai_web/serializers/sca.py new file mode 100644 index 000000000..a0b2ac40a --- /dev/null +++ b/dongtai_web/serializers/sca.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.asset_aggr import AssetAggr +from dongtai_common.models.project_version import IastProjectVersion +from rest_framework import serializers + +from dongtai_common.models.asset import Asset +from dongtai_common.models.project import IastProject +from django.utils.translation 
import gettext_lazy as _ +from dongtai_common.models.sca_maven_db import ScaMavenDb +from dongtai_web.dongtai_sca.models import PackageLicenseLevel, PackageLicenseInfo, Package + + +class ScaSerializer(serializers.ModelSerializer): + project_name = serializers.SerializerMethodField() + package_name = serializers.SerializerMethodField() + project_id = serializers.SerializerMethodField() + project_version = serializers.SerializerMethodField() + level = serializers.SerializerMethodField() + level_type = serializers.SerializerMethodField() + agent_name = serializers.SerializerMethodField() + language = serializers.SerializerMethodField() + license = serializers.SerializerMethodField() + project_cache = dict() + project_version_cache = dict() + AGENT_LANGUAGE_MAP = {} + + class Meta: + model = Asset + fields = [ + 'id', 'package_name', 'version', 'project_name', 'project_id', + 'project_version', 'language', 'package_path', 'agent_name', + 'signature_value', 'level', 'level_type', 'vul_count', 'dt', 'license' + ] + + def get_project_name(self, obj): + project_id = obj.agent.bind_project_id + if project_id == 0: + return _("The application has not been binded") + else: + if project_id in self.project_cache: + return self.project_cache[project_id] + else: + project = IastProject.objects.filter(id=project_id).first() + + self.project_cache[project_id] = project.name if project else "" + return self.project_cache[project_id] + + def get_project_id(self, obj): + return obj.agent.bind_project_id + + def get_project_version(self, obj): + project_version_id = obj.agent.project_version_id + if project_version_id: + if project_version_id in self.project_version_cache: + return self.project_version_cache[project_version_id] + else: + project_version = IastProjectVersion.objects.values('version_name').filter( + id=project_version_id).first() + self.project_version_cache[project_version_id] = project_version['version_name'] + + return self.project_version_cache[project_version_id] + else: + return _('No application version has been created') + + def get_level_type(self, obj): + return obj.level.id + + def get_level(self, obj): + return obj.level.name_value + + def get_agent_name(self, obj): + return obj.agent.token + + def get_language(self, obj): + if obj.agent_id not in self.AGENT_LANGUAGE_MAP: + agent_model = IastAgent.objects.filter(id=obj.agent_id).first() + if agent_model: + self.AGENT_LANGUAGE_MAP[obj.agent_id] = agent_model.language + return self.AGENT_LANGUAGE_MAP[obj.agent_id] + + def get_license(self, obj): + try: + if 'license_dict' not in self.context: + sca_package = Package.objects.filter(hash=obj.signature_value).first() + return sca_package.license + return self.context['license_dict'].get(obj.signature_value, '') + except Exception as e: + return '' + + def get_package_name(self, obj): + if obj.package_name.startswith('maven:') and obj.package_name.endswith(':'): + return obj.package_name.replace('maven:', '', 1)[:-1] + return obj.package_name + + +class ScaAssetSerializer(serializers.ModelSerializer): + package_name = serializers.SerializerMethodField() + level = serializers.SerializerMethodField() + level_type = serializers.SerializerMethodField() + license = serializers.SerializerMethodField() + license_level = serializers.SerializerMethodField() + license_desc = serializers.SerializerMethodField() + vul_high_count = serializers.SerializerMethodField() + project_count = serializers.SerializerMethodField() + class Meta: + model = Asset + fields = [ + 'id', 'package_name', 'version', 
'safe_version', 'last_version', + 'language', 'signature_value', 'level', 'level_type', 'vul_count', + 'vul_high_count', 'vul_medium_count', 'vul_low_count', + 'vul_info_count', 'project_count', 'safe_version_list', + 'nearest_safe_version', 'license', 'latest_safe_version','license_list','highest_license', + 'license_level', 'license_desc' + ] + + def get_level_type(self, obj): + return obj.level.id + + def get_level(self, obj): + return obj.level.name_value + + def get_package_name(self, obj): + if obj.package_name.startswith('maven:') and obj.package_name.endswith(':'): + return obj.package_name.replace('maven:', '', 1)[:-1] + return obj.package_name + + def get_license(self, obj): + if not obj.license: + obj.license = '未知' + return obj.license + + def get_license_level(self, obj): + obj.license_level = 0 + obj.license_desc = "允许商业集成" + if obj.license: + license_level = PackageLicenseLevel.objects.filter(identifier=obj.license).first() + obj.license_level = license_level.level_id if license_level else 0 + obj.license_desc = license_level.level_desc if license_level else "允许商业集成" + + return obj.license_level + + def get_license_desc(self, obj): + + return obj.license_desc + + def get_vul_high_count(self, obj): + return obj.vul_high_count + obj.vul_critical_count + + def get_project_count(self, obj): + asset_aggr = AssetAggr.objects.filter( + signature_value=obj.signature_value).first() + return asset_aggr.project_count if asset_aggr else 0 diff --git a/dongtai_web/serializers/strategy.py b/dongtai_web/serializers/strategy.py new file mode 100644 index 000000000..9a473c885 --- /dev/null +++ b/dongtai_web/serializers/strategy.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from rest_framework import serializers + +from dongtai_common.models.strategy import IastStrategyModel + + +class StrategySerializer(serializers.ModelSerializer): + class Meta: + model = IastStrategyModel + fields = ['id', 'vul_type','vul_fix', 'level_id', 'state', 'vul_name', 'vul_desc', 'dt'] diff --git a/dongtai_web/serializers/talent.py b/dongtai_web/serializers/talent.py new file mode 100644 index 000000000..d9f990776 --- /dev/null +++ b/dongtai_web/serializers/talent.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from rest_framework import serializers + +from dongtai_common.models import User +from dongtai_common.models.talent import Talent + + +class TalentSerializer(serializers.ModelSerializer): + created = serializers.SerializerMethodField() + + class Meta: + model = Talent + fields = [ + 'id', 'talent_name', 'create_time', 'update_time', 'created', + 'is_active' + ] + + def get_created(self, obj): + user = User.objects.filter(id=obj.created_by).first() + return user.get_username() diff --git a/dongtai_web/serializers/user.py b/dongtai_web/serializers/user.py new file mode 100644 index 000000000..250c7d57a --- /dev/null +++ b/dongtai_web/serializers/user.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from rest_framework import serializers + +from dongtai_common.models import User + + +class UserSerializer(serializers.ModelSerializer): + department = serializers.SerializerMethodField() + talent = serializers.SerializerMethodField() + + class Meta: + model = User + fields = ['id', 'username', 'email', 'is_superuser', 'phone', 'talent', 'department', 
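# Illustrative sketch of the package_name normalisation shared by
# ScaSerializer.get_package_name and ScaAssetSerializer.get_package_name above.
# Assumption: Maven components are stored as 'maven:<group>:<artifact>:<version>:';
# the 'maven:' prefix and the trailing ':' are stripped for display, while
# non-Maven names pass through unchanged.
def normalize_package_name(package_name: str) -> str:
    if package_name.startswith('maven:') and package_name.endswith(':'):
        return package_name.replace('maven:', '', 1)[:-1]
    return package_name


assert normalize_package_name(
    'maven:org.apache.logging.log4j:log4j-core:2.14.1:'
) == 'org.apache.logging.log4j:log4j-core:2.14.1'
assert normalize_package_name('flask') == 'flask'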
'is_active', + 'date_joined', 'last_login'] + + def get_department(self, obj): + department = obj.department.filter().first() + return {'name': department.get_department_name(), 'id': department.id} if department else {'name': '', 'id': -1} + + def get_talent(self, obj): + talent = obj.get_talent() + return talent.get_talent_name() if talent else '' diff --git a/dongtai_web/serializers/vul.py b/dongtai_web/serializers/vul.py new file mode 100644 index 000000000..82a811e53 --- /dev/null +++ b/dongtai_web/serializers/vul.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/22 18:29 +# software: PyCharm +# project: webapi +from dongtai_common.models.agent import IastAgent +from rest_framework import serializers + +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_common.models.hook_type import HookType +from django.utils.translation import gettext_lazy as _ +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vul_level import IastVulLevel + + +class VulSerializer(serializers.ModelSerializer): + language = serializers.SerializerMethodField() + type = serializers.SerializerMethodField() + AGENT_LANGUAGE_MAP = {} + status = serializers.SerializerMethodField() + class Meta: + model = IastVulnerabilityModel + fields = [ + 'id', 'type', 'hook_type_id', 'url', 'uri', 'agent_id', 'level_id', + 'http_method', 'top_stack', 'bottom_stack', 'taint_position', + 'latest_time', 'first_time', 'language', 'status' + ] + + @staticmethod + def split_container_name(name): + if name is None: + return "" + if '/' in name: + return name.split('/')[0].lower().strip() + elif ' ' in name: + names = name.split(' ')[:-1] + return ' '.join(names).lower().strip() + return name + + def get_language(self, obj): + if obj['agent_id'] not in self.AGENT_LANGUAGE_MAP: + agent_model = IastAgent.objects.filter(id=obj['agent_id']).first() + if agent_model: + self.AGENT_LANGUAGE_MAP[obj['agent_id']] = agent_model.language + return self.AGENT_LANGUAGE_MAP[obj['agent_id']] + + def get_type(self, obj): + hook_type = HookType.objects.filter(pk=obj['hook_type_id']).first() + hook_type_name = hook_type.name if hook_type else None + strategy = IastStrategyModel.objects.filter(pk=obj['strategy_id']).first() + strategy_name = strategy.vul_name if strategy else None + type_ = list( + filter(lambda x: x is not None, [strategy_name, hook_type_name])) + return type_[0] if type_ else '' + + def get_status(self, obj): + status = IastVulnerabilityStatus.objects.filter( + pk=obj['status_id']).first() + return status.name if status else '' + + +class VulForPluginSerializer(serializers.ModelSerializer): + type = serializers.SerializerMethodField() + level = serializers.SerializerMethodField(help_text=_("The level name of vulnerablity")) + class Meta: + model = IastVulnerabilityModel + fields = [ + 'id', 'type', 'level_id', 'url', 'http_method', 'top_stack', + 'bottom_stack', 'hook_type_id', 'level' + ] + + def get_type(self, obj): + hook_type = HookType.objects.filter(pk=obj['hook_type_id']).first() + hook_type_name = hook_type.name if hook_type else None + strategy = IastStrategyModel.objects.filter(pk=obj['strategy_id']).first() + strategy_name = strategy.vul_name if strategy else None + type_ = list( + filter(lambda x: x is not None, [strategy_name, hook_type_name])) + return type_[0] if type_ else '' + + def get_level(self, obj): + level = 
IastVulLevel.objects.filter(pk=obj['level_id']).first() + return level.name_value if level else '' + + +class VulSummaryLanguageSerializer(serializers.Serializer): + language = serializers.CharField(help_text=_("programming language")) + count = serializers.IntegerField(help_text=_( + "The number of vulnerabilities corresponding to the programming language" + )) + + +class VulSummaryLevelSerializer(serializers.Serializer): + level = serializers.CharField(help_text=_("The name of vulnerablity level")) + count = serializers.IntegerField(help_text=_("The number of vulnerabilities corresponding to the level")) + level_id = serializers.IntegerField(help_text=_("The id of vulnerablity level")) + + +class VulSummaryTypeSerializer(serializers.Serializer): + type = serializers.CharField(help_text=_("The name of vulnerablity type")) + count = serializers.IntegerField(help_text=_( + "The number of vulnerabilities corresponding to the vulnerablity type") + ) + + +class VulSummaryProjectSerializer(serializers.Serializer): + project_name = serializers.CharField( + help_text=_("The name of the project")) + count = serializers.IntegerField(help_text=_( + "The number of vulnerabilities corresponding to the project")) + id = serializers.IntegerField(help_text=_("The id of the project")) + + +class VulSummaryResponseDataSerializer(serializers.Serializer): + language = VulSummaryLanguageSerializer(many=True) + level = VulSummaryLevelSerializer(many=True) + type = VulSummaryTypeSerializer(many=True) + projects = VulSummaryProjectSerializer(many=True) diff --git a/dongtai_web/systemmonitor/__init__.py b/dongtai_web/systemmonitor/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/dongtai_web/systemmonitor/__init__.py @@ -0,0 +1 @@ + diff --git a/dongtai_web/systemmonitor/data_clean.py b/dongtai_web/systemmonitor/data_clean.py new file mode 100644 index 000000000..dc09ce41f --- /dev/null +++ b/dongtai_web/systemmonitor/data_clean.py @@ -0,0 +1,102 @@ +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_conf.settings import config +from dongtai_common.models.profile import IastProfile +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from rest_framework.serializers import ValidationError +from django.forms.models import model_to_dict +import json +from datetime import datetime +from django_celery_beat.models import ( + CrontabSchedule, + PeriodicTask, +) +from dongtai_engine.plugins.data_clean import data_cleanup + +class DataCleanSettingsSer(serializers.Serializer): + clean_time = serializers.TimeField(format="%H:%M:%S") + days_before = serializers.IntegerField() + enable = serializers.BooleanField() + +class DataCleanDoItNowArgsSer(serializers.Serializer): + days_before = serializers.IntegerField() + +class DataCleanEndpoint(UserEndPoint): + + @extend_schema_with_envcheck(summary=_('Get Profile'), + description=_("Get Profile with key"), + tags=[_('Profile')]) + def get(self, request): + key = 'data_clean' + profile = IastProfile.objects.filter(key=key).values_list( + 'value', flat=True).first() + if profile is None: + return R.failure( + msg=_("Failed to get {} configuration").format(key)) + data = json.loads(profile) + return R.success(data=data) + + @extend_schema_with_envcheck(summary=_('Profile modify'), + request=DataCleanSettingsSer, + 
description=_("Modifiy Profile with key"), + tags=[_('Profile')]) + def post(self, request): + ser = DataCleanSettingsSer(data=request.data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + key = 'data_clean' + datetime_obj = datetime.strptime(ser.data['clean_time'], '%H:%M:%S') + hour = datetime_obj.hour + minute = datetime_obj.hour + enabled = 1 if ser.data['enable'] else 0 + kwargs = {'days': int(ser.data['days_before'])} + schedule, _ = CrontabSchedule.objects.get_or_create( + minute=minute, + hour=hour, + day_of_week='*', + day_of_month='*', + month_of_year='*', + ) + PeriodicTask.objects.get_or_create( + name='data clean functions', # simply describes this periodic task. + defaults={ + 'crontab': schedule, # we created this above. + 'enabled': enabled, + 'task': + 'dongtai_engine.plugins.data_clean.data_cleanup', # name of task. + 'args': json.dumps([]), + 'kwargs': json.dumps(kwargs), + }) + value = json.dumps(ser.data) + try: + obj, created = IastProfile.objects.update_or_create( + { + 'key': key, + 'value': value + }, key=key) + except Exception as e: + return R.failure(msg=_("Update {} failed").format(key)) + data = json.loads(value) + return R.success(data=data) + +class DataCleanDoItNowEndpoint(UserEndPoint): + + @extend_schema_with_envcheck(summary=_('Get Profile'), + request=DataCleanDoItNowArgsSer, + description=_("Get Profile with key"), + tags=[_('Profile')]) + def post(self, request): + ser = DataCleanDoItNowArgsSer(data=request.data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + data_cleanup.delay(days=ser.data['days_before']) + return R.success() diff --git a/dongtai_web/systemmonitor/urls.py b/dongtai_web/systemmonitor/urls.py new file mode 100644 index 000000000..4896e07be --- /dev/null +++ b/dongtai_web/systemmonitor/urls.py @@ -0,0 +1,15 @@ +from django.urls import include, path + +from rest_framework import routers +from dongtai_web.systemmonitor.data_clean import DataCleanEndpoint, DataCleanDoItNowEndpoint + +router = routers.DefaultRouter() + +base_urlpatterns = [ + path('data_clean', DataCleanEndpoint.as_view()), + path('data_clean/task', DataCleanDoItNowEndpoint.as_view()), +] + +urlpatterns = [ + path('api/v1/systemmonitor/', include(base_urlpatterns)), +] diff --git a/dongtai_web/tests.py b/dongtai_web/tests.py new file mode 100644 index 000000000..9b439eef1 --- /dev/null +++ b/dongtai_web/tests.py @@ -0,0 +1,75 @@ +# Create your tests here. 
+from django.test import TestCase +from dongtai_web.views.agents_v2 import query_agent +from dongtai_web.views.log_download import (file_newest_N_file_under_path, getzipfilesinmemorty, ) +from dongtai_web.views.agents_v2 import ( + query_agent, ) + +from dongtai_web.threshold.config_setting import (convert_choices_to_dict, + convert_choices_to_value_dict, + get_metric_types, get_targets) +from dongtai_common.models.agent_config import ( + IastCircuitTarget, + IastCircuitConfig, + IastCircuitMetric, + TargetType, + TargetOperator, + DealType, + MetricType, + MetricGroup, + MetricOperator, +) + +class DashboardTestCase(TestCase): + def test_query_agent(self): + res = query_agent() + print(res) + + +class ZipFileTestCase(TestCase): + + def test_findnewest_file(self): + res = file_newest_N_file_under_path('./dongtai_web', 2) + print(res) + + def test_getzipfilesinmemorty(self): + res = getzipfilesinmemorty(['./README.md', './lingzhi.sh']) + print(res) + + def test_get_zip_together(self): + from dongtai_web.views.log_download import get_zip_together + res = get_zip_together([1, 2, 3], 1) + print(res) + +class ChoiceConvertTestCase(TestCase): + def test_choice_convert(self): + able_to_search = (MetricType, MetricGroup, + TargetOperator, + MetricOperator) + for i in able_to_search: + res = convert_choices_to_dict(i) + print(res) + def test_choice_convert_value(self): + able_to_search = (MetricType, MetricGroup, + TargetOperator, + MetricOperator) + for i in able_to_search: + res = convert_choices_to_value_dict(i) + print(res) + + def test_metric_string_concate(self): + metrics = [{ + "metric_type": 1, + "opt": 5, + "value": 100 + }, { + "metric_type": 2, + "opt": 5, + "value": 100 + }, { + "metric_type": 3, + "opt": 5, + "value": 1000000000 + }] + res = get_metric_types(metrics) + print(res) diff --git a/dongtai_web/threshold/__init__.py b/dongtai_web/threshold/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/threshold/agent_core_status.py b/dongtai_web/threshold/agent_core_status.py new file mode 100644 index 000000000..cae3279b6 --- /dev/null +++ b/dongtai_web/threshold/agent_core_status.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +import time +from dongtai_common.endpoint import UserEndPoint, R + +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.serializers.agent import AgentToggleArgsSerializer +from dongtai_web.views import AGENT_STATUS +from collections import defaultdict + + +STATUS_MAPPING = defaultdict(lambda: 1, {3: 1, 4: 2}) + + +_ResponseSerializer = get_response_serializer( + status_msg_keypair=(((201, _('Suspending ...')), ''), )) + + +class AgentCoreStatusSerializer(serializers.Serializer): + + id = serializers.IntegerField(help_text=_('The id of the webHook.'), required=False) + core_status = serializers.IntegerField(help_text=_('The type of the webHook.'), required=True) + agent_ids = serializers.CharField(help_text=_('The cluster_name of the agent.'), max_length=255, required=False) + + +class AgentCoreStatusUpdate(UserEndPoint): + name = "api-v1-agent-core-status-update" + description = _("Suspend Agent") + + @extend_schema_with_envcheck( + request=AgentToggleArgsSerializer, + tags=[_('Agent')], + summary=_('Agent Status Update'), + description=_("Control the running agent by specifying 
the id."), + response_schema=_ResponseSerializer) + def post(self, request): + ser = AgentCoreStatusSerializer(data=request.data) + if ser.is_valid(False): + agent_id = ser.validated_data.get('id', None) + core_status = ser.validated_data.get('core_status', None) + agent_ids = ser.validated_data.get('agent_ids', "").strip() + else: + return R.failure(msg=_('Incomplete parameter, please check again')) + + if agent_ids: + try: + agent_ids = [int(i) for i in agent_ids.split(',')] + except BaseException: + return R.failure(_("Parameter error")) + elif agent_id is not None: + agent_ids = [int(agent_id)] + + if agent_ids: + except_running_status = STATUS_MAPPING[core_status] + #Here could be simply to such as "control_status in statusData.keys()" + statusData = AGENT_STATUS.get(core_status,{}) + control_status = statusData.get("value",None) + if control_status is None: + return R.failure(msg=_('Incomplete parameter, please check again')) + user = request.user + + # 超级管理员 + if user.is_system_admin(): + queryset = IastAgent.objects.all() + # 租户管理员 + elif user.is_superuser == 2: + users = self.get_auth_users(user) + user_ids = list(users.values_list('id', flat=True)) + queryset = IastAgent.objects.filter(user_id__in=user_ids) + else: + # 普通用户 + queryset = IastAgent.objects.filter(user=user) + queryset.filter(id__in=agent_ids).update( + except_running_status=except_running_status, + control=core_status, + is_control=1, + latest_time=int(time.time())) + # for agent_id in agent_ids: + # agent = IastAgent.objects.filter(user=request.user, id=agent_id).first() + # if agent is None: + # continue + # # edit by song + # # if agent.is_control == 1 and agent.control != 3 and agent.control != 4: + # # continue + # agent.control = core_status + # agent.is_control = 1 + # agent.latest_time = int(time.time()) + # agent.save(update_fields=['latest_time', 'control', 'is_control']) + + return R.success(msg=_('状态已下发')) + + +class AgentCoreStatusUpdateALL(UserEndPoint): + name = "api-v1-agent-core-status-update" + description = _("Suspend Agent") + + def post(self, request): + ser = AgentCoreStatusSerializer(data=request.data) + if ser.is_valid(False): + core_status = ser.validated_data.get('core_status', None) + else: + return R.failure(msg=_('Incomplete parameter, please check again')) + user = request.user + # 超级管理员 + if user.is_system_admin(): + queryset = IastAgent.objects.all() + # 租户管理员 + elif user.is_superuser == 2: + users = self.get_auth_users(user) + user_ids = list(users.values_list('id', flat=True)) + queryset = IastAgent.objects.filter(user_id__in=user_ids) + else: + # 普通用户 + queryset = IastAgent.objects.filter(user=user) + except_running_status = STATUS_MAPPING[core_status] + queryset.filter(online=1).update( + except_running_status=except_running_status, + control=core_status, + is_control=1, + latest_time=int(time.time())) + return R.success(msg=_('状态已下发')) diff --git a/dongtai_web/threshold/config_setting.py b/dongtai_web/threshold/config_setting.py new file mode 100644 index 000000000..83ed24c5d --- /dev/null +++ b/dongtai_web/threshold/config_setting.py @@ -0,0 +1,390 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: webApi +# agent threshold setting +import time + +from dongtai_common.endpoint import UserEndPoint, R, TalentAdminEndPoint +from dongtai_common.models.agent_config import IastAgentConfig +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from 
dongtai_web.serializers.agent_config import AgentConfigSettingSerializer +from rest_framework.serializers import ValidationError + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('The setting is complete')), ''), + ((202, _('Incomplete parameter, please try again later')), '') +)) + + +class AgentThresholdConfig(UserEndPoint): + name = "api-v1-agent-threshold-config-setting" + description = _("config Agent") + + def create_agent_config(self,user, details, hostname, ip, port, cluster_name, cluster_version, priority,id): + try: + + timestamp = int(time.time()) + if id: + strategy = IastAgentConfig.objects.filter(user=user,id=id).order_by("-create_time").first() + else: + strategy = IastAgentConfig.objects.filter(user=user, id=id).order_by("-create_time").first() + if strategy: + strategy.details = details + strategy.hostname = hostname + strategy.ip = ip + strategy.port = port + strategy.cluster_name = cluster_name + strategy.cluster_version = cluster_version + strategy.priority = priority + else: + strategy = IastAgentConfig( + user=user, + details=details, + hostname=hostname, + ip=ip, + port=port, + cluster_name=cluster_name, + cluster_version=cluster_version, + priority=priority, + create_time=timestamp + ) + strategy.save() + return strategy + except Exception as e: + + return None + + @extend_schema_with_envcheck( + tags=[_('Agent')], + summary=_('Agent threshold Config'), + description=_("Configure agent disaster recovery strategy"), + response_schema=_ResponseSerializer) + def post(self, request): + + ser = AgentConfigSettingSerializer(data=request.data) + user = request.user + try: + if ser.is_valid(True): + details = ser.validated_data.get('details', {}) + hostname = ser.validated_data.get('hostname', "").strip() + ip = ser.validated_data.get('ip', "") + id = ser.validated_data.get('id', "") + port = ser.validated_data.get('port', 80) + cluster_name = ser.validated_data.get('cluster_name', "").strip() + cluster_version = ser.validated_data.get('cluster_version', "") + priority = ser.validated_data.get('priority', 0) + + except ValidationError as e: + + return R.failure(data=e.detail) + + config = self.create_agent_config(user, details, hostname, ip, port, cluster_name, cluster_version, priority,id) + if config: + return R.success(msg=_('保存成功')) + else: + return R.failure(msg=_('保存失败')) + + +from rest_framework import serializers +from django.db.models import IntegerChoices +from rest_framework import viewsets +from dongtai_common.models.agent_config import ( + IastCircuitTarget, + IastCircuitConfig, + IastCircuitMetric, + TargetType, + TargetOperator, + DealType, + MetricType, + MetricGroup, + MetricOperator, + SystemMetricType, + JVMMetricType, + ApplicationMetricType, + UNIT_DICT, +) +from collections.abc import Iterable +from inflection import underscore +from functools import partial + +def intable_validate(value): + try: + a = int(value) + except ValueError as e: + raise serializers.ValidationError('This field must be an intable.') + +class AgentConfigSettingV2TargetSerializer(serializers.Serializer): + target_type = serializers.ChoiceField(TargetType.choices) + opt = serializers.ChoiceField(TargetOperator.choices) + value = serializers.CharField() + + +class AgentConfigSettingV2MetricSerializer(serializers.Serializer): + metric_type = serializers.ChoiceField(MetricType.choices) + opt = serializers.ChoiceField(MetricOperator.choices) + value = serializers.CharField(validators=[intable_validate]) + + +class 
AgentConfigSettingV2Serializer(serializers.Serializer): + name = serializers.CharField() + targets = serializers.ListField( + child=AgentConfigSettingV2TargetSerializer()) + metric_group = serializers.ChoiceField(MetricGroup.choices) + metrics = serializers.ListField( + child=AgentConfigSettingV2MetricSerializer()) + interval = serializers.IntegerField() + deal = serializers.ChoiceField(DealType.choices) + is_enable = serializers.IntegerField() + + +from django.db.models import Max, Min +from django.forms.models import model_to_dict + + +def get_priority_max_now() -> int: + res = IastCircuitConfig.objects.all().aggregate(Max("priority")) + return res["priority__max"] + 1 + +def get_priority_min_now() -> int: + res = IastCircuitConfig.objects.all().aggregate(Min("priority")) + return res["priority__min"] - 1 + +def config_create(data, user): + fields = ('name', 'metric_group', 'is_enable', 'deal', + "interval") + filted_data = get_data_from_dict_by_key(data, fields) + metric_types = get_metric_types(data['metrics']) + targets = get_targets(data['targets']) + obj = IastCircuitConfig.objects.create(**filted_data, + metric_types=metric_types, + target_types=targets, + priority=get_priority_max_now(), + user=user) + for i in data['targets']: + create_target(i, obj) + + for i in data['metrics']: + create_metric(i, obj) + + +def config_update(data, config_id): + fields = ('name', 'metric_group', 'is_enable', 'deal', + "interval") + filted_data = get_data_from_dict_by_key(data, fields) + metric_types = get_metric_types(data['metrics']) + targets = get_targets(data['targets']) + IastCircuitConfig.objects.filter( + pk=config_id).update(**filted_data, + metric_types=metric_types, + target_types=targets) + IastCircuitTarget.objects.filter( + circuit_config_id=config_id).delete() + IastCircuitMetric.objects.filter( + circuit_config_id=config_id).delete() + obj = IastCircuitConfig.objects.filter(pk=config_id).first() + for i in data['targets']: + create_target(i, obj) + + for i in data['metrics']: + create_metric(i, obj) + + +def create_metric(metrics: dict, circuit_config: IastCircuitConfig): + IastCircuitMetric.objects.create(circuit_config=circuit_config, **metrics) + + +def create_target(target: dict, circuit_config: IastCircuitConfig): + IastCircuitTarget.objects.create(circuit_config=circuit_config, **target) + + +def get_metric_types(metrics): + str_list = [] + for metric in metrics: + str_list.append(str(MetricType(metric['metric_type']).label)) + return str(_("、")).join(str_list) + + +def get_targets(targets): + str_list = [] + for target in targets: + str_list.append(str(TargetType(target['target_type']).label)) + res = str(_("、")).join(str_list) + if not res: + return str(_("全部")) + return res + + +def get_data_from_dict_by_key(dic: dict, fields: Iterable) -> dict: + return {i: dic[i] for i in fields} + + +from django.db.models import F + + +#when target_priority < config.priorty +def set_config_change_lt(config_id, target_priority: int): + config = IastCircuitConfig.objects.filter(pk=config_id).first() + IastCircuitConfig.objects.filter( + priority__gte=target_priority, + priority__lt=config.priority).update(priority=F('priority') + 1) + config.priority = target_priority + config.save() + + +def set_config_top(config_id): + return set_config_change_lt(config_id, + target_priority=get_priority_min_now()) + + +#when target_priority > config.priorty +def set_config_change_gt(config_id, target_priority: int): + config = IastCircuitConfig.objects.filter(pk=config_id).first() + 
IastCircuitConfig.objects.filter( + priority__lte=target_priority, + priority__gt=config.priority).update(priority=F('priority') - 1) + config.priority = target_priority + config.save() + + +def set_config_bottom(config_id): + set_config_change_gt(config_id, target_priority=get_priority_max_now()) + + +def set_config_change_proprity(config_id, priority_range: list): + config = IastCircuitConfig.objects.filter(pk=config_id).first() + if min(priority_range) > config.priority: + set_config_change_gt(config.id, min(priority_range)) + if max(priority_range) < config.priority: + set_config_change_lt(config.id, max(priority_range)) + + +from dongtai_conf.settings import DEFAULT_CIRCUITCONFIG + + +class AgentThresholdConfigV2(TalentAdminEndPoint, viewsets.ViewSet): + name = "api-v1-agent-threshold-config-setting-v2" + description = _("config Agent V2") + + @extend_schema_with_envcheck( + [AgentConfigSettingV2Serializer], + summary=_('Create AgentThresholdConfig'), + description=_("Create AgentThresholdConfigV2"), + tags=[_('AgentThresholdConfigV2')]) + def create(self, request): + ser = AgentConfigSettingV2Serializer(data=request.data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + config_create(ser.data, request.user) + return R.success() + + def retrieve(self, request, pk): + obj = IastCircuitConfig.objects.filter(pk=pk, + is_deleted=0).values().first() + if not obj: + return R.failure() + obj['targets'] = list( + IastCircuitTarget.objects.filter( + circuit_config_id=pk).values().all()) + obj['metrics'] = list( + IastCircuitMetric.objects.filter( + circuit_config_id=pk).values().all()) + return R.success(data=obj) + + def list(self, request): + # page = request.query_params.get('page', 1) + # page_size = request.query_params.get("page_size", 10) + queryset = IastCircuitConfig.objects.filter( + is_deleted=0).order_by('priority').prefetch_related( + 'iastcircuittarget_set', 'iastcircuitmetric_set').all() + #page_summary, page_data = self.get_paginator(queryset, page, page_size) + obj_list = [] + for data in queryset: + obj = model_to_dict(data) + obj['targets'] = list(data.iastcircuittarget_set.values().all()) + obj['metrics'] = list(data.iastcircuitmetric_set.values().all()) + obj_list.append(obj) + return R.success(data=obj_list) + + def update(self, request, pk): + ser = AgentConfigSettingV2Serializer(data=request.data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + config_update(ser.data, pk) + return R.success() + + def reset(self, request, pk): + if IastCircuitConfig.objects.filter(pk=pk).exists(): + config = IastCircuitConfig.objects.filter(pk=pk, ).first() + mg = MetricGroup(config.metric_group) + data = DEFAULT_CIRCUITCONFIG[mg.name] + config_update(data, pk) + return R.success() + return R.failure() + + def change_priority(self, request, pk): + type_ = request.data.get('type') + priority_range = request.data.get('priority_range') + if IastCircuitConfig.objects.filter(pk=pk).exists(): + if type_ == 1: + set_config_top(pk) + return R.success() + if type_ == 2 and priority_range: + set_config_change_proprity(pk, priority_range) + return R.success() + if type_ == 3: + set_config_bottom(pk) + return R.success() + return R.failure() + + def delete(self, request, pk): + IastCircuitConfig.objects.filter(pk=pk).update(is_deleted=1) + return R.success() + + def enum(self, request, enumname): + able_to_search = (TargetType, MetricType, MetricGroup, TargetOperator, + MetricOperator, 
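# Worked example (pure-Python illustration) of the priority re-ordering helpers
# above: set_config_change_lt moves a circuit config to a smaller priority value
# and shifts every config that sat in between down by one slot, which is what
# the F('priority') + 1 bulk update expresses in the ORM.
def move_to_higher_priority(priorities, config_id, target):
    current = priorities[config_id]
    return {
        cid: (target if cid == config_id
              else prio + 1 if target <= prio < current
              else prio)
        for cid, prio in priorities.items()
    }


# Configs 1..4 hold priorities 1..4; moving config 4 up to priority 2
# pushes the configs formerly at priorities 2 and 3 down by one.
assert move_to_higher_priority({1: 1, 2: 2, 3: 3, 4: 4}, 4, 2) == {1: 1, 2: 3, 3: 4, 4: 2}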
DealType, SystemMetricType,
+                           JVMMetricType, ApplicationMetricType)
+        able_to_search_dict = {
+            underscore(item.__name__): item
+            for item in able_to_search
+        }
+        if enumname not in able_to_search_dict.keys():
+            return R.failure()
+        return R.success(data=convert_choices_to_value_dict(
+            able_to_search_dict.get(enumname)))
+
+    def enumall(self, request):
+        able_to_search = (TargetType, MetricType, MetricGroup, TargetOperator,
+                          MetricOperator, DealType, SystemMetricType,
+                          JVMMetricType, ApplicationMetricType)
+        res = {
+            underscore(item.__name__): convert_choices_to_value_dict(item)
+            for item in able_to_search
+        }
+        res['UNIT_DICT'] = UNIT_DICT
+        return R.success(data=res)
+
+
+def convert_choices_to_dict(choices):
+    fields = ['value', 'name', 'label']
+    return [{field: getattr(choice, field)
+             for field in fields}
+            for choice in choices]
+
+
+def convert_choices_to_value_dict(choices):
+    fields = ['name', 'label']
+    return {
+        choice.value: {field: getattr(choice, field)
+                       for field in fields}
+        for choice in choices
+    }
diff --git a/dongtai_web/threshold/del_threshold_setting.py b/dongtai_web/threshold/del_threshold_setting.py
new file mode 100644
index 000000000..1075ccc48
--- /dev/null
+++ b/dongtai_web/threshold/del_threshold_setting.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 -*-
+# author:sjh
+# software: PyCharm
+# project: webApi
+# agent threshold setting
+import time
+
+from dongtai_common.endpoint import UserEndPoint, R
+from dongtai_common.models.agent_config import IastAgentConfig
+from django.utils.translation import gettext_lazy as _
+from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer
+from dongtai_web.serializers.agent_config import AgentWebHookDelSerializer
+from rest_framework.serializers import ValidationError
+
+_ResponseSerializer = get_response_serializer(status_msg_keypair=(
+    ((201, _('The setting is complete')), ''),
+    ((202, _('Incomplete parameter, please try again later')), '')
+))
+
+
+class DelAgentThresholdConfig(UserEndPoint):
+    name = "api-v1-agent-Threshold-config-del"
+    description = _("del threshold config Agent")
+
+    @extend_schema_with_envcheck(
+        tags=[_('Agent')],
+        summary=_('Agent threshold config delete'),
+        description=_("Delete the agent disaster recovery strategy configuration"),
+        response_schema=_ResponseSerializer)
+    def post(self, request):
+        ser = AgentWebHookDelSerializer(data=request.data)
+        user = request.user
+        if ser.is_valid(False):
+            id = ser.validated_data.get('id', None)
+        else:
+            return R.failure(msg=_('Incomplete parameter, please check again'))
+        config = IastAgentConfig.objects.filter(user=user, id=id).delete()
+        if config:
+            return R.success(msg=_('Config has been deleted successfully'))
+        else:
+            return R.failure(msg=_('Failed to delete config'))
diff --git a/dongtai_web/threshold/del_webhook_setting.py b/dongtai_web/threshold/del_webhook_setting.py
new file mode 100644
index 000000000..77f4f2f2b
--- /dev/null
+++ b/dongtai_web/threshold/del_webhook_setting.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 -*-
+# author:sjh
+# software: PyCharm
+# project: webApi
+# agent webHook setting
+import time
+
+from dongtai_common.endpoint import UserEndPoint, R
+from dongtai_common.models.agent_webhook_setting import IastAgentUploadTypeUrl
+from django.utils.translation import gettext_lazy as _
+from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer
+from dongtai_web.serializers.agent_config import AgentWebHookDelSerializer
+from 
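# Example (illustration only) of the payload shape that convert_choices_to_value_dict
# above produces for the enum/enumall endpoints. The IntegerChoices enum below is
# hypothetical and exists only for this sketch; the helper is copied verbatim from
# dongtai_web/threshold/config_setting.py so the example is self-contained.
from django.db.models import IntegerChoices


class ExampleDeal(IntegerChoices):
    FALLBACK = 1, 'fallback'
    ALERT = 2, 'alert'


def convert_choices_to_value_dict(choices):
    fields = ['name', 'label']
    return {
        choice.value: {field: getattr(choice, field) for field in fields}
        for choice in choices
    }


assert convert_choices_to_value_dict(ExampleDeal) == {
    1: {'name': 'FALLBACK', 'label': 'fallback'},
    2: {'name': 'ALERT', 'label': 'alert'},
}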
rest_framework.serializers import ValidationError + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('The setting is complete')), ''), + ((202, _('Incomplete parameter, please try again later')), '') +)) + + +class DelAgentWebHookConfig(UserEndPoint): + name = "api-v1-agent-webHook-config-del" + description = _("del webHook Agent") + + @extend_schema_with_envcheck( + tags=[_('Agent')], + summary=_('Agent webHook delete'), + description=_("Delete agent traffic reporting data forwarding address configuration"), + response_schema=_ResponseSerializer) + def post(self, request): + ser = AgentWebHookDelSerializer(data=request.data) + user = request.user + if ser.is_valid(False): + id = ser.validated_data.get('id', None) + else: + return R.failure(msg=_('Incomplete parameter, please check again')) + config = IastAgentUploadTypeUrl.objects.filter(user=user, id=id).delete() + if config: + return R.success(msg=_('Config has been deleted successfully')) + else: + R.failure(msg=_('Failed to delete config')) diff --git a/dongtai_web/threshold/get_config_setting.py b/dongtai_web/threshold/get_config_setting.py new file mode 100644 index 000000000..7c6489213 --- /dev/null +++ b/dongtai_web/threshold/get_config_setting.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: webApi +# agent threshold setting +import time + +from django.forms import model_to_dict +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.agent_config import IastAgentConfig +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Get success')), ''), + ((202, _('Incomplete parameter, please try again later')), '') +)) + + +class GetAgentThresholdConfig(UserEndPoint): + name = "api-v1-agent-threshold-config-get" + description = _("config Agent") + + @extend_schema_with_envcheck( + tags=[_('Agent')], + summary=_('Agent threshold Config'), + description=_("Configure agent disaster recovery strategy"), + response_schema=_ResponseSerializer) + def get(self, request): + user = request.user + configData = IastAgentConfig.objects.filter(user=user) + result = [] + if configData: + for item in configData: + data = model_to_dict(item) + data_detail = {} + if data['details']: + data_detail = data['details'] + del data['user'] + del data['details'] + + if isinstance(data_detail, dict): + + data['enableAutoFallback'] = data_detail.get("enableAutoFallback", None) + data['hookLimitTokenPerSecond'] = data_detail.get("hookLimitTokenPerSecond", None) + data['heavyTrafficLimitTokenPerSecond'] = data_detail.get("heavyTrafficLimitTokenPerSecond", None) + data['cpuUsagePercentage'] = data_detail.get("performanceLimitMaxThreshold", {}).get("cpuUsage", {}).get("cpuUsagePercentage",None) + data['memUsagePercentage'] = data_detail.get("performanceLimitMaxThreshold", {}).get("memoryUsage", {}).get("memUsagePercentage",None) + else: + data['enableAutoFallback'] = "" + data['hookLimitTokenPerSecond'] = "" + data['heavyTrafficLimitTokenPerSecond'] = "" + data['cpuUsagePercentage'] = "" + data['memUsagePercentage'] = "" + result.append(data) + else: + result = [] + return R.success(msg=_('Successfully'), data={"result": result}) diff --git a/dongtai_web/threshold/get_config_setting_detail.py b/dongtai_web/threshold/get_config_setting_detail.py new file mode 100644 index 
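# GetAgentThresholdConfig above flattens the nested `details` JSON of an agent
# config into flat response fields through chained dict lookups. The same shape,
# condensed into a standalone helper; the field names come from the view, the
# sample payload is made up:
def flatten_threshold_details(details):
    """Pick the flat fields the frontend expects out of a nested agent-config dict."""
    if not isinstance(details, dict):
        details = {}
    perf = details.get("performanceLimitMaxThreshold", {})
    return {
        "enableAutoFallback": details.get("enableAutoFallback"),
        "hookLimitTokenPerSecond": details.get("hookLimitTokenPerSecond"),
        "heavyTrafficLimitTokenPerSecond": details.get("heavyTrafficLimitTokenPerSecond"),
        "cpuUsagePercentage": perf.get("cpuUsage", {}).get("cpuUsagePercentage"),
        "memUsagePercentage": perf.get("memoryUsage", {}).get("memUsagePercentage"),
    }

sample = {
    "enableAutoFallback": True,
    "hookLimitTokenPerSecond": 5000,
    "performanceLimitMaxThreshold": {"cpuUsage": {"cpuUsagePercentage": 80}},
}
print(flatten_threshold_details(sample))   # missing keys simply come back as None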
000000000..bba1a96a9 --- /dev/null +++ b/dongtai_web/threshold/get_config_setting_detail.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: webApi +# agent threshold setting +import time + +from django.forms import model_to_dict +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.agent_config import IastAgentConfig +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Get detail success')), ''), + ((202, _('Incomplete parameter, please try again later')), '') +)) + + +class GetAgentThresholdConfigDetail(UserEndPoint): + name = "api-v1-agent-threshold-config-get-detail" + description = _("config Agent") + + @extend_schema_with_envcheck( + tags=[_('Agent')], + summary=_('Agent threshold Config'), + description=_("Configure agent disaster recovery strategy"), + response_schema=_ResponseSerializer) + def get(self, request, pk): + user = request.user + configData = IastAgentConfig.objects.filter(user=user, pk=pk).first() + result = {} + if configData: + result = model_to_dict(configData) + + return R.success(msg=_('Successfully'), data={"result": result}) diff --git a/dongtai_web/threshold/get_webhook_setting.py b/dongtai_web/threshold/get_webhook_setting.py new file mode 100644 index 000000000..e462bc3a6 --- /dev/null +++ b/dongtai_web/threshold/get_webhook_setting.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: webApi +# agent threshold setting +import time + +from django.forms import model_to_dict +from dongtai_common.endpoint import UserEndPoint, R + +from dongtai_common.models.agent_webhook_setting import IastAgentUploadTypeUrl + +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Get success')), ''), + ((202, _('Incomplete parameter, please try again later')), '') +)) + + +class GetAgentWebHookConfig(UserEndPoint): + name = "api-v1-agent-webHook-config-get" + description = _("config Agent") + + @extend_schema_with_envcheck( + tags=[_('WebHook')], + summary=_('WebHook threshold Config get'), + description=_("WebHook threshold list"), + response_schema=_ResponseSerializer) + def get(self, request): + user = request.user + configData = IastAgentUploadTypeUrl.objects.filter(user=user).order_by("-create_time") + data = [] + if configData: + for item in configData: + itemData = model_to_dict(item) + del itemData['user'] + data.append(itemData) + + return R.success(msg=_('Successfully'), data={"result": data}) diff --git a/dongtai_web/threshold/webhook_setting.py b/dongtai_web/threshold/webhook_setting.py new file mode 100644 index 000000000..25f67d722 --- /dev/null +++ b/dongtai_web/threshold/webhook_setting.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: webApi +# agent webHook setting +import time + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.agent_webhook_setting import IastAgentUploadTypeUrl +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.serializers.agent_config import 
AgentWebHookSettingSerializer +from rest_framework.serializers import ValidationError + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('The setting is complete')), ''), + ((202, _('Incomplete parameter, please try again later')), '') +)) + + +class AgentWebHookConfig(UserEndPoint): + name = "api-v1-agent-webHook-config-setting" + description = _("config webHook Agent") + + def create_webHook_config(self, user, type_id, url, headers, id): + try: + setting = {} + if id is not None: + setting = IastAgentUploadTypeUrl.objects.filter(user=user, id=id).first() + if setting: + setting.type_id = type_id + setting.url = url + setting.headers = headers + else: + timestamp = int(time.time()) + setting = IastAgentUploadTypeUrl( + user=user, + type_id=type_id, + url=url, + headers=headers, + create_time=timestamp + ) + setting.save() + return setting + except Exception as e: + return None + + @extend_schema_with_envcheck( + tags=[_('Agent')], + summary=_('Agent webHook Config'), + description=_("Agent traffic reporting data forwarding address configuration"), + response_schema=_ResponseSerializer) + def post(self, request): + ser = AgentWebHookSettingSerializer(data=request.data) + user = request.user + if ser.is_valid(False): + id = ser.validated_data.get('id', None) + type_id = ser.validated_data.get('type_id', None) + headers = ser.validated_data.get('headers', {}) + url = ser.validated_data.get('url', "").strip() + else: + return R.failure(msg=_('Incomplete parameter, please check again')) + config = self.create_webHook_config(user, type_id, url, headers, id) + if config: + return R.success(msg=_('Config has been created successfully'),data={"id":config.id}) + else: + R.failure(msg=_('Failed to create config')) diff --git a/dongtai_web/threshold/webhook_type.py b/dongtai_web/threshold/webhook_type.py new file mode 100644 index 000000000..8bd54551a --- /dev/null +++ b/dongtai_web/threshold/webhook_type.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: webApi +# agent webHook setting +import time + +from dongtai_common.endpoint import UserEndPoint, R + +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('The type is return')), ''), + ((202, _('Incomplete parameter, please try again later')), '') +)) + + +class AgentWebHookTypeList(UserEndPoint): + name = "api-v1-agent-webHook-type-list" + description = _("get webhook all type ") + + @extend_schema_with_envcheck( + tags=[_('WebHook')], + summary=_('Agent webHook type'), + description=_("type list of agent webHook"), + response_schema=_ResponseSerializer) + def get(self, request): + typeData = [ + { + "key": "错误日志", + "value": 81 + }, { + "key": "心跳", + "value": 1 + }, { + "key": "低风险漏洞", + "value": 33 + }, { + "key": "调用链", + "value": 36 + }, { + "key": "SCA", + "value": 17 + }, { + "key": "SCA批量", + "value": 18 + }, { + "key": "ApiSiteMap", + "value": 97 + }, { + "key": "硬编码", + "value": 37 + }, { + "key": "高频hook限流", + "value": 65 + }, { + "key": "高频请求限流", + "value": 66 + }, { + "key": "性能监控降级", + "value": 67 + }, { + "key": "异常降级", + "value": 68 + }, { + "key": "监控线程异常", + "value": 69 + }, { + "key": "触发二次降级", + "value": 70 + } + ] + + return R.success(msg=_('Get type list successfully'), data={"result": typeData}) diff --git a/dongtai_web/upload/masterimg/weblogic-01.png 
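# A client-side sketch of the webhook-forwarding endpoints defined above; urls.py
# later in this patch mounts them under /api/v1/ as webhook/settings,
# webhook/settings/get and webhook/type/list. The base URL and token below are
# placeholders, and Token-header authentication is an assumption of this sketch:
import requests

BASE = "http://localhost:8000/api/v1"                  # placeholder deployment URL
HEADERS = {"Authorization": "Token <your-token>"}      # assumed DRF token auth

# Create a forwarding target (passing "id" instead updates an existing one).
created = requests.post(f"{BASE}/webhook/settings", headers=HEADERS, json={
    "type_id": 1,                                      # report type, see webhook/type/list
    "url": "https://example.com/hook",
    "headers": {"X-Custom": "1"},
})
print(created.json())

# List the current user's forwarding targets.
print(requests.get(f"{BASE}/webhook/settings/get", headers=HEADERS).json())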
b/dongtai_web/upload/masterimg/weblogic-01.png new file mode 100644 index 000000000..1a1eae114 Binary files /dev/null and b/dongtai_web/upload/masterimg/weblogic-01.png differ diff --git a/dongtai_web/upload/masterimg/weblogic-02.png b/dongtai_web/upload/masterimg/weblogic-02.png new file mode 100644 index 000000000..6e68555ca Binary files /dev/null and b/dongtai_web/upload/masterimg/weblogic-02.png differ diff --git a/dongtai_web/upload/masterimg/weblogic-03.png b/dongtai_web/upload/masterimg/weblogic-03.png new file mode 100644 index 000000000..dcbf2719a Binary files /dev/null and b/dongtai_web/upload/masterimg/weblogic-03.png differ diff --git a/dongtai_web/upload/masterimg/weblogic-04.png b/dongtai_web/upload/masterimg/weblogic-04.png new file mode 100644 index 000000000..151cddaff Binary files /dev/null and b/dongtai_web/upload/masterimg/weblogic-04.png differ diff --git a/dongtai_web/upload/masterimg/websphere-01.png b/dongtai_web/upload/masterimg/websphere-01.png new file mode 100644 index 000000000..d479143bf Binary files /dev/null and b/dongtai_web/upload/masterimg/websphere-01.png differ diff --git a/dongtai_web/upload/masterimg/websphere-02.png b/dongtai_web/upload/masterimg/websphere-02.png new file mode 100644 index 000000000..fc7d32892 Binary files /dev/null and b/dongtai_web/upload/masterimg/websphere-02.png differ diff --git a/dongtai_web/upload/masterimg/websphere-03.png b/dongtai_web/upload/masterimg/websphere-03.png new file mode 100644 index 000000000..6ed7a6537 Binary files /dev/null and b/dongtai_web/upload/masterimg/websphere-03.png differ diff --git a/dongtai_web/upload/reports/report.example b/dongtai_web/upload/reports/report.example new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/urls.py b/dongtai_web/urls.py new file mode 100644 index 000000000..0afeb7351 --- /dev/null +++ b/dongtai_web/urls.py @@ -0,0 +1,399 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/21 15:55 +# software: PyCharm +# project: webapi +import os +from django.urls import path, include +from rest_framework.urlpatterns import format_suffix_patterns + +from dongtai_web.base.update_project_version import UpdateProjectVersion +from dongtai_web.threshold.del_threshold_setting import DelAgentThresholdConfig +from dongtai_web.threshold.del_webhook_setting import DelAgentWebHookConfig +from dongtai_web.threshold.get_config_setting_detail import GetAgentThresholdConfigDetail +from dongtai_web.views.agent_delete import AgentDeleteEndPoint +from dongtai_web.views.agent_deploy import AgentDeploy +from dongtai_web.views.agent_install import AgentInstall +from dongtai_web.views.agent_start import AgentStart +from dongtai_web.views.agent_status_update import AgentStatusUpdate +from dongtai_web.views.agents_delete import AgentsDeleteEndPoint +from dongtai_web.views.agent_stop import AgentStop +from dongtai_web.views.agent_uninstall import AgentUninstall +from dongtai_web.views.agent import Agent +from dongtai_web.views.agents import AgentList +from dongtai_web.views.agents_user import UserAgentList +from dongtai_web.views.agent_summary import AgentSummary +from dongtai_web.views.captcha_create import CaptchaCreate +from dongtai_web.views.documents import DocumentsEndpoint +from dongtai_web.views.engine_hook_rule_add import EngineHookRuleAddEndPoint +from dongtai_web.views.engine_hook_rule_modify import EngineHookRuleModifyEndPoint +from dongtai_web.views.engine_hook_rule_status import EngineHookRuleEnableEndPoint +from 
dongtai_web.views.engine_hook_rule_summary import EngineHookRuleSummaryEndPoint +from dongtai_web.views.engine_hook_rule_type_add import EngineHookRuleTypeAddEndPoint +from dongtai_web.views.engine_hook_rule_type_disable import EngineHookRuleTypeDisableEndPoint +from dongtai_web.views.engine_hook_rule_type_enable import EngineHookRuleTypeEnableEndPoint +from dongtai_web.views.engine_hook_rule_types import EngineHookRuleTypesEndPoint +from dongtai_web.views.engine_hook_rules import EngineHookRulesEndPoint +from dongtai_web.views.engine_method_pool_detail import MethodPoolDetailProxy +from dongtai_web.views.engine_method_pool_sca import EngineMethodPoolSca +from dongtai_web.views.engine_method_pool_search import MethodPoolSearchProxy +from dongtai_web.views.log_clear import LogClear +from dongtai_web.views.log_delete import LogDelete +from dongtai_web.views.log_export import LogExport +from dongtai_web.views.logs import LogsEndpoint +from dongtai_web.views.method_graph import MethodGraph +from dongtai_web.views.openapi import OpenApiEndpoint +from dongtai_web.views.profile import ProfileEndpoint, ProfileBatchGetEndpoint, ProfileBatchModifiedEndpoint +from dongtai_web.views.project_add import ProjectAdd +from dongtai_web.views.project_delete import ProjectDel +from dongtai_web.views.project_detail import ProjectDetail +from dongtai_web.views.project_engines import ProjectEngines +from dongtai_web.views.project_summary import ProjectSummary +from dongtai_web.views.project_search import ProjectSearch +from dongtai_web.views.project_version_add import ProjectVersionAdd +from dongtai_web.views.project_version_current import ProjectVersionCurrent +from dongtai_web.views.project_version_delete import ProjectVersionDelete +from dongtai_web.views.project_version_list import ProjectVersionList +from dongtai_web.views.project_version_update import ProjectVersionUpdate +from dongtai_web.views.projects import Projects + + +from dongtai_web.views.sca_details import ScaDetailView +from dongtai_web.views.sca_summary import ScaSummary +from dongtai_web.views.scas import ScaList +from dongtai_web.views.strategy_disable import StrategyDisableEndpoint +from dongtai_web.views.strategy_enable import StrategyEnableEndpoint +from dongtai_web.views.strategys import StrategysEndpoint, StrategyEndpoint +from dongtai_web.views.strategys_add import StrategyAdd +from dongtai_web.views.strategys_list import StrategyList +from dongtai_web.views.strategys_type import StrategyType +from dongtai_web.views.strategy_delete import StrategyDelete +from dongtai_web.views.strategy_modified import StrategyModified +from dongtai_web.views.system_info import SystemInfo +from dongtai_web.views.user_detail import UserDetailEndPoint +from dongtai_web.views.user_info import UserInfoEndpoint +from dongtai_web.views.user_login import UserLogin +from dongtai_web.views.user_logout import UserLogout +from dongtai_web.views.user_passwrd import UserPassword +from dongtai_web.views.user_passwrd_reset import UserPasswordReset +from dongtai_web.views.user_register_batch import UserRegisterEndPoint +from dongtai_web.views.user_token import UserToken +from dongtai_web.views.vul_count_for_plugin import VulCountForPluginEndPoint +from dongtai_web.views.vul_delete import VulDelete +from dongtai_web.views.vul_details import ( + VulDetail, + VulDetailV2, +) +from dongtai_web.views.vul_list_for_plugin import VulListEndPoint +from dongtai_web.views.vul_recheck import VulReCheck +from dongtai_web.views.vul_request_replay import RequestReplayEndPoint +from 
dongtai_web.views.vul_status import VulStatus +from dongtai_web.views.vul_summary import VulSummary +from dongtai_web.views.vul_summary_type import VulSummaryType +from dongtai_web.views.vul_summary_project import VulSummaryProject +from dongtai_web.views.vuls import VulsEndPoint +from dongtai_web.views.vulnerability_status import VulnerabilityStatusView +from dongtai_web.views.version_update import MethodPoolVersionUpdate +from dongtai_web.views.demo import Demo +from static.i18n.views.setlang import LanguageSetting +from dongtai_web.views.api_route_search import ApiRouteSearch +from dongtai_web.views.api_route_related_request import ApiRouteRelationRequest +from dongtai_web.views.api_route_cover_rate import ApiRouteCoverRate + +from dongtai_web.views.program_language import ProgrammingLanguageList +from dongtai_web.views.filereplace import FileReplace +from dongtai_web.views.messages_list import MessagesEndpoint +from dongtai_web.views.messages_new import MessagesNewEndpoint +from dongtai_web.views.messages_del import MessagesDelEndpoint +from dongtai_web.views.agent_alias_modified import AgentAliasModified +from dongtai_web.views.engine_method_pool_time_range import MethodPoolTimeRangeProxy + +from dongtai_web.views.vul_levels import VulLevelList +from dongtai_web.views.sensitive_info_rule import ( + SensitiveInfoRuleViewSet, + SensitiveInfoPatternTypeView, + SensitiveInfoPatternValidationView, + SensitiveInfoRuleBatchView, + SensitiveInfoRuleAllView, +) +from dongtai_web.views.scan_strategys import ( + ScanStrategyViewSet, + ScanStrategyRelationProject, + ScanStrategyAllView, +) +from dongtai_web.views.details_id import (AgentListWithid, ProjectListWithid, + ScaListWithid, VulsListWithid) +from dongtai_web.views.vul_recheck_v2 import VulReCheckv2 +from dongtai_web.threshold.config_setting import AgentThresholdConfig +from dongtai_web.threshold.webhook_setting import AgentWebHookConfig +from dongtai_web.threshold.get_webhook_setting import GetAgentWebHookConfig +from dongtai_web.threshold.webhook_type import AgentWebHookTypeList +from dongtai_web.threshold.get_config_setting import GetAgentThresholdConfig +from dongtai_web.views.log_download import AgentLogDownload + +from dongtai_web.threshold.agent_core_status import (AgentCoreStatusUpdate, + AgentCoreStatusUpdateALL) +from dongtai_web.aggregation.aggregation_del import DelVulMany + +from dongtai_web.threshold.config_setting import ( + AgentThresholdConfigV2, ) +from dongtai_web.vul_log.vul_log_view import VulLogViewSet +from dongtai_web.vul_recheck_payload.vul_recheck_payload import VulReCheckPayloadViewSet + +urlpatterns = [ + path('user/', UserDetailEndPoint.as_view()), + path('user/changePassword', UserPassword.as_view()), + path('user/login', UserLogin.as_view()), + path('user/logout', UserLogout.as_view()), + path('user/info', UserInfoEndpoint.as_view()), + path('user/token', UserToken.as_view()), + path('user/password/reset', UserPasswordReset.as_view()), + path('captcha/', include('captcha.urls')), + path(r'captcha/refresh', CaptchaCreate.as_view()), + path('project/', ProjectDetail.as_view()), + path('project/add', ProjectAdd.as_view()), + path('project/delete', ProjectDel.as_view()), + path('projects', Projects.as_view()), + path('projects/summary/', ProjectSummary.as_view()), + path('project/engines/', ProjectEngines.as_view()), + path('project/search', ProjectSearch.as_view()), + path('project/version/add', ProjectVersionAdd.as_view()), + path('project/version/update', ProjectVersionUpdate.as_view()), + 
path('project/version/delete', ProjectVersionDelete.as_view()), + path('project/version/current', ProjectVersionCurrent.as_view()), + path('project/version/list/', + ProjectVersionList.as_view()), + path('project/version/check', UpdateProjectVersion.as_view()), + path('vulns', VulsEndPoint.as_view()), + path('vuln/summary', VulSummary.as_view()), + path('vuln/summary_type', VulSummaryType.as_view()), + path('vuln/summary_project', VulSummaryProject.as_view()), + # path('vuln/list', VulSideBarList.as_view()), Departured + path('vuln/', VulDetail.as_view()), + path('vuln/status', VulStatus.as_view()), + path('vuln/delete/', VulDelete.as_view()), + path('vul/recheck', VulReCheck.as_view()), + path('vul/status_list', VulnerabilityStatusView.as_view()), + path('plugin/vuln/list', VulListEndPoint.as_view()), + path('plugin/vuln/count', VulCountForPluginEndPoint.as_view()), + path('scas', ScaList.as_view()), + path('sca/summary', ScaSummary.as_view()), + # path('sca/list', ScaSidebarList.as_view()), Departured + path('sca/', ScaDetailView.as_view()), + path('strategys', StrategysEndpoint.as_view()), + path('strategy/', StrategyEndpoint.as_view()), + path('strategy//enable', StrategyEnableEndpoint.as_view()), + path('strategy//disable', StrategyDisableEndpoint.as_view()), + path('strategy//delete', StrategyDelete.as_view()), + path('strategy//update', StrategyModified.as_view()), + path('strategy/types', StrategyType.as_view()), + path('strategy/user/add', StrategyAdd.as_view()), + path('strategy/user/list', StrategyList.as_view()), + path('agent/', Agent.as_view()), + path('agent/deploy/', AgentDeploy.as_view()), + # path('agent/deploy/doc', AgentDeployDesc.as_view()), Departured + # path('agent/deploy/info', AgentDeployInfo.as_view()), + # path('agent/deploy/submit', AgentDeploySave.as_view()), + path('agents', AgentList.as_view()), + path('agent//delete', AgentDeleteEndPoint.as_view()), + path('agents/user', UserAgentList.as_view()), + path('agent/install', AgentInstall.as_view()), + path('agent/uninstall', AgentUninstall.as_view()), + #path('agent/upgrade/online', AgentUpgradeOnline.as_view()), + # path('agent/upgrade/offline', AgentUpgradeOffline.as_view()), + path('agent/status/update', AgentStatusUpdate.as_view()), + path('agent/start', AgentStart.as_view()), + path('agent/stop', AgentStop.as_view()), + # path('agents/search', AgentSearch.as_view()), + path('agents/delete', AgentsDeleteEndPoint.as_view()), + path('agent/alias/modified', AgentAliasModified.as_view()), + path('openapi', OpenApiEndpoint.as_view()), + path('profile/', ProfileEndpoint.as_view()), + path('profile/batch/get', ProfileBatchGetEndpoint.as_view()), + path('profile/batch/modified', ProfileBatchModifiedEndpoint.as_view()), + path('system/info', SystemInfo.as_view()), + path('logs', LogsEndpoint.as_view()), + path('log/export', LogExport.as_view()), + path('log/delete', LogDelete.as_view()), + path('log/clear', LogClear.as_view()), + path('engine/method_pool/search', MethodPoolSearchProxy.as_view()), + path('engine/method_pool/detail', MethodPoolDetailProxy.as_view()), + path('engine/method_pool/timerange', MethodPoolTimeRangeProxy.as_view()), + path('engine/method_pool/sca', EngineMethodPoolSca.as_view()), + path('engine/graph', MethodGraph.as_view()), + path('engine/request/replay', RequestReplayEndPoint.as_view()), + path('engine/hook/rule/summary', EngineHookRuleSummaryEndPoint.as_view()), + path('engine/hook/rule/add', EngineHookRuleAddEndPoint.as_view()), + path('engine/hook/rule/modify', 
EngineHookRuleModifyEndPoint.as_view()), + path('engine/hook/rule/status', EngineHookRuleEnableEndPoint.as_view()), + path('engine/hook/rule_type/add', EngineHookRuleTypeAddEndPoint.as_view()), + path('engine/hook/rule_type/disable', + EngineHookRuleTypeDisableEndPoint.as_view()), + path('engine/hook/rule_type/enable', + EngineHookRuleTypeEnableEndPoint.as_view()), + path('engine/hook/rule_types', EngineHookRuleTypesEndPoint.as_view()), + path('engine/hook/rules', EngineHookRulesEndPoint.as_view()), + path('documents', DocumentsEndpoint.as_view()), + path('version_update/K23DiutPrwpoqAddqNbHUk', + MethodPoolVersionUpdate.as_view()), + path('i18n/setlang', LanguageSetting.as_view()), + path('api_route/search', ApiRouteSearch.as_view()), + path('api_route/relationrequest', ApiRouteRelationRequest.as_view()), + path('api_route/cover_rate', ApiRouteCoverRate.as_view()), + path('program_language', ProgrammingLanguageList.as_view()), + path('filereplace/', FileReplace.as_view()), + path('message/list', MessagesEndpoint.as_view()), + path('message/unread_count', MessagesNewEndpoint.as_view()), + path('message/delete', MessagesDelEndpoint.as_view()), + path('vul_levels', VulLevelList.as_view()), + # path('message/send', MessagesSendEndpoint.as_view()), + path('sensitive_info_rule', + SensitiveInfoRuleViewSet.as_view({ + 'get': 'list', + 'post': 'create' + })), + path( + 'sensitive_info_rule/', + SensitiveInfoRuleViewSet.as_view({ + 'get': 'retrieve', + 'put': 'update', + 'delete': 'destory' + })), + path('sensitive_info_rule/pattern_type', + SensitiveInfoPatternTypeView.as_view()), + path('sensitive_info_rule/_validation', + SensitiveInfoPatternValidationView.as_view()), + path('scan_strategy', + ScanStrategyViewSet.as_view({ + 'get': 'list', + 'post': 'create' + })), + path( + 'scan_strategy/', + ScanStrategyViewSet.as_view({ + 'get': 'retrieve', + 'put': 'update', + 'delete': 'destory' + })), + path('scan_strategy//relationprojects', + ScanStrategyRelationProject.as_view()), + path('sensitive_info_rule/batch_update', + SensitiveInfoRuleBatchView.as_view()), + path('sensitive_info_rule/all', SensitiveInfoRuleAllView.as_view()), + path('scan_strategy/all', ScanStrategyAllView.as_view()), + path('agent/list/ids', AgentListWithid.as_view()), + path('vul/list/ids', VulsListWithid.as_view()), + path('sca/list/ids', ScaListWithid.as_view()), + path('project/list/ids', ProjectListWithid.as_view()), + # user settings disaster recovery strategy + path('threshold/settings', AgentThresholdConfig.as_view()), + # get user settings disaster recovery strategy GetAgentThresholdConfig + path('threshold/settings/get', GetAgentThresholdConfig.as_view()), + path('threshold/settings/get/', + GetAgentThresholdConfigDetail.as_view()), + path('threshold/settings/del', DelAgentThresholdConfig.as_view()), + # user webhook setting agent static report forward + path('webhook/settings', AgentWebHookConfig.as_view()), + path('webhook/type/list', AgentWebHookTypeList.as_view()), + path('webhook/type/del', DelAgentWebHookConfig.as_view()), + + # get webHook setting + path('webhook/settings/get', GetAgentWebHookConfig.as_view()), + path('agent/core/update', AgentCoreStatusUpdate.as_view()), + path('agent/core/update/all', AgentCoreStatusUpdateALL.as_view()), + path('agent/summary/', AgentSummary.as_view()), + + # 消息通知规则配置 + path('agent/log/batch', + AgentLogDownload.as_view({'post': 'batch_task_add'})), + path('agent/log/', AgentLogDownload.as_view({'get': + 'get_single'})), + path('agent/log/batch/', + 
AgentLogDownload.as_view({'get': 'batch_log_download'})), + + # vul list page of sca and common vul + path('vul_list_delete', DelVulMany.as_view()), + path('circuit_config', + AgentThresholdConfigV2.as_view({ + "post": "create", + "get": "list" + })), + path('circuit_config/enum/all', + AgentThresholdConfigV2.as_view({"get": "enumall"})), + path('circuit_config//priority', + AgentThresholdConfigV2.as_view({"put": "change_priority"})), + path('circuit_config//reset', + AgentThresholdConfigV2.as_view({"put": "reset"})), + path('circuit_config/enum/', + AgentThresholdConfigV2.as_view({"get": "enum"})), + path( + 'circuit_config/', + AgentThresholdConfigV2.as_view({ + "put": "update", + "delete": "delete", + "get": "retrieve" + })), + path("vullog/", VulLogViewSet.as_view({"get": "list"})), + path( + 'vul_recheck_payload/', + VulReCheckPayloadViewSet.as_view({ + 'get': "retrieve", + 'put': 'update', + 'delete': 'delete' + })), + path('vul_recheck_payload', + VulReCheckPayloadViewSet.as_view({ + 'get': "list", + 'post': "create", + })), + path('vul_recheck_payload/status', + VulReCheckPayloadViewSet.as_view({ + 'put': "status_change", + })), +] +if os.getenv('environment', None) in ('TEST', 'PROD'): + # demo接口 + urlpatterns.extend([ + path('demo', Demo.as_view()), + path('user/register', UserRegisterEndPoint.as_view()), + path('user/register/', UserRegisterEndPoint.as_view()), + ]) +if os.getenv('githubcount', None) in ('true', ) or os.getenv('environment', None) in ('PROD',): + from dongtai_web.views.github_contributors import GithubContributorsView + urlpatterns.extend([ + path('github_contributors', GithubContributorsView.as_view()), + ]) +from dongtai_web.views.agents_v2 import AgentListv2 +from dongtai_web.aggr_vul.aggr_vul_list import GetAggregationVulList +from dongtai_web.aggr_vul.aggr_vul_summary import GetScaSummary +from dongtai_web.aggr_vul.app_vul_list import GetAppVulsList +from dongtai_web.aggr_vul.app_vul_summary import GetAppVulsSummary + +urlpatterns = [path('api/v1/', include(urlpatterns))] +urlpatterns.extend([ + path('api/v2/vul/recheck', VulReCheckv2.as_view()), + path('api/v2/vuln/', VulDetailV2.as_view()), + path('api/v2/agents', AgentListv2.as_view({"get": "pagenation_list"})), + path('api/v2/agents/summary', AgentListv2.as_view({"get": "summary"})), + path('api/v2/agents/stat', AgentListv2.as_view({"get": "agent_stat"})), + # 组件漏洞 列表 + path('api/v2/sca_vul_list_content', GetAggregationVulList.as_view()), + # 组件漏洞 汇总 + path('api/v2/sca_vul_summary', GetScaSummary.as_view()), + path('api/v2/app_vul_list_content', GetAppVulsList.as_view()), + path('api/v2/app_vul_summary', GetAppVulsSummary.as_view()), +]) +from dongtai_web.scaupload.urls import urlpatterns as scaupload_urls +from dongtai_web.apitimelog.urls import urlpatterns as apitimelog_urls +from dongtai_web.dongtai_sca.urls import urlpatterns as sca_urls +from dongtai_web.versioncontrol.urls import urlpatterns as versioncontrol_urls +from dongtai_web.systemmonitor.urls import urlpatterns as systemmonitor_urls + +urlpatterns.extend(scaupload_urls) +urlpatterns.extend(apitimelog_urls) +urlpatterns.extend(sca_urls) +urlpatterns.extend(versioncontrol_urls) +urlpatterns.extend(systemmonitor_urls) + +urlpatterns = format_suffix_patterns(urlpatterns) diff --git a/dongtai_web/utils.py b/dongtai_web/utils.py new file mode 100644 index 000000000..9ac295964 --- /dev/null +++ b/dongtai_web/utils.py @@ -0,0 +1,287 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:Bidaya0 +# datetime:2021/7/27 12:06 +# software: Vim8 +# 
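# Several detail routes above (vuln, strategy, agent, circuit_config and so on)
# address a single object by its primary key. A sketch of how such routes are
# typically declared with Django path converters; the <int:pk> captures and the
# stand-in view below are an assumption for illustration, not the literal routes:
from django.http import JsonResponse
from django.urls import path

def _demo_view(request, pk):                 # stand-in for the real class-based views
    return JsonResponse({"pk": pk})

urlpatterns_example = [
    path("vuln/<int:pk>", _demo_view),                       # pk captured as an int
    path("strategy/<int:pk>/enable", _demo_view),
    path("circuit_config/<int:pk>/priority", _demo_view),
]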
project: webapi + +from dongtai_common.models.profile import IastProfile +from requests.exceptions import ConnectionError, ConnectTimeout +import logging +import json +import requests +from urllib.parse import urlparse +import uuid +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ +from django.utils.text import format_lazy +from django.utils.translation import get_language +from rest_framework.serializers import SerializerMetaclass +from functools import reduce +from django.db.models import Q +import operator +import hashlib +from dongtai_common.models.api_route import IastApiRoute, IastApiMethod, IastApiRoute, HttpMethod, IastApiResponse, IastApiMethodHttpMethodRelation +from dongtai_common.models.agent_method_pool import MethodPool +from rest_framework.serializers import Serializer +from dongtai_conf.settings import OPENAPI + + +def get_model_field(model, exclude=[], include=[]): + fields = [field.name for field in model._meta.fields] + if include: + return [ + field for field in list(set(fields) - set(exclude)) + if field in include + ] + return list(set(fields) - set(exclude)) + + +def get_model_order_options(*args, **kwargs): + order_fields = get_model_field(*args, **kwargs) + return order_fields + list(map(lambda x: ''.join(['-', x]), order_fields)) + + +def assemble_query(condictions: dict, + lookuptype='', + base_query=Q(), + operator_=operator.or_): + return reduce( + operator_, + map( + lambda x: Q(**x), + map( + lambda kv_pair: { + '__'.join(filter(lambda x: x, [kv_pair[0], lookuptype])): + kv_pair[1] + }, condictions)), base_query) + + +def assemble_query_2(condictions: dict, + lookuptype='', + base_query=Q(), + operator_=operator.or_): + return reduce( + operator_, + map( + lambda x: ~Q(**x), + map( + lambda kv_pair: { + '__'.join(filter(lambda x: x, [kv_pair[0], lookuptype])): + kv_pair[1] + }, condictions)), base_query) + + +def extend_schema_with_envcheck(querys: list = [], + request_bodys: list = [], + response_bodys: list = [], + response_schema=None, + **kwargs): + def myextend_schema(func): + import os + if os.getenv( + 'environment', + None) in ( + 'TEST', + 'DOC') or os.getenv( + 'DOC', + None) == 'TRUE': + from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiExample, OpenApiTypes + from drf_spectacular.utils import OpenApiResponse + parameters = list(filter(lambda x: x, map(_filter_query, querys))) + request_examples = list( + filter(lambda x: x, map(_filter_request_body, request_bodys))) + response_examples = list( + filter(lambda x: x, map(_filter_response_body, + response_bodys))) + examples = request_examples + response_examples + if kwargs.get('request', None) and request_examples: + kwargs['request'] = {'application/json': OpenApiTypes.OBJECT} + elif isinstance(kwargs.get('request', None), + SerializerMetaclass): + kwargs['request'] = {'application/json': kwargs['request']} + elif kwargs.get('request', None): + kwargs['request'] = {'application/json': kwargs['request']} + deco = extend_schema( + parameters=parameters, + examples=examples if examples else None, + responses={ + 200: OpenApiResponse( + description=_('The http status codes are both 200, please use the status and msg field returned by the response data to troubleshoot'), + response=response_schema)}, + **kwargs) + funcw = deco(func) + funcw.querys = querys + funcw.request_body = request_bodys if request_bodys else [] + return funcw + return func + + return myextend_schema + + +def get_response_serializer(data_serializer=None, + 
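# assemble_query above folds a mapping of field conditions into a single Q
# expression with reduce(). A small standalone illustration of that composition;
# Django only needs to be importable, since building Q objects touches neither
# settings nor the database:
import operator
from functools import reduce
from django.db.models import Q

conditions = {"uri": "/login", "http_method": "POST"}
lookup = "icontains"

query = reduce(
    operator.or_,                            # OR the per-field filters together
    (Q(**{f"{field}__{lookup}": value}) for field, value in conditions.items()),
    Q(),                                     # empty Q as the neutral starting point
)
print(query)                                 # uri__icontains OR http_method__icontains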
msg_list=None, + status_msg_keypair=None): + status_msg_keypair = ( + ((201, 'success'), + 'success'), ) if status_msg_keypair is None else status_msg_keypair + msg_list = list( + set(map(lambda x: x[1], map(lambda x: x[0], status_msg_keypair)))) + status_list = list( + set(map(lambda x: x[0], map(lambda x: x[0], status_msg_keypair)))) + msg_list = ['success'] if msg_list is None else msg_list + status_list = [201] if status_list is None else status_list + newclass = type( + str(uuid.uuid1()), (serializers.Serializer, ), { + 'status': + serializers.IntegerField(default=201, + help_text=format_lazy( + "{} :" + "{} ; " * len(status_list), + *([_("status code")] + status_list))), + 'msg': + serializers.CharField( + default='success', + help_text=format_lazy( + "{} :" + "{} ; " * len(msg_list), + *([_("human readable message")] + msg_list))), + 'data': + data_serializer + }) + return newclass + + +def _filter_query(item): + from drf_spectacular.utils import OpenApiParameter + if isinstance(item, SerializerMetaclass): + return item + elif isinstance(item, dict): + return OpenApiParameter(**item) + + +def _filter_request_body(item): + from drf_spectacular.utils import OpenApiExample + if isinstance(item, dict): + item['request_only'] = True + return OpenApiExample(**item) + + +def _filter_response_body(item): + from drf_spectacular.utils import OpenApiExample + if isinstance(item, dict): + item['response_only'] = True + return OpenApiExample(**item) + + +def _map_response_description(item): + """ + struct like {(1,2):'3'} + """ + key, value = item + return "{} : {} : {}".format(key[0], key[1], value) + + +def _reduce_response_description(itema, itemb): + return "{} \n{} ".format(itema, itemb) + + +def batch_queryset(queryset, batch_size=1): + iter_ = 0 + while True: + queryset_ = list(queryset[iter_:iter_ + 1]) + iter_ += 1 + if not queryset_: + break + else: + yield queryset_[0] + + +def checkcover(api_route, agents, http_method=None): + uri_hash = hashlib.sha1(api_route.path.encode('utf-8')).hexdigest() + api_method_id = api_route.method_id + q = Q(agent_id__in=[_['id'] for _ in agents]) + if http_method: + http_method_ids = IastApiMethodHttpMethodRelation.objects.filter( + api_method_id=api_method_id).values('api_method_id') + http_methods = HttpMethod.objects.filter( + pk__in=http_method_ids).all().values_list('method') + q = q & Q(http_method__in=http_methods) + q = q & Q(uri_sha1=uri_hash) + if MethodPool.objects.filter(q).exists(): + return True + return False + + +def checkcover_batch(api_route, agents): + uri_hash = [hashlib.sha1(api_route.path.encode('utf-8')).hexdigest() + for api_route in api_route.only('path')] + cover_count = MethodPool.objects.filter( + uri_sha1__in=uri_hash, + agent__in=agents).values('uri_sha1').distinct().count() + return cover_count + + +def apiroute_cachekey(api_route, agents, http_method=None): + agent_id = sha1(str([_['id'] for _ in agents])) + http_method = str(http_method) + return "{}_{}_{}".format(agent_id, http_method, api_route.id) + + +def sha1(string, encoding='utf-8'): + return hashlib.sha1(string.encode(encoding)).hexdigest() + + +def get_openapi(): + profilefromdb = IastProfile.objects.filter(key='apiserver').values_list( + 'value', flat=True).first() + profilefromini = OPENAPI + profiles = list( + filter(lambda x: x is not None, [profilefromini, profilefromdb])) + if profiles == []: + return None + return profiles[0] + + +def validate_url(url): + try: + result = urlparse(url) + return all([result.scheme, result.netloc]) + except BaseException: + 
return False + return True + + +logger = logging.getLogger('dongtai-dongtai_conf') + + +def checkopenapistatus(openapiurl, token): + try: + resp = requests.get( + openapiurl, + timeout=10, + headers={'Authorization': "Token {}".format(token)}) + resp = json.loads(resp.content) + resp = resp.get("data", None) + except (ConnectionError, ConnectTimeout): + return False, None + except Exception as e: + logger.info("HealthView_{}:{}".format(openapiurl, e)) + return False, None + return True, resp + + +METHOD_OVERRIDE_HEADER = 'HTTP_X_HTTP_METHOD_OVERRIDE' + + +class MethodOverrideMiddleware: + + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + if request.method == 'POST' and METHOD_OVERRIDE_HEADER in request.META: + request.method = request.META[METHOD_OVERRIDE_HEADER] + return self.get_response(request) + + +def dict_transfrom(dic: dict, key: str): + return {i[key]: i for i in dic} diff --git a/dongtai_web/versioncontrol/__init__.py b/dongtai_web/versioncontrol/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/versioncontrol/admin.py b/dongtai_web/versioncontrol/admin.py new file mode 100644 index 000000000..8c38f3f3d --- /dev/null +++ b/dongtai_web/versioncontrol/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/dongtai_web/versioncontrol/apps.py b/dongtai_web/versioncontrol/apps.py new file mode 100644 index 000000000..b69a81420 --- /dev/null +++ b/dongtai_web/versioncontrol/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class VersioncontrolConfig(AppConfig): + name = 'versioncontrol' diff --git a/dongtai_web/versioncontrol/models.py b/dongtai_web/versioncontrol/models.py new file mode 100644 index 000000000..71a836239 --- /dev/null +++ b/dongtai_web/versioncontrol/models.py @@ -0,0 +1,3 @@ +from django.db import models + +# Create your models here. diff --git a/dongtai_web/versioncontrol/tests.py b/dongtai_web/versioncontrol/tests.py new file mode 100644 index 000000000..7ce503c2d --- /dev/null +++ b/dongtai_web/versioncontrol/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/dongtai_web/versioncontrol/urls.py b/dongtai_web/versioncontrol/urls.py new file mode 100644 index 000000000..ca8384990 --- /dev/null +++ b/dongtai_web/versioncontrol/urls.py @@ -0,0 +1,26 @@ +"""webapi URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/3.0/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. 
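# MethodOverrideMiddleware above lets clients that can only send POST tunnel
# other verbs through the X-HTTP-Method-Override header. A client-side sketch;
# the URL, id and token are placeholders:
import requests

# Sent as a POST on the wire; the middleware rewrites it to DELETE server-side.
resp = requests.post(
    "http://localhost:8000/api/v1/circuit_config/1",    # placeholder endpoint and pk
    headers={
        "Authorization": "Token <your-token>",          # assumed DRF token auth
        "X-HTTP-Method-Override": "DELETE",
    },
)
print(resp.status_code)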
Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.conf.urls.static import static +from django.urls import include, path +import os +from dongtai_web.versioncontrol.views import VersionListView + +urlpatterns = [ + path('versionlist', VersionListView.as_view()), +] + + +urlpatterns = [path('api/v1/version_control/', include(urlpatterns))] diff --git a/dongtai_web/versioncontrol/views.py b/dongtai_web/versioncontrol/views.py new file mode 100644 index 000000000..1bb946082 --- /dev/null +++ b/dongtai_web/versioncontrol/views.py @@ -0,0 +1,27 @@ +from django.shortcuts import render +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.version_control import VersionControl +import json +# Create your views here. + +COMPONENT_LIST = ("DongTai", "DongTai-agent-java", "DongTai-agent-python", + "DongTai-engine", "DongTai-openapi", "DongTai-webapi") + + +class VersionListView(UserEndPoint): + def get(self, request): + component_datas = VersionControl.objects.filter( + component_name__in=COMPONENT_LIST).all() + data = {} + for component_data in component_datas: + data[component_data.component_name] = { + "version": component_data.version, + "commit_hash": component_data.component_version_hash + } + if not data[component_data.component_name]['commit_hash']: + del data[component_data.component_name]['commit_hash'] + if component_data.additional: + additional_data = json.loads(component_data.additional) + data[component_data.component_name].update(additional_data) + return R.success(data=data) diff --git a/dongtai_web/views/__init__.py b/dongtai_web/views/__init__.py new file mode 100644 index 000000000..3485d0176 --- /dev/null +++ b/dongtai_web/views/__init__.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +#-*- coding:utf-8 -*- +# author:shengnanwu +# datetime:2020/5/21 15:50 +# software: PyCharm +# project: webapi + + +AGENT_STATUS = { + 0: { + "key": "无下发指令", + "value": "notcmd", + }, + 2: { + "key": "注册启动引擎", + "value": "coreRegisterStart", + }, + 3: { + "key": "开启引擎核心", + "value": "coreStart", + }, + 4: { + "key": "关闭引擎核心", + "value": "coreStop", + }, + 5: { + "key": "卸载引擎核心", + "value": "coreUninstall", + }, + 6: { + "key": "强制开启引擎核心性能熔断", + "value": "corePerformanceForceOpen", + }, + 7: { + "key": "强制关闭引擎核心性能熔断", + "value": "corePerformanceForceClose", + }, + 8: { + "key": "Agent升级", + "value": "update", + }, +} diff --git a/dongtai_web/views/agent.py b/dongtai_web/views/agent.py new file mode 100644 index 000000000..1d6e8c847 --- /dev/null +++ b/dongtai_web/views/agent.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad + +# software: PyCharm +# project: lingzhi-webapi +import logging + +from dongtai_common.endpoint import UserEndPoint, R + +from dongtai_common.utils import const +from dongtai_web.utils import get_model_field +from dongtai_common.models.agent import IastAgent +from django.forms.models import model_to_dict +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers + +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + + +class _AgentResponseDataAgentSerializer(serializers.ModelSerializer): + token = serializers.CharField(help_text=_('The name of agent')) + id = serializers.CharField(help_text=_('The id of agent')) + version = serializers.CharField(help_text=_('The version of agent')) + latest_time = serializers.IntegerField( + help_text=_('The latest update time of agent')) + is_running = 
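# The AGENT_STATUS table above maps each agent control code to a Chinese display
# name plus a machine value. A rough English rendering of the same codes, kept
# here for reference (our translation, not an official mapping):
AGENT_STATUS_EN = {
    0: "no pending command",
    2: "register and start engine core",
    3: "start engine core",
    4: "stop engine core",
    5: "uninstall engine core",
    6: "force-enable engine core performance circuit breaker",
    7: "force-disable engine core performance circuit breaker",
    8: "agent upgrade",
}
print(AGENT_STATUS_EN[4])   # -> "stop engine core"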
serializers.IntegerField( + help_text=_('The running status of agent')) + is_core_running = serializers.IntegerField( + help_text=_('The running status of agent')) + control = serializers.IntegerField( + help_text=_('agent control bit, 1-install, 2-uninstall, 0-no control')) + is_control = serializers.IntegerField( + help_text=_("Whether it is in control, 0-No, 1-Yes")) + bind_project_id = serializers.IntegerField( + help_text=_('Bundled project ID, if it exists, it will be bundled.'), + default=0) + project_name = serializers.CharField(help_text=_( + "Project name, used to start the agent first and then create the project" + )) + online = serializers.IntegerField(help_text=_( + "1 is running online, 0 is not running, same token, only one online")) + project_version_id = serializers.IntegerField(help_text=_( + "Bundled project version ID, if it exists, it will be bundled"), + default=0) + language = serializers.CharField( + help_text=_("Agent language currently included in the project")) + is_audit = serializers.IntegerField( + help_text=_("Agent audit status")) + + class Meta: + model = IastAgent + fields = [ + 'id', 'token', 'version', 'latest_time', 'is_running', + 'is_core_running', 'control', 'is_control', 'bind_project_id', + 'project_name', 'online', 'project_version_id', 'language', 'is_audit' + ] + + +class _AgentResponseDataSerializer(serializers.Serializer): + agent = _AgentResponseDataAgentSerializer() + + +_ResponseSerializer = get_response_serializer( + data_serializer=_AgentResponseDataSerializer(), ) + + +class Agent(UserEndPoint): + @extend_schema_with_envcheck( + tags=[_('Agent')], + summary=_('Agent Detail'), + description=_( + "Delete the specified project version according to the conditions." + ), + response_schema=_ResponseSerializer, + ) + def get(self, request, id_): + try: + a = int(id_) > 0 + if not a: + return R.failure(msg=_("Can't find relevant data")) + except BaseException: + return R.failure(msg=_("Can't find relevant data")) + agent = IastAgent.objects.filter(pk=id_).first() + if agent: + return R.success(data={'agent': model_to_dict(agent)}) + return R.failure(msg=_("Can't find relevant data")) diff --git a/dongtai_web/views/agent_alias_modified.py b/dongtai_web/views/agent_alias_modified.py new file mode 100644 index 000000000..ff7295e1d --- /dev/null +++ b/dongtai_web/views/agent_alias_modified.py @@ -0,0 +1,46 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : agent_alias_modified +# @created : 星期三 10月 20, 2021 11:20:20 CST +# +# @description : +###################################################################### + +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_common.endpoint import UserEndPoint, R +from rest_framework import serializers +from dongtai_common.models.agent import IastAgent +from rest_framework.serializers import ValidationError +from django.utils.translation import gettext_lazy as _ + +class AgentAliasArgsSerializer(serializers.Serializer): + id = serializers.IntegerField( + help_text=_('The id corresponding to the agent.')) + alias = serializers.CharField( + help_text=_('The alias corresponding to the agent.')) + + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('modified successfully')), ''), + ((202, _('Agent does not exist or no permission to access')), ''), + ((202, _('Error while deleting, please try again later')), ''), +)) + +class AgentAliasModified(UserEndPoint): + 
@extend_schema_with_envcheck( + tags=[_('Agent')], + request=AgentAliasArgsSerializer, + summary=_('Agent Alias Modified'), + description=_("Modified the agent alias"), + response_schema=_ResponseSerializer, + ) + def post(self, request): + ser = AgentAliasArgsSerializer(data=request.data) + try: + if ser.is_valid(True): + id_ = ser.validated_data['id'] + alias = ser.validated_data['alias'] + except ValidationError as e: + return R.failure(data=e.detail) + IastAgent.objects.filter(pk=id_).update(alias=alias) + return R.success(msg=_("modified successfully")) diff --git a/dongtai_web/views/agent_delete.py b/dongtai_web/views/agent_delete.py new file mode 100644 index 000000000..104b6b221 --- /dev/null +++ b/dongtai_web/views/agent_delete.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad + +# software: PyCharm +# project: lingzhi-webapi + +import logging + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.asset import Asset +from dongtai_common.models.errorlog import IastErrorlog +from dongtai_common.models.heartbeat import IastHeartbeat +from dongtai_common.models.iast_overpower_user import IastOverpowerUserAuth +from dongtai_common.models.replay_method_pool import IastAgentMethodPoolReplay +from dongtai_common.models.replay_queue import IastReplayQueue +from dongtai_common.models.vulnerablity import IastVulnerabilityModel + +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_method_pool import MethodPool +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +logger = logging.getLogger('dongtai-webapi') + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Agent and related data deleted successfully')), ''), + ((202, _('Agent does not exist or no permission to access')), ''), + ((202, _('Error while deleting, please try again later')), ''), +)) + + +class AgentDeleteEndPoint(UserEndPoint): + name = "api-v1-agent--delete" + description = _("Delete Agent") + + @extend_schema_with_envcheck( + tags=[_('Agent')], + summary=_('Agent Delete'), + description=_( + "Delete the agent by specifying the id." 
+ ), + response_schema=_ResponseSerializer) + def get(self, request, pk=None): + try: + user = request.user + queryset = IastAgent.objects.filter(user=user, id=pk).first() + if queryset: + self.agent = queryset + self.delete_error_log() + self.delete_heart_beat() + # self.delete_vul_overpower() + self.delete_sca() + self.delete_vul() + self.delete_method_pool() + self.delete_method_pool_replay() + self.delete_replay_queue() + self.agent.delete() + + return R.success(msg=_("Agent and related data deleted successfully")) + else: + return R.failure(msg=_("Agent does not exist or no permission to access")) + except Exception as e: + logger.error('user_id:{request.user.id} msg:{e}') + return R.failure(msg=_("Error while deleting, please try again later")) + + def delete_error_log(self): + try: + deleted, _rows_count = IastErrorlog.objects.filter(agent=self.agent).delete() + logger.error(_('Error logs deleted successfully, Deletion Amount: {}').format(deleted)) + except Exception as e: + logger.error(_('Failed to delete error logs, probe ID: {}, error message: {}').format(self.agent.id,e)) + + def delete_heart_beat(self): + try: + deleted, _rows_count = IastHeartbeat.objects.filter(agent=self.agent).delete() + logger.error(_('The replay request method pool data was successfully deleted, A total of {} replay requests are deleted').format(deleted)) + except Exception as e: + logger.error(_('Failed to delete heartbeat data, error message: {}').format(e)) + + def delete_vul_overpower(self): + try: + deleted, _rows_count = IastOverpowerUserAuth.objects.filter(agent=self.agent).delete() + logger.error(_('The replay request method pool data was successfully deleted, A total of {} replay requests are deleted').format(deleted)) + except Exception as e: + logger.error(_('Failed to delete unauthorized data, error message: {}').format(e)) + + def delete_vul(self): + try: + deleted, _rows_count = IastVulnerabilityModel.objects.filter(agent=self.agent).delete() + logger.error(_('The replay request method pool data was successfully deleted, A total of {} replay requests are deleted').format(deleted)) + except Exception as e: + logger.error(_('Failed to delete vulnerability data, error message: {}'.format(e))) + + def delete_sca(self): + try: + deleted, _rows_count = Asset.objects.filter(agent=self.agent).delete() + logger.error(_('The replay request method pool data was successfully deleted, A total of {} replay requests are deleted').format(deleted)) + except Exception as e: + logger.error(_('Failed to delete third-party component data, error message: {}').format(e)) + + def delete_method_pool(self): + try: + deleted, _rows_count = MethodPool.objects.filter(agent=self.agent).delete() + logger.error(_('The replay request method pool data was successfully deleted, A total of {} replay requests are deleted').format(deleted)) + except Exception as e: + logger.error(_('Failed to delete method pool data, error message: {}').format(e)) + + def delete_method_pool_replay(self): + try: + deleted, _rows_count = IastAgentMethodPoolReplay.objects.filter(agent=self.agent).delete() + logger.error(_('The replay request method pool data was successfully deleted, A total of {} replay requests are deleted').format(deleted)) + except Exception as e: + logger.error(_('Failed to delete replay request method pool data, error message: {}'.format(e))) + + def delete_replay_queue(self): + try: + deleted, _rows_count = IastReplayQueue.objects.filter(agent=self.agent).delete() + logger.error(_('Replay request queue deleted successfully, 
Deletion amount: {}').format(deleted)) + except Exception as e: + logger.error(_('Failed to delete replay request queue, error message: {}').format(e)) + + +if __name__ == '__main__': + + MethodPool.objects.count() + IastErrorlog.objects.count() + IastHeartbeat.objects.count() + IastOverpowerUserAuth.objects.count() + Asset.objects.count() + IastVulnerabilityModel.objects.count() + MethodPool.objects.count() diff --git a/dongtai_web/views/agent_deploy.py b/dongtai_web/views/agent_deploy.py new file mode 100644 index 000000000..299c5fe6e --- /dev/null +++ b/dongtai_web/views/agent_deploy.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:Bidaya0 +# datetime:2021/7/27 11:36 +# software: Vim8 +# project: webapi + +import time +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.deploy import IastDeployDesc +from dongtai_common.models.system import IastSystem +from rest_framework.authtoken.models import Token +from dongtai_web.utils import get_model_field +from django.forms.models import model_to_dict +from django.utils.translation import gettext_lazy as _ +from rest_framework.serializers import ValidationError + +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + + + +class AgentDeployArgsSerializer(serializers.Serializer): + middleware = serializers.CharField(required=False) + language = serializers.CharField(required=False) + + +_ResponseSerializer = get_response_serializer( + status_msg_keypair=(((201, _("Corresponding deployment document could not be found")), ''), )) + +class AgentDeploy(UserEndPoint): + @extend_schema_with_envcheck([AgentDeployArgsSerializer], + tags=[_('Documents')], + summary=_('Document of Agent Deploy'), + description=_("Document of Agent Deploy"), + response_schema=_ResponseSerializer) + def get(self, request): + ser = AgentDeployArgsSerializer(data=request.GET) + try: + ser.is_valid(True) + except ValidationError as e: + return R.failure(data=e.detail) + desc = IastDeployDesc.objects.filter(**ser.validated_data).first() + if desc: + return R.success(data=model_to_dict(desc)) + return R.failure( + msg=_("Corresponding deployment document could not be found")) diff --git a/dongtai_web/views/agent_deploy_doc.py b/dongtai_web/views/agent_deploy_doc.py new file mode 100644 index 000000000..51f0ed18c --- /dev/null +++ b/dongtai_web/views/agent_deploy_doc.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/6/3 11:36 +# software: PyCharm +# project: webapi +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.deploy import IastDeployDesc +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck + +class AgentDeployDesc(UserEndPoint): + name = "api-v1-iast-deploy-desc" + description = _("Agent deployment document") + + @extend_schema_with_envcheck([{ + 'name': 'os', + 'type': str + }, { + 'name': 'server', + 'type': str + }]) + def get(self, request): + queryset = IastDeployDesc.objects.all() + + os = request.query_params.get('os', 'linux') + if os: + queryset = queryset.filter(os=os) + + middle = request.query_params.get('server', 'tomcat') + if middle: + queryset = queryset.filter(middleware=middle) + + queryset = queryset.last() + if queryset: + return R.success(msg=queryset.desc) + else: + return R.failure(msg=_('No data')) diff --git a/dongtai_web/views/agent_deploy_info.py 
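# AgentDeleteEndPoint above clears an agent's related rows table by table and
# logs how much was removed; Django's QuerySet.delete() returns
# (total_deleted, per_model_counts). A duck-typed sketch of a shared helper that
# logs an accurate per-table message (the helper name and fake queryset are ours):
import logging

logger = logging.getLogger("dongtai-webapi")

def delete_and_log(queryset, label):
    """Delete a queryset and log how many rows were actually removed."""
    deleted, _per_model = queryset.delete()       # Django returns (total, {model: n})
    logger.info(f"{label}: deleted {deleted} row(s)")
    return deleted

class _FakeQuerySet:                              # stand-in so the sketch runs anywhere
    def delete(self):
        return 3, {"iast_errorlog": 3}

logging.basicConfig(level=logging.INFO)
delete_and_log(_FakeQuerySet(), "error logs for agent 42")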
b/dongtai_web/views/agent_deploy_info.py new file mode 100644 index 000000000..2ae231885 --- /dev/null +++ b/dongtai_web/views/agent_deploy_info.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/6/3 11:36 +# software: PyCharm +# project: webapi +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.deploy import IastDeployDesc +from django.utils.translation import gettext_lazy as _ + + +class AgentDeployInfo(UserEndPoint): + name = "api-v1-iast-deploy-info" + description = _("Agent deployment document") + + def get(self, request): + condition = { + "agents": ["Java", ".Net Core", "C#"], + "java_version": ["Java 1.6", "Java 1.7", "Java 1.8", "Java 9", "Java 10", "Java 11", "Java 13", "Java 14", + "Java 15", "Java 16"], + "middlewares": [], + "system": ["windows", "linux"] + } + queryset = IastDeployDesc.objects.all() + for item in queryset: + if item.middleware not in condition['middlewares']: + condition['middlewares'].append(item.middleware) + return R.success(data=condition) diff --git a/dongtai_web/views/agent_deploy_submit.py b/dongtai_web/views/agent_deploy_submit.py new file mode 100644 index 000000000..34f96a883 --- /dev/null +++ b/dongtai_web/views/agent_deploy_submit.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/6/3 11:36 +# software: PyCharm +# project: webapi + +import time +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.deploy import IastDeployDesc +from dongtai_common.models.system import IastSystem +from rest_framework.authtoken.models import Token +from django.utils.translation import gettext_lazy as _ + + +class AgentDeploySave(UserEndPoint): + name = "api-v1-iast-deploy-submit" + description = _("Uploading Agent configuration") + + def get(self, request): + end = { + "status": 201, + "msg": "success", + "user_token": "", + "desc": "", + "data": {} + } + systemInfo = IastSystem.objects.filter(id__gt=0).order_by("-id").first() + if not systemInfo: + step = 1 + else: + token, success = Token.objects.get_or_create(user=request.user) + end['user_token'] = token.key + if systemInfo.system: + step = 3 + desInfo = IastDeployDesc.objects.filter(middleware=systemInfo.middleware, os=systemInfo.system).first() + if desInfo: + end['desc'] = desInfo.desc + else: + step = 2 + end['data'] = { + "agent_value": systemInfo.agent_value, + "java_version": systemInfo.java_version, + "middleware": systemInfo.middleware, + "system": systemInfo.system, + } + end['step'] = step + return R.success(step=step, data=end['data'], user_token=end['user_token'], desc=end['desc']) + + + def post(self, request): + user = request.user + token, success = Token.objects.get_or_create(user=user) + agent_value = request.data.get("agent_value", 0) + java_version = request.data.get("java_version", 0) + middleware = request.data.get("middleware", 0) + system = request.data.get("system", 0) + result = { + "user_token": "", + "status": 201, + "msg": "success", + "desc": "" + } + systemInfo = IastSystem.objects.filter(id__gt=0).order_by("-id").first() + if not systemInfo: + systemInfo = IastSystem.objects.create() + if agent_value: + systemInfo.deploy_status = 1 + systemInfo.agent_value = agent_value + if java_version: + systemInfo.java_version = java_version + if middleware: + systemInfo.middleware = middleware + if system: + systemInfo.system = system + systemInfo.deploy_status = 2 + result['user_token'] = token.key + desInfo = 
IastDeployDesc.objects.filter(middleware=systemInfo.middleware, os=systemInfo.system).first() + if desInfo: + result['desc'] = desInfo.desc + systemInfo.user = user + systemInfo.update_at = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + systemInfo.save() + return R.success(user_token=result['user_token'], desc=result['desc']) diff --git a/dongtai_web/views/agent_download.py b/dongtai_web/views/agent_download.py new file mode 100644 index 000000000..7a16726ad --- /dev/null +++ b/dongtai_web/views/agent_download.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: lingzhi-webapi +import logging +import os + +import requests +from django.http import FileResponse +from dongtai_common.endpoint import UserEndPoint, R +from rest_framework.authtoken.models import Token +from django.utils.translation import gettext_lazy as _ +from dongtai_common.models.profile import IastProfile +from dongtai_web.utils import get_openapi +from requests.exceptions import ConnectionError + +logger = logging.getLogger('dongtai-webapi') + + +class AgentDownload(UserEndPoint): + name = "download_iast_agent" + description = _("Downloading DongTai Agent") + + def __init__(self): + super().__init__() + self.common_info = { + "java": { + "extension": "jar", + "filename": "agent.jar" + }, + "python": { + "extension": "tar.gz", + "filename": "dongtai-agent-python.tar.gz" + }, + "php": { + "extension": "tar.gz", + "filename": "php-agent.tar.gz" + }, + "go": { + "extension": ".yaml", + "filename": "dongtai-go-agent-config.yaml" + } + } + + def res_by_language(self, language, token, resp): + temp_filename = f'temp/dongtai-agent-{language}-{token["key"]}.{self.common_info[language]["extension"]}' + with open(temp_filename, 'wb') as f: + f.write(resp.content) + response = FileResponse(open(temp_filename, 'rb')) + response['content_type'] = 'application/octet-stream' + + response['Content-Disposition'] = "attachment; filename={}".format(self.common_info[language]['filename']) + os.remove(temp_filename) + return response + + def get(self, request): + """ + :param request: + :return: + """ + base_url = request.query_params.get('url', 'https://www.huoxian.cn') + language = request.query_params.get('language', 'java') + project_name = request.query_params.get('projectName', 'Demo Project') + token, success = Token.objects.values('key').get_or_create(user=request.user) + AGENT_SERVER_PROXY={'HOST':''} + AGENT_SERVER_PROXY['HOST'] = get_openapi() + try: + resp = requests.get( + url=f'{AGENT_SERVER_PROXY["HOST"]}/api/v1/agent/download?url={base_url}&language={language}&projectName={project_name}', + headers={ + 'Authorization': f'Token {token["key"]}' + }) + except ConnectionError as e: + return R.failure(msg='conncet error,please check config.ini') + except Exception as e: + logger.error(e) + return R.failure(msg='download error,please check deployment') + + response = self.res_by_language(language, token, resp) + + return response diff --git a/dongtai_web/views/agent_install.py b/dongtai_web/views/agent_install.py new file mode 100644 index 000000000..8469ae594 --- /dev/null +++ b/dongtai_web/views/agent_install.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import time + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from 
dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.serializers.agent import AgentInstallArgsSerializer + + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('The installation is complete')), ''), + ((202, _('The engine is being installed or uninstalled, please try again later')), ''), + ((202, _('Engine does not exist or no permission to access')), + ''), +)) + + +class AgentInstall(UserEndPoint): + name = "api-v1-agent-install" + description = _("Installing an Agent") + + @extend_schema_with_envcheck( + request=AgentInstallArgsSerializer, + tags=[_('Agent')], + summary=_('Agent Install'), + description=_("Install the running agent by specifying the id."), + response_schema=_ResponseSerializer) + def post(self, request): + agent_id = request.data.get('id') + agent = IastAgent.objects.filter(user=request.user, id=agent_id).first() + if agent: + if agent.control != 1 and agent.is_control == 0: + agent.control = 1 + agent.is_control = 1 + agent.latest_time = int(time.time()) + agent.save(update_fields=['latest_time', 'control', 'is_control']) + return R.success(msg=_('The installation is complete')) + else: + return R.failure(msg=_('The engine is being installed or uninstalled, please try again later')) + else: + return R.failure(msg=_('Engine does not exist or no permission to access')) diff --git a/dongtai_web/views/agent_search.py b/dongtai_web/views/agent_search.py new file mode 100644 index 000000000..a689ae021 --- /dev/null +++ b/dongtai_web/views/agent_search.py @@ -0,0 +1,83 @@ +from functools import reduce + +from django.core.paginator import Paginator +from django.db.models import Q +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.heartbeat import IastHeartbeat +from dongtai_common.models.server import IastServer +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.forms.models import model_to_dict + +from dongtai_web.utils import get_model_field + +class _AgentSearchQuerysSerializer(serializers.Serializer): + page_size = serializers.IntegerField(default=20, + help_text=_('Number per page')) + page = serializers.IntegerField(default=1, help_text=_('Page index')) + token = serializers.CharField(help_text=_('The name of agent')) + project_name = serializers.CharField(help_text=_( + "Project name, used to start the agent first and then create the project" + )) + + +_ResponseSerializer = get_response_serializer( + status_msg_keypair=(((201, _('Suspending ...')), ''), )) + +class AgentSearch(AnonymousAndUserEndPoint): + @extend_schema_with_envcheck( + [_AgentSearchQuerysSerializer], + tags=[_('Agent')], + summary=_('Agent Search'), + description=_( + "Search for the agent corresponding to the user according to the following parameters" + ), + response_schema=_ResponseSerializer, + ) + def get(self, request): + page_size = int(request.query_params.get('page_size', 10)) + page = int(request.query_params.get('page', 1)) + fields = get_model_field( + IastAgent, + include=['token', 'project_name'], + ) + searchfields = dict( + filter(lambda k: k[0] in fields, request.query_params.items())) + searchfields_ = {k: v for k, v in searchfields.items() if k in fields} + q = reduce( + lambda x, y: x | y, + map( + lambda x: Q(**x), + map( + lambda kv_pair: + {'__'.join([kv_pair[0], 'icontains']): 
kv_pair[1]}, + searchfields_.items())), Q()) + agents = self.get_auth_and_anonymous_agents(request.user) + q = q & Q(id__in=[_['id'] for _ in agents]) + queryset = IastAgent.objects.filter(q).order_by('-latest_time').all() + summary, agents = self.get_paginator(queryset, page, page_size) + servers = IastServer.objects.filter(pk__in=[_['server_id'] for _ in agents]).all().values() + heartbeats = IastHeartbeat.objects.filter(agent_id__in=[_['id'] for _ in agents]).all().values() + servers = {_['id']: _ for _ in servers} + heartbeats = {_['agent_id']: _ for _ in heartbeats} + relations = [] + for agent in agents: + item = {} + item['agent_id'] = agent['id'] + server = servers.get(agent['server_id'], None) + if server: + for k, v in server.items(): + item['_'.join(['server', k])] = v + heartbeat = heartbeats.get(agent['id'], None) + if heartbeat: + for k, v in heartbeat.items(): + item['_'.join(['heartbeat', k])] = v + relations.append(item) + return R.success( + data={ + 'agents': [model_to_dict(agent)for agent in agents], + 'summary': summary, + 'relations': relations, + }) diff --git a/dongtai_web/views/agent_start.py b/dongtai_web/views/agent_start.py new file mode 100644 index 000000000..b7b3e9fea --- /dev/null +++ b/dongtai_web/views/agent_start.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import time +from dongtai_common.endpoint import UserEndPoint, R + +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.serializers.agent import AgentToggleArgsSerializer +class _AgentStopBodyArgsSerializer(serializers.Serializer): + id = serializers.IntegerField(help_text=_( + 'The id corresponding to the agent.')) + ids = serializers.CharField(help_text=_( + 'The id corresponding to the agent, use"," for segmentation.')) + + +_ResponseSerializer = get_response_serializer( + status_msg_keypair=(((201, _('Suspending ...')), ''), )) + +class AgentStart(UserEndPoint): + name = "api-v1-agent-start" + description = _("Start Agent") + + @extend_schema_with_envcheck( + request=AgentToggleArgsSerializer, + tags=[_('Agent')], + summary=_('Agent Start'), + description=_( + "Start the stopped agent by specifying the id." 
+ ), + response_schema=_ResponseSerializer) + def post(self, request): + agent_id = request.data.get('id') + agent_ids = request.data.get('ids', None) + if agent_ids: + try: + agent_ids = [int(i) for i in agent_ids.split(',')] + except BaseException: + return R.failure(_("Parameter error")) + if agent_id: + agent = IastAgent.objects.filter(user=request.user, id=agent_id).first() + if agent is None: + return R.failure(msg=_('Engine does not exist or no permission to access')) + if agent.is_control == 1 and agent.control != 3 and agent.control != 4: + return R.failure(msg=_('Agent is stopping service, please try again later')) + agent.control = 3 + agent.is_control = 1 + agent.latest_time = int(time.time()) + agent.save(update_fields=['latest_time', 'control', 'is_control']) + if agent_ids: + for agent_id in agent_ids: + agent = IastAgent.objects.filter(user=request.user, id=agent_id).first() + if agent is None: + continue + if agent.is_control == 1 and agent.control != 3 and agent.control != 4: + continue + agent.control = 3 + agent.is_control = 1 + agent.latest_time = int(time.time()) + agent.save(update_fields=['latest_time', 'control', 'is_control']) + return R.success(msg=_('Starting…')) diff --git a/dongtai_web/views/agent_status_update.py b/dongtai_web/views/agent_status_update.py new file mode 100644 index 000000000..93cd06684 --- /dev/null +++ b/dongtai_web/views/agent_status_update.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi +import time + +from django.db.models import Q +from dongtai_common.endpoint import UserEndPoint, R + +from dongtai_common.utils import const +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +_ResponseSerializer = get_response_serializer( + status_msg_keypair=(((201, _("Engine status was updated successfully.")), + ''), )) + + +class AgentStatusUpdate(UserEndPoint): + def get(self, request): + timestamp = int(time.time()) + queryset = IastAgent.objects.filter(user=request.user) + no_heart_beat_queryset = queryset.filter((Q(server=None) & Q(latest_time__lt=(timestamp - 600))), + online=const.RUNNING) + no_heart_beat_queryset.update(online=0) + + heart_beat_queryset = queryset.filter(server__update_time__lt=(timestamp - 600), online=const.RUNNING) + heart_beat_queryset.update(online=0) + + return R.success(msg=_('Engine status was updated successfully.')) diff --git a/dongtai_web/views/agent_stop.py b/dongtai_web/views/agent_stop.py new file mode 100644 index 000000000..c0495e419 --- /dev/null +++ b/dongtai_web/views/agent_stop.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import time +from dongtai_common.endpoint import UserEndPoint, R + +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.serializers.agent import AgentToggleArgsSerializer + + + +_ResponseSerializer = get_response_serializer( + status_msg_keypair=(((201, _('Suspending ...')), ''), )) + +class AgentStop(UserEndPoint): + name = "api-v1-agent-stop" + description = _("Suspend Agent") + + @extend_schema_with_envcheck( + request=AgentToggleArgsSerializer, + 
tags=[_('Agent')], + summary=_('Agent Stop'), + description=_( + "Stop the running agent by specifying the id." + ), + response_schema=_ResponseSerializer) + def post(self, request): + agent_id = request.data.get('id', None) + agent_ids = request.data.get('ids', None) + if agent_ids: + try: + agent_ids = [int(i) for i in agent_ids.split(',')] + except BaseException: + return R.failure(_("Parameter error")) + if agent_id: + agent = IastAgent.objects.filter(user=request.user, + id=agent_id).first() + if agent is None: + return R.failure(msg=_('Engine does not exist or no permission to access')) + if agent.is_control == 1 and agent.control != 3 and agent.control != 4: + return R.failure(msg=_('Agent is stopping service, please try again later')) + agent.control = 4 + agent.is_control = 1 + agent.latest_time = int(time.time()) + agent.save(update_fields=['latest_time', 'control', 'is_control']) + if agent_ids: + for agent_id in agent_ids: + agent = IastAgent.objects.filter(user=request.user, + id=agent_id).first() + if agent is None: + continue + if agent.is_control == 1 and agent.control != 3 and agent.control != 4: + continue + agent.control = 4 + agent.is_control = 1 + agent.latest_time = int(time.time()) + agent.save( + update_fields=['latest_time', 'control', 'is_control']) + + return R.success(msg=_('Suspending ...')) diff --git a/dongtai_web/views/agent_summary.py b/dongtai_web/views/agent_summary.py new file mode 100644 index 000000000..7aadcc91d --- /dev/null +++ b/dongtai_web/views/agent_summary.py @@ -0,0 +1,33 @@ +from dongtai_common.endpoint import (UserEndPoint, R) +from django.utils.translation import gettext_lazy as _ +from dongtai_common.models.agent import IastAgent +from dongtai_web.views.utils.commonstats import get_summary_by_agent_ids +from dongtai_common.models.project_version import IastProjectVersion + +class AgentSummary(UserEndPoint): + name = "api-v1-agent-summary-" + description = _("Item details - Summary") + + def get(self, request, pk): + try: + pk = int(pk) + except Exception as e: + return R.failure() + agent = IastAgent.objects.filter(pk=pk).only( + 'server__ip', 'server__container', 'bind_project_id', + 'language', 'token').first() + if not agent: + return R.failure() + project_version = IastProjectVersion.objects.filter( + project_id=agent.bind_project_id, + current_version=1).only('project__name', 'version_name').first() + data = get_summary_by_agent_ids([agent.id]) + data['ip'] = agent.server.ip + data['middleware'] = agent.server.container + data[ + 'project_name'] = project_version.project.name if project_version else '' + data[ + 'version_name'] = project_version.version_name if project_version else '' + data['token'] = agent.token + data['language'] = agent.language + return R.success(data=data) diff --git a/dongtai_web/views/agent_uninstall.py b/dongtai_web/views/agent_uninstall.py new file mode 100644 index 000000000..c0c9dbd85 --- /dev/null +++ b/dongtai_web/views/agent_uninstall.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import time + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ +from dongtai_web.serializers.agent import AgentInstallArgsSerializer +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + 
((201, _('Uninstalling ...')), ''), + ((202, _('The engine is being installed or uninstalled, please try again later')), ''), + ((202, _('Engine does not exist or no permission to access')), + ''), +)) + +class AgentUninstall(UserEndPoint): + name = "api-v1-agent-uninstall" + description = _("Uninstall Agent") + + @extend_schema_with_envcheck( + request=AgentInstallArgsSerializer, + tags=[_('Agent')], + summary=_('Agent Uninstall'), + description=_("Uninstall the running agent by specifying the id."), + response_schema=_ResponseSerializer) + def post(self, request): + agent_id = request.data.get('id') + agent = IastAgent.objects.filter(user=request.user, id=agent_id).first() + if agent: + if agent.control != 2 and agent.is_control == 0: + agent.control = 2 + agent.is_control = 1 + agent.latest_time = int(time.time()) + agent.save(update_fields=['latest_time', 'control', 'is_control']) + return R.success(msg=_('Uninstalling ...')) + else: + return R.failure(msg=_('Agent is being installed or uninstalled, please try again later')) + else: + return R.failure(msg=_('Engine does not exist or no permission to access')) diff --git a/dongtai_web/views/agent_upgrade_offline.py b/dongtai_web/views/agent_upgrade_offline.py new file mode 100644 index 000000000..fe96f3483 --- /dev/null +++ b/dongtai_web/views/agent_upgrade_offline.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from dongtai_common.endpoint import TalentAdminEndPoint, R +from django.utils.translation import gettext_lazy as _ + + +class AgentUpgradeOffline(TalentAdminEndPoint): + name = "api-v1-agent-offline-upgrade" + description = _("Offline Upgrade Agent") + + def post(self, request): + file = request.FILES['file'] + status, filename = AgentUpgradeOffline.check_file(file.name) + if status: + AgentUpgradeOffline.handle_uploaded_file(filename, file) + return R.success(msg=_('Upload successful')) + return R.failure(msg=_('{} files not supported').format(filename)) + @staticmethod + def handle_uploaded_file(filename, file): + with open(f'iast/upload/iast-package/{filename}', 'wb+') as destination: + for chunk in file.chunks(): + destination.write(chunk) + + @staticmethod + def check_file(filename): + if filename in ['iast-agent.jar', 'iast-core.jar', 'iast-inject.jar']: + return True, filename + return False, filename diff --git a/dongtai_web/views/agent_upgrade_online.py b/dongtai_web/views/agent_upgrade_online.py new file mode 100644 index 000000000..2c8ef7c11 --- /dev/null +++ b/dongtai_web/views/agent_upgrade_online.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from urllib.parse import urljoin + +import requests +from dongtai_common.endpoint import TalentAdminEndPoint, R +from dongtai_common.models import User +from django.utils.translation import gettext_lazy as _ + +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + + +class AgentUpgradeArgsSerializer(serializers.Serializer): + url = serializers.CharField( + help_text=_('The resource link corresponding to the Agent.')) + token = serializers.CharField(help_text=_( + 'The Token corresponding to the user is the same as when connecting to openapi.' 
+ )) + + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Online upgrade successful')), ''), + ((202, + _('Token verification failed, please confirm your input address and token are correct' + )), ''), +)) + + +class AgentUpgradeOnline(TalentAdminEndPoint): + name = "api-v1-agent-install" + description = _("Online Upgrade Agent") + + + + @extend_schema_with_envcheck(request=AgentUpgradeArgsSerializer, + tags=[_('Agent')], + summary=_('Agent Upgrade Online'), + description=_("Agent upgrade"), + response_schema=_ResponseSerializer) + def post(self, request): + url = request.data['url'] + token = request.data['token'] + try: + self.download(url, token) + User.objects.filter(id=request.user.id).update(upgrade_url=url) + return R.success(msg=_('Online upgrade successful')) + except Exception as e: + return R.failure(msg=_('Token verification failed, please confirm your input address and token are correct')) + + def token_verify(self, url, token): + req_url = urljoin(url, 'token/verify') + resp = requests.get(req_url, headers={'Authorization': f'Token {token}'}) + return (resp.status_code == 200 and resp.json()['status'] == 201) + + def download(self, url, token): + headers = {'Authorization': f'Token {token}'} + resp = requests.get(url=urljoin(url, "iast-agent.jar"), headers=headers) + with open("iast/upload/iast-package/iast-agent.jar", 'wb') as f: + f.write(resp.content) + + resp = requests.get(url=urljoin(url, "iast-inject.jar"), headers=headers) + with open("iast/upload/iast-package/iast-inject.jar", 'wb') as f: + f.write(resp.content) + + resp = requests.get(url=urljoin(url, "iast-core.jar"), headers=headers) + with open("iast/upload/iast-package/iast-core.jar", 'wb') as f: + f.write(resp.content) diff --git a/dongtai_web/views/agents.py b/dongtai_web/views/agents.py new file mode 100644 index 000000000..a23956583 --- /dev/null +++ b/dongtai_web/views/agents.py @@ -0,0 +1,224 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import logging +from django.db.models import Prefetch + +from dongtai_common.endpoint import UserEndPoint, R +from django.forms.models import model_to_dict +from dongtai_common.utils import const +from dongtai_web.serializers.agent import AgentSerializer +from dongtai_web.utils import get_model_field +from dongtai_common.models.agent import IastAgent +from collections import defaultdict +from functools import reduce +from django.db.models import Q +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.base.paginator import ListPageMaker +from django.core.cache import cache +logger = logging.getLogger('dongtai-webapi') + +_ResponseSerializer = get_response_serializer( + data_serializer=AgentSerializer(many=True), ) + + +class AgentList(UserEndPoint): + name = "api-v1-agents" + description = _("Agent list") + SERVER_MAP = dict() + @extend_schema_with_envcheck( + [ + { + 'name': "page", + 'type': int, + 'default': 1, + 'required': False, + }, + { + 'name': "pageSize", + 'type': int, + 'default': 1, + 'required': False, + }, + { + 'name': "state", + 'type': int, + 'default': 1, + 'required': False, + }, + { + 'name': "token", + 'type': str, + 'required': False, + }, + { + 'name': "project_name", + 'type': str, + 'required': False, + }, + ], + tags=[_('Agent')], + summary=_('Agent List'), + description=_( + "Get a list containing Agent information according to conditions." 
+ ), + response_schema=_ResponseSerializer, + ) + def get_running_status(self, obj): + mapping = defaultdict(str) + mapping.update({1: _("Online"), 0: _("Offline")}) + return mapping[obj.online] + + def get_server(self, obj): + def get_server_addr(): + if obj.server_id not in self.SERVER_MAP: + if obj.server.ip and obj.server.port and obj.server.port != 0: + self.SERVER_MAP[ + obj.server_id] = f'{obj.server.ip}:{obj.server.port}' + else: + return _('No flow is detected by the probe') + return self.SERVER_MAP[obj.server_id] + + if obj.server_id: + return get_server_addr() + return _('No flow is detected by the probe') + + def make_key(self, request): + self.cache_key = f"{request.user.id}_total_agent_id" + self.cache_key_max_id = f"{request.user.id}_max_agent_id" + + def get_query_cache(self): + total = cache.get(self.cache_key) + max_id = cache.get(self.cache_key_max_id) + return total, max_id + + def set_query_cache(self, q): + total = IastAgent.objects.filter(q).count() + if total > 0: + max_id = IastAgent.objects.filter(q).values_list('id', flat=True).order_by('-id')[0] + else: + max_id = 0 + cache.set(self.cache_key, total, 60 * 60) + cache.set(self.cache_key_max_id, max_id, 60 * 60) + return total, max_id + + def parse_args(self, request): + page = int(request.query_params.get('page', 1)) + page_size = int(request.query_params.get('pageSize', 20)) + page_size = page_size if page_size < 50 else 50 + return page, page_size, request.user + + def get(self, request): + try: + page = int(request.query_params.get('page', 1)) + page_size = int(request.query_params.get('pageSize', 20)) + running_state = request.query_params.get('state', None) + if running_state is not None: + running_state = int(running_state) + project_id = request.query_params.get('project_id', None) + if project_id: + project_id = int(project_id) + + fields = get_model_field( + IastAgent, + include=['token', 'project_name'], + ) + searchfields = dict( + filter(lambda k: k[0] in fields, request.query_params.items())) + searchfields_ = {k: v for k, v in searchfields.items() if k in fields} + q = reduce( + lambda x, y: x | y, + map( + lambda x: Q(**x), + map( + lambda kv_pair: + {'__'.join([kv_pair[0], 'icontains']): kv_pair[1]}, + searchfields_.items())), Q()) + if running_state is not None: + q = q & Q(online=running_state) + # return self.is_superuser == 2 or self.is_superuser == 1 + if request.user.is_superuser == 1: + pass + elif request.user.is_superuser == 2: + q = q & Q(user__in=self.get_auth_users(request.user)) + else: + q = q & Q(user_id=request.user.id) + if project_id: + q = q & Q(bind_project_id=project_id) + + self.make_key(request) + if page == 1: + total, max_id = self.set_query_cache(q) + else: + total, max_id = self.get_query_cache() + if not total or not max_id: + total, max_id = self.set_query_cache(q) + + if page > 1: + before_id = page * page_size + q = q & Q(id__gt=before_id) + baseQuery = IastAgent.objects.filter(q) + cur_data = baseQuery.filter(id__lte=max_id).values_list('id', flat=True).order_by('-id')[(page - 1) * page_size: page * page_size] + cur_ids = [] + for item in cur_data: + cur_ids.append(item) + + queryset = IastAgent.objects.filter(id__in=cur_ids).order_by('-id').select_related("server","user").prefetch_related( + "heartbeats" + ) + end = [] + for item in queryset: + one = model_to_dict(item) + server_data = model_to_dict(item.server) + one['cluster_name'] = server_data.get("cluster_name", "") + one['cluster_version'] = server_data.get("cluster_version", "") + one['owner'] = 
item.user.username + one['server'] = self.get_server(item) + if not one.get("alias", ""): + one['alias'] = one['token'] + all = item.heartbeats.all() + if all: + one['report_queue'] = all[0].report_queue + one['method_queue'] = all[0].method_queue + one['replay_queue'] = all[0].replay_queue + one['system_load'] = all[0].cpu + one['flow'] = all[0].req_count + one['latest_time'] = all[0].dt + else: + one['report_queue'] = 0 + one['method_queue'] = 0 + one['replay_queue'] = 0 + one['system_load'] = _("Load data is not uploaded") + one['flow'] = 0 + del one['online'] + one['running_status'] = self.get_running_status(item) + end.append(one) + + # summery, queryset = self.get_paginator(queryset, page=page, page_size=page_size) + # data = AgentSerializer(queryset, many=True).data + # if not request.user.is_talent_admin(): + # data = list(map(lambda x:removestartup(x),data)) + page_info = { + "alltotal": total, + "num_pages": page, + "page_size": page_size + } + return R.success( + msg="success", + data=end, + page=page_info + ) + except ValueError as e: + logger.error(e,exc_info=True) + return R.failure(msg=_('Incorrect format parameter, please check again')) + except Exception as e: + logger.error(e,exc_info=True) + return R.failure(msg=_('Program error')) + + +def removestartup(dic): + del dic['startup_time'] + return dic diff --git a/dongtai_web/views/agents_delete.py b/dongtai_web/views/agents_delete.py new file mode 100644 index 000000000..bdc769e78 --- /dev/null +++ b/dongtai_web/views/agents_delete.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +import logging + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.asset import Asset +from dongtai_common.models.errorlog import IastErrorlog +from dongtai_common.models.heartbeat import IastHeartbeat +from dongtai_common.models.iast_overpower_user import IastOverpowerUserAuth +from dongtai_common.models.replay_method_pool import IastAgentMethodPoolReplay +from dongtai_common.models.replay_queue import IastReplayQueue +from dongtai_common.models.vulnerablity import IastVulnerabilityModel + +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_method_pool import MethodPool +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +logger = logging.getLogger('dongtai-webapi') +class AgentDeleteQuerysSerializer(serializers.Serializer): + ids = serializers.CharField(help_text=_( + 'The id corresponding to the agent, use"," for segmentation.')) + + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Deleted Successfully')), ''), + ((201, _('Deletion failed')), ''), + ((201, _('Successfully deleted {} strips, failed to deleted {} strips')), + ''), +)) + + +class AgentsDeleteEndPoint(UserEndPoint): + name = "api-v1-agent--delete" + description = _("Delete Agent") + + @extend_schema_with_envcheck( + [AgentDeleteQuerysSerializer], + tags=[_('Agent')], + summary=_('Agent Delete batch'), + description=_("Stop the running agent by specifying the id."), + response_schema=_ResponseSerializer) + def get(self, request): + agent_ids = request.GET.get('ids') + try: + agent_ids = [int(i) for i in agent_ids.split(',')] + except Exception as e: + return R.failure('agent_ids should able to numeric') + result = [] + for pk in agent_ids: + try: + user = request.user + 
queryset = IastAgent.objects.filter(user=user, pk=pk).first() +                if queryset: +                    self.agent = queryset +                    self.delete_error_log() +                    self.delete_heart_beat() +                    self.delete_sca() +                    self.delete_vul() +                    self.delete_method_pool() +                    self.delete_method_pool_replay() +                    self.delete_replay_queue() +                    self.agent.delete() +                    result.append(True) +                else: +                    result.append(False) +                    pass +            except Exception as e: +                result.append(False) +                logger.warning(f'user_id:{request.user.id} msg:{e}') +        success = list(filter(lambda x: x is True, result)) +        failure = list(filter(lambda x: x is False, result)) +        if len(success) == len(agent_ids): +            return R.success(msg=_('Deleted Successfully')) +        if len(failure) == len(agent_ids): +            return R.success(msg=_('Deletion failed')) +        return R.success(msg=_('Successfully deleted {} strips, failed to deleted {} strips').format(len(success), len(failure))) + +    def delete_error_log(self): +        try: +            deleted, _rows_count = IastErrorlog.objects.filter(agent=self.agent).delete() +            logger.warning(_('Error logs deleted successfully, Deletion Amount: {}').format(deleted)) +        except Exception as e: +            logger.warning(_('Failed to delete error logs, probe ID: {}, error message: {}').format(self.agent.id,e)) + +    def delete_heart_beat(self): +        try: +            deleted, _rows_count = IastHeartbeat.objects.filter(agent=self.agent).delete() +            logger.warning(_('Heartbeat data deleted successfully, Deletion Amount: {}').format(deleted)) +        except Exception as e: +            logger.warning(_('Failed to delete heartbeat data, error message: {}').format(e)) + +    def delete_vul_overpower(self): +        try: +            deleted, _rows_count = IastOverpowerUserAuth.objects.filter(agent=self.agent).delete() +            logger.warning(_('Unauthorized data deleted successfully, Deletion Amount: {}').format(deleted)) +        except Exception as e: +            logger.warning(_('Failed to delete unauthorized data, error message: {}').format(e)) + +    def delete_vul(self): +        try: +            deleted, _rows_count = IastVulnerabilityModel.objects.filter(agent=self.agent).delete() +            logger.warning(_('Vulnerability data deleted successfully, Deletion Amount: {}').format(deleted)) +        except Exception as e: +            logger.warning(_('Failed to delete vulnerability data, error message: {}').format(e)) + +    def delete_sca(self): +        try: +            deleted, _rows_count = Asset.objects.filter(agent=self.agent).delete() +            logger.warning(_('Third-party component data deleted successfully, Deletion Amount: {}').format(deleted)) +        except Exception as e: +            logger.warning(_('Failed to delete third-party component data, error message: {}').format(e)) + +    def delete_method_pool(self): +        try: +            deleted, _rows_count = MethodPool.objects.filter(agent=self.agent).delete() +            logger.warning(_('Method pool data deleted successfully, Deletion Amount: {}').format(deleted)) +        except Exception as e: +            logger.warning(_('Failed to delete method pool data, error message: {}').format(e)) + +    def delete_method_pool_replay(self): +        try: +            deleted, _rows_count = IastAgentMethodPoolReplay.objects.filter(agent=self.agent).delete() +            logger.warning(_('Replay request method pool data deleted successfully, Deletion Amount: {}').format(deleted)) +        except Exception as e: +            logger.warning(_('Failed to delete replay request method pool data, error message: {}').format(e)) + +    def delete_replay_queue(self): +        try: +            deleted, 
_rows_count = IastReplayQueue.objects.filter(agent=self.agent).delete() + logger.warning(_('Replay request queue deleted successfully, Deletion amount: {}').format(deleted)) + except Exception as e: + logger.warning(_('Failed to delete replay request queue, error message: {}').format(e)) + + +if __name__ == '__main__': + + MethodPool.objects.count() + IastErrorlog.objects.count() + IastHeartbeat.objects.count() + IastOverpowerUserAuth.objects.count() + Asset.objects.count() + IastVulnerabilityModel.objects.count() + MethodPool.objects.count() diff --git a/dongtai_web/views/agents_user.py b/dongtai_web/views/agents_user.py new file mode 100644 index 000000000..455759704 --- /dev/null +++ b/dongtai_web/views/agents_user.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.agent import IastAgent +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers + +class _UserAgentResponseDataSerializer(serializers.Serializer): + token = serializers.CharField(help_text=_('The name of agent')) + id = serializers.CharField(help_text=_('The id of agent')) + + +_AgentResponseSerializer = get_response_serializer( + data_serializer=_UserAgentResponseDataSerializer(many=True), ) + + +class UserAgentList(UserEndPoint): + @extend_schema_with_envcheck( + tags=[_('Agent')], + summary=_('Agent (with user)'), + description=_("Stop the running agent by specifying the id."), + response_schema=_AgentResponseSerializer) + def get(self, request): + user = request.user + if user.is_talent_admin(): + queryset = IastAgent.objects.all() + else: + queryset = IastAgent.objects.filter(user=user) + queryset_datas = queryset.values("id", "token") + data = [] + if queryset_datas: + for item in queryset_datas: + data.append({ + "id": item['id'], + "name": item['token'] + }) + return R.success(data=data) diff --git a/dongtai_web/views/agents_v2.py b/dongtai_web/views/agents_v2.py new file mode 100644 index 000000000..8268945f1 --- /dev/null +++ b/dongtai_web/views/agents_v2.py @@ -0,0 +1,183 @@ +import logging +from django.db.models import Prefetch + +from dongtai_common.endpoint import UserEndPoint, R +from django.forms.models import model_to_dict +from dongtai_common.utils import const +from dongtai_web.serializers.agent import AgentSerializer +from dongtai_web.utils import get_model_field +from dongtai_common.models.agent import IastAgent +from collections import defaultdict +from functools import reduce +from django.db.models import Q +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.base.paginator import ListPageMaker +from django.core.cache import cache +from enum import IntEnum +from django.db.models.query import QuerySet +from rest_framework.viewsets import ViewSet +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.api_route import IastApiRoute, FromWhereChoices +from dongtai_common.models.asset import Asset +from dongtai_common.utils.user import get_auth_users__by_id +import json + +logger = logging.getLogger('dongtai-webapi') + + +class StateType(IntEnum): + ALL = 1 + RUNNING = 2 + STOP = 3 + UNINSTALL = 4 + + +class AgentListv2(UserEndPoint, ViewSet): + name = "api-v1-agents" + description 
= _("Agent list") + + def pagenation_list(self, request): + try: + page = int(request.query_params.get('page', 1)) + page_size = int(request.query_params.get('page_size', 20)) + state = StateType(int(request.query_params.get('state', 1))) + project_name = request.query_params.get('project_name', '') + project_id = int(request.query_params.get('project_id', 0)) + filter_condiction = generate_filter(state) & Q( + user__in=get_auth_users__by_id(request.user.id)) + if project_name: + filter_condiction = filter_condiction & Q( + bind_project__name__icontains=project_name) + if project_id: + filter_condiction = filter_condiction & Q( + bind_project_id=project_id) + page = page if page else 1 + page_size = page_size if page_size else 20 + + summary, queryset = self.get_paginator(query_agent(filter_condiction), + page, page_size) + queryset = list(queryset) + for agent in queryset: + agent['state'] = cal_state(agent) + agent['memory_rate'] = get_memory(agent['heartbeat__memory']) + agent['cpu_rate'] = get_cpu(agent['heartbeat__cpu']) + agent['disk_rate'] = get_disk(agent['heartbeat__disk']) + agent['is_control'] = get_is_control( + agent['actual_running_status'], + agent['except_running_status'], + agent['online'], + ) + data = {'agents': queryset, "summary": summary} + except Exception as e: + logger.error("agents pagenation_list error:{}".format(e)) + data = dict() + return R.success(data=data) + + def summary(self, request): + res = {} + for type_ in StateType: + res[type_] = IastAgent.objects.filter( + generate_filter(type_), + user__in=get_auth_users__by_id(request.user.id)).count() + return R.success(data=res) + + def agent_stat(self, request): + try: + agent_id = int(request.query_params.get('id', 0)) + res = get_agent_stat(agent_id, request.user.id) + except Exception as e: + logger.error("agent_stat error:{}".format(e)) + res = dict() + return R.success(data=res) + + +def get_agent_stat(agent_id: int, user_id: int) -> dict: + res = {} + res['api_count'] = IastApiRoute.objects.filter( + agent__id=agent_id, + from_where=FromWhereChoices.FROM_AGENT, + agent__user__in=get_auth_users__by_id(user_id)).count() + res['sca_count'] = Asset.objects.filter( + agent__id=agent_id, + agent__user__in=get_auth_users__by_id(user_id)).count() + res['vul_count'] = IastVulnerabilityModel.objects.filter( + agent__id=agent_id, + agent__user__in=get_auth_users__by_id(user_id)).count() + return res + + +def generate_filter(state: StateType) -> Q: + if state == StateType.ALL: + return Q() + elif state == StateType.RUNNING: + return Q(online=1) & Q(actual_running_status=1) + elif state == StateType.STOP: + return Q(online=1) & Q(actual_running_status=2) + elif state == StateType.UNINSTALL: + return Q(online=0) + return Q() + +def get_is_control(actual_running_status: int, except_running_status: int, + online: int) -> int: + if online and actual_running_status != except_running_status: + return 1 + return 0 + +def get_disk(jsonstr: str) -> str: + if not jsonstr: + return '' + dic = json.loads(jsonstr) + try: + dic = json.loads(jsonstr) + res = dic['info'][0]['rate'] + res.replace("%", '') + except Exception as e: + logger.debug(e, exc_info=True) + return '0' + return res + + +def get_cpu(jsonstr: str) -> str: + if not jsonstr: + return '' + try: + dic = json.loads(jsonstr) + res = dic['rate'] + except Exception as e: + logger.debug(e, exc_info=True) + return '0' + return res + + +def get_memory(jsonstr: str) -> str: + if not jsonstr: + return '' + try: + dic = json.loads(jsonstr) + res = dic['rate'] + except 
Exception as e: + logger.debug(e, exc_info=True) + return '0' + dic = json.loads(jsonstr) + return res + + +def cal_state(agent: dict) -> StateType: + if agent['online'] == 1 and agent['actual_running_status'] == 1: + return StateType.RUNNING + elif agent['online'] == 1 and agent['actual_running_status'] == 2: + return StateType.STOP + #elif agent['online'] == 0: + # return StateType.UNINSTALL + return StateType.UNINSTALL + + +def query_agent(filter_condiction=Q()) -> QuerySet: + return IastAgent.objects.filter(filter_condiction).values( + 'alias', 'token', 'bind_project__name', 'bind_project__user__username', + 'language', 'server__ip', 'server__port', 'server__path', + 'server__hostname', 'heartbeat__memory', 'heartbeat__cpu', + 'heartbeat__disk', 'register_time', 'is_core_running', 'is_control', + 'online', 'id', 'bind_project__id', 'version', 'except_running_status', + 'actual_running_status', 'state_status').order_by('-latest_time') diff --git a/dongtai_web/views/api_route_cover_rate.py b/dongtai_web/views/api_route_cover_rate.py new file mode 100644 index 000000000..49603f4b0 --- /dev/null +++ b/dongtai_web/views/api_route_cover_rate.py @@ -0,0 +1,71 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : api_route_cover_rate +# @created : Friday Aug 20, 2021 16:20:10 CST +# +# @description : +###################################################################### + +from dongtai_web.base.project_version import get_project_version, get_project_version_by_id +from dongtai_common.models.agent import IastAgent +from dongtai_common.endpoint import R, UserEndPoint +from django.db.models import Q +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import batch_queryset, checkcover_batch +from dongtai_web.utils import extend_schema_with_envcheck +from dongtai_common.models.api_route import IastApiRoute, FromWhereChoices +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + + +class ApiRouteCoverRateResponseSerializer(serializers.Serializer): + cover_rate = serializers.IntegerField( + help_text=_("The api cover_rate of the project"), ) + + +_GetResponseSerializer = get_response_serializer(ApiRouteCoverRateResponseSerializer()) + + +class ApiRouteCoverRate(UserEndPoint): + @extend_schema_with_envcheck( + [{ + 'name': 'project_id', + 'type': int + }, { + 'name': 'version_id', + 'type': int + }], + tags=[_('API Route')], + summary=_('API Route Coverrate'), + description=_( + "Get the API route coverrate of the project corresponding to the specified id." 
+ ), + response_schema=_GetResponseSerializer, + + ) + def get(self, request): + project_id = request.query_params.get('project_id', None) + version_id = request.query_params.get('version_id', None) + auth_users = self.get_auth_users(request.user) + if not version_id: + current_project_version = get_project_version( + project_id, auth_users) + else: + current_project_version = get_project_version_by_id(version_id) + agents = IastAgent.objects.filter( + user__in=auth_users, + bind_project_id=project_id, + project_version_id=current_project_version.get("version_id", + 0)).values("id") + q = Q(agent__in=agents) + queryset = IastApiRoute.objects.filter(q) + total = queryset.count() + cover_count = checkcover_batch(queryset, agents) + try: + cover_rate = "{:.2%}".format(cover_count / total) + except ZeroDivisionError as e: + print(e) + cover_rate = "{:.2%}".format(1.0) + + return R.success(msg=_('API coverage rate obtained successfully'), + data={'cover_rate': cover_rate}) diff --git a/dongtai_web/views/api_route_related_request.py b/dongtai_web/views/api_route_related_request.py new file mode 100644 index 000000000..6167a5f73 --- /dev/null +++ b/dongtai_web/views/api_route_related_request.py @@ -0,0 +1,80 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : api_route_related_request +# @created : Saturday Aug 21, 2021 13:54:14 CST +# +# @description : +###################################################################### + +from dongtai_common.models.api_route import IastApiRoute, IastApiMethod, IastApiRoute, HttpMethod, IastApiResponse, IastApiMethodHttpMethodRelation, IastApiParameter +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_web.base.project_version import get_project_version, get_project_version_by_id +from dongtai_common.endpoint import R, UserEndPoint +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ +from django.db.models import Q +from django.forms.models import model_to_dict +from dongtai_web.utils import sha1 +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + +class ApiRouteCoverRelationSerializer(serializers.ModelSerializer): + class Meta: + model = MethodPool + fields = serializers.ALL_FIELDS + + +_GetResponseSerializer = get_response_serializer(ApiRouteCoverRelationSerializer()) + + +class ApiRouteRelationRequest(UserEndPoint): + @extend_schema_with_envcheck( + [{ + 'name': 'api_route_id', + 'type': int + }, { + 'name': 'project_id', + 'type': int + }, { + 'name': 'version_id', + 'type': int + }], + tags=[_('API Route')], + summary=_('API Route Relation Request'), + description= + _("Get the coverrate of the project corresponding to the specified id." 
+ ), +        response_schema=_GetResponseSerializer, +    ) +    def get(self, request): +        try: +            page_size = int(request.query_params.get('page_size', 1)) +            page_index = int(request.query_params.get('page_index', 1)) +            api_route_id = int(request.query_params.get('api_route_id', 1)) +            api_route = IastApiRoute.objects.filter(pk=api_route_id).first() +            if api_route is None: +                return R.failure(msg=_("API not found")) +            project_id = request.query_params.get('project_id', None) +            project_id = int(project_id) if project_id else None +            auth_users = self.get_auth_users(request.user) +            version_id = request.query_params.get('version_id', None) +            version_id = int(version_id) if version_id else None +        except BaseException: +            return R.failure(_("Parameter error")) +        if project_id: +            if not version_id: +                current_project_version = get_project_version( +                    project_id, auth_users) +            else: +                current_project_version = get_project_version_by_id(version_id) +            agents = IastAgent.objects.filter( +                user__in=auth_users, +                bind_project_id=project_id, +                project_version_id=current_project_version.get( +                    "version_id", 0)).values("id") +        q = Q() +        q = q & Q(agent_id__in=[_['id'] for _ in agents]) if project_id else q +        q = q & Q(uri_sha1=sha1(api_route.path)) +        q = q & Q( +            http_method__in=[_.method for _ in api_route.method.http_method.all()]) +        method = MethodPool.objects.filter(q).order_by('-update_time')[0:1].values() +        data = list(method)[0] if method else {} +        return R.success(data=data) diff --git a/dongtai_web/views/api_route_search.py b/dongtai_web/views/api_route_search.py new file mode 100644 index 000000000..22051c456 --- /dev/null +++ b/dongtai_web/views/api_route_search.py @@ -0,0 +1,331 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : api_route_search +# @created : Wednesday Aug 18, 2021 14:31:17 CST +# +# @description : +###################################################################### + +from django.db.models import Q +from dongtai_common.endpoint import R, UserEndPoint +from dongtai_common.models.api_route import ( +    IastApiRoute, +    IastApiMethod, +    IastApiRoute, +    HttpMethod, +    IastApiResponse, +    IastApiMethodHttpMethodRelation, +    IastApiParameter, +    FromWhereChoices, +) +from dongtai_common.models.agent import IastAgent +from dongtai_web.base.project_version import get_project_version, get_project_version_by_id +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +import hashlib +from dongtai_common.models.agent_method_pool import MethodPool +from django.forms.models import model_to_dict +from dongtai_web.utils import checkcover, batch_queryset +from django.core.cache import caches +from functools import partial +from dongtai_common.models.hook_type import HookType +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +import logging +from dongtai_common.models.strategy import IastStrategyModel + +logger = logging.getLogger('dongtai-webapi') + +class ApiRouteSearchRequestBodySerializer(serializers.Serializer): +    page_size = serializers.IntegerField( +        help_text=_("number per page"), +        required=False, +        default=1) +    uri = serializers.CharField(help_text=_("The uri of the api route"), +        required=False) +    http_method = serializers.CharField( +        help_text=_("The http method of the api route"), required=False) +    project_id = serializers.IntegerField(help_text=_("The id of the project"), ) +    version_id = serializers.IntegerField( +        help_text=_("The version id of the project"), required=False) + 
exclude_ids = serializers.CharField(help_text=_( + "Exclude the api route entry with the following id, this field is used to obtain the data of the entire project in batches." + ), + required=False) + is_cover = serializers.ChoiceField( + (1, 0), + help_text= + _("Whether the api is covered by detection, that is, there is associated request data in the record." + ), + required=False, + ) + +class ApiRouteHttpMethodSerialier(serializers.Serializer): + httpmethod = serializers.CharField() + + +class ApiRouteMethodSerialier(serializers.Serializer): + apimethod = serializers.CharField( + help_text=_("The method bound to this API")) + httpmethods = ApiRouteHttpMethodSerialier( + help_text=_("The method bound to this API, in array form"), many=True) + + +class ApiRouteParameterSerialier(serializers.Serializer): + id = serializers.IntegerField(help_text=_("The id of api route")) + name = serializers.CharField(help_text=_("The name of api route")) + parameter_type = serializers.CharField( + help_text=_("The type of the parameter")) + parameter_type_shortcut = serializers.CharField(help_text=_( + "The shortcut of the parameter_type,e.g. java.lang.String -> String")) + annotaion = serializers.CharField( + help_text=_("The annotaion of the parameter")) + route = serializers.IntegerField(help_text=_("The route id of parameter")) + + +class ApiRouteResponseSerialier(serializers.Serializer): + id = serializers.IntegerField(help_text=_("The id of api response")) + return_type = serializers.CharField( + help_text=_("The return type of api route")) + route = serializers.IntegerField( + help_text=_("The route id of api response")) + return_type_shortcut = serializers.CharField( + help_text=_("The shortcut of return_type")) + + +class ApiRouteVulnerabitySerialier(serializers.Serializer): + level_id = serializers.IntegerField( + help_text=_("The vulnerablity level id ")) + hook_type_name = serializers.CharField( + help_text=_("The vulnerablity type name")) + + +class ApiRouteSearchResponseSerializer(serializers.Serializer): + id = serializers.IntegerField(help_text=_("The id of api route")) + path = serializers.CharField(help_text=_("The uri of api route")) + code_class = serializers.CharField(help_text=_("The class of api route")) + description = serializers.CharField( + help_text=_("The description of the api route")) + code_file = serializers.CharField( + help_text=_("The code file of the api route")) + controller = serializers.CharField( + help_text=_("The controller of the api route")) + agent = serializers.IntegerField( + help_text=_("The id of the agent reported the api route")) + is_cover = serializers.ChoiceField( + (1, 0), + help_text= + _("Whether the api is covered by detection, that is, there is associated request data in the record." + ), + required=False, + ) + responses = ApiRouteResponseSerialier(many=True) + parameters = ApiRouteParameterSerialier(many=True) + vulnerablities = ApiRouteVulnerabitySerialier(many=True) + method = ApiRouteMethodSerialier() + + +_GetResponseSerializer = get_response_serializer( + ApiRouteSearchResponseSerializer()) + + +class ApiRouteSearch(UserEndPoint): + @extend_schema_with_envcheck( + request=ApiRouteSearchRequestBodySerializer, + tags=[_('API Route')], + summary=_('API Route Search'), + description= + _("Get the API list corresponding to the project according to the following parameters. By default, there is no sorting. Please use the exclude_ids field for pagination." 
+ ), + response_schema=_GetResponseSerializer, + ) + def post(self, request): + try: + page_size = int(request.data.get('page_size', 1)) + page_index = int(request.data.get('page_index', 1)) + uri = request.data.get('uri', None) + http_method = request.data.get('http_method', None) + project_id = request.data.get('project_id', None) + project_id = int(project_id) if project_id else None + version_id = request.data.get('version_id', None) + version_id = int(version_id) if version_id else None + exclude_id = request.data.get('exclude_ids', None) + exclude_id = [int(i) + for i in exclude_id.split(',')] if exclude_id else None + is_cover = request.data.get('is_cover', None) + is_cover_dict = {1: True, 0: False} + is_cover = is_cover_dict[int(is_cover)] if is_cover is not None and is_cover != '' else None + except Exception as e: + logger.error(e) + return R.failure(_("Parameter error")) + auth_users = self.get_auth_users(request.user) + + if http_method: + http_method_obj = HttpMethod.objects.filter(method=http_method.upper())[0:1] + if http_method_obj: + api_methods = IastApiMethod.objects.filter( + http_method__id=http_method_obj[0].id).all().values('id') + else: + api_methods = [] + else: + api_methods = [] + + if not version_id: + current_project_version = get_project_version( + project_id, auth_users) + else: + current_project_version = get_project_version_by_id(version_id) + agents = IastAgent.objects.filter( + user__in=auth_users, + bind_project_id=project_id, + project_version_id=current_project_version.get("version_id", + 0)).values("id") + q = Q(agent_id__in=[_['id'] for _ in agents]) + q = q & Q( + method_id__in=[_['id'] + for _ in api_methods]) if api_methods != [] else q + q = q & Q(path__icontains=uri) if uri else q + q = q & ~Q(pk__in=exclude_id) if exclude_id else q + api_routes = IastApiRoute.objects.filter(q).order_by('id').all() + distinct_fields = ["path", "method_id"] + distinct_exist_list = [] if not exclude_id else list( + set( + filter(lambda x: x != '', [ + distinct_key( + IastApiRoute.objects.filter(pk=i).values( + "path", "method_id").first(), distinct_fields) + for i in exclude_id + ]))) + _filter_and_label_partial = partial( + _filter_and_label, + distinct=True, + distinct_fields=distinct_fields, + distinct_exist_list=distinct_exist_list) + api_routes = _filter_and_label_partial( + api_routes, page_size, agents, http_method, + is_cover) if is_cover is not None else _filter_and_label_partial( + api_routes, page_size, agents, http_method) + return R.success( + data=[_serialize(api_route, agents) for api_route in api_routes]) + + +def _filter_and_label(api_routes, + limit, + agents, + http_method, + is_cover=None, + distinct=True, + distinct_fields=['path', 'method_id'], + distinct_exist_list=[]): + api_routes_after_filter = [] + distinct_exist_list = distinct_exist_list.copy() + for api_route in batch_queryset(api_routes): + distinct_key_ = distinct_key( + { + 'path': api_route.path, + 'method_id': api_route.method.id + }, distinct_fields) + if distinct_key_ in distinct_exist_list: + continue + else: + distinct_exist_list.append(distinct_key_) + api_route.is_cover = checkcover(api_route, agents, http_method) + if is_cover is not None: + api_routes_after_filter += [ + api_route + ] if api_route.is_cover == is_cover else [] + else: + api_routes_after_filter += [api_route] + if limit == len(api_routes_after_filter): + break + return api_routes_after_filter + + +def distinct_key(objects, fields): + if objects is None: + return '' + sequence = [objects.get(field, 'None') 
for field in fields] + sequence = [ + item if isinstance(item, str) else str(item) for item in sequence + ] + return '_'.join(sequence) + + +def _serialize(api_route, agents): + item = model_to_dict(api_route) + is_cover_dict = {1: True, 0: False} + is_cover_dict = _inverse_dict(is_cover_dict) + item['is_cover'] = is_cover_dict[api_route.is_cover] + item['parameters'] = _get_parameters(api_route) + item['responses'] = _get_responses(api_route) + item['method'] = _get_api_method(item['method']) + item['vulnerablities'] = _get_vuls(item['path'], agents) + return item + + +def serialize(api_route): + item = model_to_dict(api_route) + item['parameters'] = _get_parameters(api_route) + item['responses'] = _get_responses(api_route) + item['method'] = _get_api_method(item['method']) + return item + +def _get_vuls(uri, agents): + vuls = IastVulnerabilityModel.objects.filter( + uri=uri, agent_id__in=[_['id'] for _ in agents + ]).distinct().values('hook_type_id', + 'level_id', + 'strategy_id').all() + return [_get_hook_type(vul) for vul in vuls] + + +def _get_hook_type(vul): + + hook_type = HookType.objects.filter(pk=vul['hook_type_id']).first() + hook_type_name = hook_type.name if hook_type else None + strategy = IastStrategyModel.objects.filter(pk=vul['strategy_id']).first() + strategy_name = strategy.vul_name if strategy else None + type_ = list( + filter(lambda x: x is not None, [strategy_name, hook_type_name])) + type_name = type_[0] if type_ else '' + if hook_type: + return {'hook_type_name': type_name, 'level_id': vul['level_id']} + + +def _get_parameters(api_route): + parameters = IastApiParameter.objects.filter(route=api_route).all() + parameters = [model_to_dict(parameter) for parameter in parameters] + parameters = [_get_parameters_type(parameter) for parameter in parameters] + return parameters + + +def _get_parameters_type(api_route): + api_route['parameter_type_shortcut'] = api_route['parameter_type'].split( + '.')[-1] + return api_route + + +def _get_responses(api_route): + responses = IastApiResponse.objects.filter(route=api_route).all() + responses = [model_to_dict(response) for response in responses] + responses = [_get_responses_type(response) for response in responses] + return responses + + +def _get_responses_type(api_route): + api_route['return_type_shortcut'] = api_route['return_type'].split('.')[-1] + return api_route + + +def _get_api_method(api_method_id): + apimethod = IastApiMethod.objects.filter(pk=api_method_id).first() + if apimethod: + res = {} + res['apimethod'] = apimethod.method + res['httpmethods'] = [_.method for _ in apimethod.http_method.all()] + return res + return {} + + +def _inverse_dict(dic: dict) -> dict: + return {v: k for k, v in dic.items()} diff --git a/dongtai_web/views/captcha_create.py b/dongtai_web/views/captcha_create.py new file mode 100644 index 000000000..23601d823 --- /dev/null +++ b/dongtai_web/views/captcha_create.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from captcha.helpers import captcha_image_url +from captcha.models import CaptchaStore +from dongtai_common.endpoint import R +from rest_framework.views import APIView + + +class CaptchaCreate(APIView): + def get(self, request): + hash_key = CaptchaStore.generate_key() + image_url = captcha_image_url(hash_key) + return R.success(data={'hash_key': hash_key, 'image_url': image_url}) diff --git a/dongtai_web/views/captcha_verify.py b/dongtai_web/views/captcha_verify.py new file mode 100644 index 
000000000..4a263fef6 --- /dev/null +++ b/dongtai_web/views/captcha_verify.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from captcha.models import CaptchaStore +from dongtai_common.endpoint import R +from rest_framework.views import APIView + + +class CaptchaVerify(APIView): + def get(self, request): + hash_key = request.query_params.get('key', None) + captcha = request.query_params.get('captcha', None) + status = 0 + if hash_key and captcha: + get_captcha = CaptchaStore.objects.get(hashkey=hash_key) + + if get_captcha.response == captcha.lower(): + status = 1 + return R.success(data={'status': status}) diff --git a/dongtai_web/views/demo.py b/dongtai_web/views/demo.py new file mode 100644 index 000000000..0f4d7ebe9 --- /dev/null +++ b/dongtai_web/views/demo.py @@ -0,0 +1,26 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : demo +# @created : Wednesday Aug 04, 2021 15:00:46 CST +# +# @description : +###################################################################### + +from dongtai_common.endpoint import R, UserEndPoint +from dongtai_common.models.user import User +from django.contrib.auth import authenticate, login +from django.http import HttpResponseRedirect +from django.conf import settings + + +class Demo(UserEndPoint): + permission_classes = [] + authentication_classes = [] + name = "user_views_login" + description = "用户登录" + + def get(self, request): + user = User.objects.filter(username="demo").first() + login(request, user) + res = HttpResponseRedirect(settings.DOMAIN + "project/projectManage") + return res diff --git a/dongtai_web/views/details_id.py b/dongtai_web/views/details_id.py new file mode 100644 index 000000000..51d3d40e4 --- /dev/null +++ b/dongtai_web/views/details_id.py @@ -0,0 +1,141 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : details_id +# @created : 星期一 12月 27, 2021 16:32:12 CST +# +# @description : +###################################################################### + + + +import logging + +from dongtai_common.endpoint import UserEndPoint, R + +from dongtai_common.utils import const +from dongtai_web.serializers.agent import AgentSerializer +from dongtai_web.serializers.project import ProjectSerializer +from dongtai_web.serializers.sca import ScaSerializer +from dongtai_web.serializers.vul import VulSerializer +from dongtai_common.models.asset import Asset +from dongtai_web.utils import get_model_field +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project import IastProject +from functools import reduce +from django.db.models import Q +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers +from rest_framework.serializers import ValidationError +from dongtai_common.models.vulnerablity import IastVulnerabilityModel + + +class IdsSerializer(serializers.Serializer): + ids = serializers.ListField(child=serializers.IntegerField()) + + +class DetailListWithid(UserEndPoint): + serializers = serializers.Serializer + def parse_ids(self, request): + ser = IdsSerializer(data=request.data) + try: + if ser.is_valid(True): + ids = ser.validated_data['ids'] + except ValidationError as e: + return R.failure(data=e.detail) + return ids + + def query(self, ids, request): + 
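# Base implementation is a no-op; the subclasses that follow (AgentListWithid,
# ProjectListWithid, ScaListWithid, VulsListWithid) override query() to return only
# the objects visible to the requesting user, roughly in this shape (a sketch taken
# from the AgentListWithid override below, not an additional implementation):
#
#     return IastAgent.objects.filter(
#         pk__in=ids, user__in=self.get_auth_users(request.user)).all()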
return [] + + def get(self, request): + res = self.parse_ids(request) + if not isinstance(res, list): + return res + ids = res + items = self.query(ids, request) + return R.success(data=self.serializer(items, many=True).data) + + +class AgentListWithid(DetailListWithid): + serializer = AgentSerializer + + def query(self, ids, request): + agents = IastAgent.objects.filter(pk__in=ids, + user__in=self.get_auth_users( + request.user)).all() + return agents + @extend_schema_with_envcheck( + request=IdsSerializer, + tags=[_('Agent')], + summary=_('Agent List with id'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." + ), + ) + def post(self, request): + return super().get(request) + + +class ProjectListWithid(DetailListWithid): + serializer = ProjectSerializer + + def query(self, ids, request): + projects = IastProject.objects.filter(pk__in=ids, + user__in=self.get_auth_users( + request.user)).all() + return projects + + @extend_schema_with_envcheck( + request=IdsSerializer, + tags=[_('Project')], + summary=_('Project List with id'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." + ), + ) + def post(self, request): + return super().get(request) + + +class ScaListWithid(DetailListWithid): + serializer = ScaSerializer + + def query(self, ids, request): + auth_users = self.get_auth_users(request.user) + auth_agents = self.get_auth_agents(auth_users) + scas = Asset.objects.filter(pk__in=ids, agent__in=auth_agents).all() + return scas + + @extend_schema_with_envcheck( + request=IdsSerializer, + tags=[_('Component')], + summary=_('Component List with id'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." + ), + ) + def post(self, request): + return super().get(request) + + +class VulsListWithid(DetailListWithid): + serializer = VulSerializer + + def query(self, ids, request): + auth_users = self.get_auth_users(request.user) + auth_agents = self.get_auth_agents(auth_users) + vuls = IastVulnerabilityModel.objects.filter( + pk__in=ids, agent__in=auth_agents).values().all() + return vuls + + @extend_schema_with_envcheck( + request=IdsSerializer, + tags=[_('Vulnerability')], + summary=_('Vulnerability List with id'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." 
+ ), + ) + def post(self, request): + return super().get(request) diff --git a/dongtai_web/views/documents.py b/dongtai_web/views/documents.py new file mode 100644 index 000000000..85b4888a3 --- /dev/null +++ b/dongtai_web/views/documents.py @@ -0,0 +1,63 @@ +from dongtai_common.utils import const +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.document import IastDocument + +from dongtai_common.endpoint import R +from dongtai_common.utils import const +from dongtai_common.endpoint import UserEndPoint +from django.forms.models import model_to_dict +from django.db.models import Q +from rest_framework import serializers +from rest_framework.serializers import ValidationError +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ + + +class _DocumentArgsSerializer(serializers.Serializer): + page_size = serializers.IntegerField(default=20, + help_text=_('Number per page')) + page = serializers.IntegerField(default=1, help_text=_('Page index')) + language = serializers.CharField( + default=None, + help_text=_("Document's corresponding programming language")) + + +class DocumentSerializer(serializers.ModelSerializer): + class Meta: + model = IastDocument + fields = ['id', 'title', 'url', 'language', 'weight'] + + +class ResponseDataSerializer(serializers.Serializer): + documents = DocumentSerializer(many=True) + + +_SuccessSerializer = get_response_serializer(ResponseDataSerializer()) + + +class DocumentsEndpoint(UserEndPoint): + @extend_schema_with_envcheck([_DocumentArgsSerializer], + response_schema=_SuccessSerializer, + summary=_('Get documents'), + description=_("Get help documentation."), + tags=[_('Documents')]) + def get(self, request): + ser = _DocumentArgsSerializer(data=request.GET) + try: + if ser.is_valid(True): + page_size = ser.validated_data['page_size'] + page = ser.validated_data['page'] + language = ser.validated_data['language'] + except ValidationError as e: + return R.failure(data=e.detail) + if language: + q = Q(language=language) + else: + q = Q() + _, documents = self.get_paginator( + IastDocument.objects.filter(q).order_by('-weight').all(), page, + page_size) + return R.success(data={ + 'documents': [model_to_dict(document) for document in documents] + }) diff --git a/dongtai_web/views/engine_hook_rule_add.py b/dongtai_web/views/engine_hook_rule_add.py new file mode 100644 index 000000000..dde7d8108 --- /dev/null +++ b/dongtai_web/views/engine_hook_rule_add.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import time + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.hook_strategy import HookStrategy +from dongtai_common.models.hook_type import HookType +from dongtai_common.utils import const +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.text import format_lazy +from dongtai_web.serializers.hook_strategy import SINK_POSITION_HELP_TEXT +from rest_framework import serializers + +class _HookRuleAddBodyargsSerializer(serializers.Serializer): + rule_type_id = serializers.IntegerField( + help_text=_('The id of hook rule type.')) + rule_value = serializers.CharField( + help_text=_('The value of strategy'), + max_length=255, + ) + rule_source = serializers.CharField( + 
help_text=format_lazy("{}\n{}", _("Source of taint"), + SINK_POSITION_HELP_TEXT), + max_length=255, + ) + rule_target = serializers.CharField( + help_text=format_lazy("{}\n{}", _("Target of taint"), + SINK_POSITION_HELP_TEXT), + max_length=255, + ) + inherit = serializers.CharField( + help_text= + _('Inheritance type, false-only detect current class, true-inspect subclasses, all-check current class and subclasses' + ), + max_length=255, + ) + track = serializers.CharField( + help_text= + _("Indicates whether taint tracking is required, true-required, false-not required." + ), + max_length=5, + ) + + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Policy enabled success, total {} hook rules')), ''), + ((202, _('Incomplete parameter, please check again')), ''), + ((202, _('Failed to create strategy')), ''), +)) + + +class EngineHookRuleAddEndPoint(UserEndPoint): + def parse_args(self, request): + """ + :param request: + :return: + """ + try: + rule_type = request.data.get('rule_type_id') + rule_value = request.data.get('rule_value').strip() + rule_source = request.data.get('rule_source').strip() + rule_target = request.data.get('rule_target').strip() + inherit = request.data.get('inherit').strip() + is_track = request.data.get('track').strip() + + return rule_type, rule_value, rule_source, rule_target, inherit, is_track + except Exception as e: + + return None, None, None, None, None, None + + def create_strategy(self, value, source, target, inherit, track, created_by): + try: + + + timestamp = int(time.time()) + strategy = HookStrategy( + value=value, + source=source, + target=target, + inherit=inherit, + track=track, + create_time=timestamp, + update_time=timestamp, + created_by=created_by, + enable=const.ENABLE + ) + strategy.save() + return strategy + except Exception as e: + return None + + @extend_schema_with_envcheck( + + request=_HookRuleAddBodyargsSerializer, + tags=[_('Hook Rule')], + summary=_('Hook Rule Add'), + description=_( + "Generate corresponding strategy group according to the strategy selected by the user." 
+ ), + response_schema=_ResponseSerializer, + ) + def post(self, request): + rule_type, rule_value, rule_source, rule_target, inherit, is_track = self.parse_args(request) + if all((rule_type, rule_value, rule_source, inherit, is_track)) is False: + return R.failure(msg=_('Incomplete parameter, please check again')) + + strategy = self.create_strategy(rule_value, rule_source, rule_target, inherit, is_track, request.user.id) + if strategy: + hook_type = HookType.objects.filter( + id=rule_type, + created_by__in=(request.user.id, const.SYSTEM_USER_ID) + ).first() + if hook_type: + hook_type.strategies.add(strategy) + return R.success(msg=_('Strategy has been created successfully')) + return R.failure(msg=_('Failed to create strategy')) diff --git a/dongtai_web/views/engine_hook_rule_modify.py b/dongtai_web/views/engine_hook_rule_modify.py new file mode 100644 index 000000000..aa1e7965d --- /dev/null +++ b/dongtai_web/views/engine_hook_rule_modify.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import time + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.utils import const +from dongtai_common.models.hook_strategy import HookStrategy +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers +from django.utils.text import format_lazy +from dongtai_web.serializers.hook_strategy import SINK_POSITION_HELP_TEXT +from dongtai_common.models.hook_type import HookType + + +_PostResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('strategy has been created successfully')), ''), + ((202, _('Incomplete parameter, please check again')), ''), + ((202, _('Failed to create strategy')), ''), +)) + + +class _EngineHookRuleModifySerializer(serializers.Serializer): + rule_id = serializers.IntegerField( + help_text=_('The id of hook rule')) + rule_type_id = serializers.IntegerField( + help_text=_('The id of hook rule type.')) + rule_value = serializers.CharField( + help_text=_('The value of strategy'), + max_length=255, + ) + rule_source = serializers.CharField( + help_text=format_lazy("{}\n{}", _("Source of taint"), + SINK_POSITION_HELP_TEXT), + max_length=255, + ) + rule_target = serializers.CharField( + help_text=format_lazy("{}\n{}", _("Target of taint"), + SINK_POSITION_HELP_TEXT), + max_length=255, + ) + inherit = serializers.CharField( + help_text= + _('Inheritance type, false-only detect current class, true-inspect subclasses, all-check current class and subclasses' + ), + max_length=255, + ) + track = serializers.CharField( + help_text= + _("Indicates whether taint tracking is required, true-required, false-not required." 
+ ), + max_length=5, + ) + + +class EngineHookRuleModifyEndPoint(UserEndPoint): + def parse_args(self, request): + """ + :param request: + :return: + """ + try: + rule_id = request.data.get('rule_id') + rule_type = request.data.get('rule_type_id') + rule_value = request.data.get('rule_value').strip() + rule_source = request.data.get('rule_source').strip() + rule_target = request.data.get('rule_target').strip() + inherit = request.data.get('inherit').strip() + is_track = request.data.get('track').strip() + + return rule_id, rule_type, rule_value, rule_source, rule_target, inherit, is_track + except Exception as e: + return None, None, None, None, None, None, None + + @extend_schema_with_envcheck( + request=_EngineHookRuleModifySerializer, + tags=[_('Hook Rule')], + summary=_('Hook Rule Modify'), + description=_("Modify the rule corresponding to the specified id"), + response_schema=_PostResponseSerializer, + ) + def post(self, request): + rule_id, rule_type, rule_value, rule_source, rule_target, inherit, is_track = self.parse_args(request) + hook_type = HookType.objects.filter( + id=rule_type, + created_by__in=(request.user.id, const.SYSTEM_USER_ID) + ).first() + if all((rule_id, rule_type, rule_value, rule_source, inherit, is_track, hook_type)) is False: + return R.failure(msg=_('Incomplete parameter, please check again')) + + strategy = HookStrategy.objects.filter(id=rule_id, created_by=request.user.id).first() + if strategy: + if hook_type: + strategy.type.get(strategy=strategy).strategies.remove(strategy) + hook_type.strategies.add(strategy) + strategy.value = rule_value + strategy.source = rule_source + strategy.target = rule_target + strategy.inherit = inherit + strategy.track = is_track + strategy.update_time = int(time.time()) + strategy.save() + + return R.success(msg=_('strategy has been created successfully')) + return R.failure(msg=_('Failed to create strategy')) diff --git a/dongtai_web/views/engine_hook_rule_status.py b/dongtai_web/views/engine_hook_rule_status.py new file mode 100644 index 000000000..793afdf5b --- /dev/null +++ b/dongtai_web/views/engine_hook_rule_status.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import logging + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.hook_strategy import HookStrategy +from dongtai_common.utils import const +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers +from dongtai_common.models.hook_type import HookType + +logger = logging.getLogger('dongtai-webapi') + +OP_CHOICES = ('enable', 'disable', 'delete') +SCOPE_CHOICES = ('all',) + +class EngineHookRuleStatusGetQuerySerializer(serializers.Serializer): + rule_id = serializers.IntegerField(required=False, + help_text=_("The id of hook rule")) + type = serializers.IntegerField( + required=False, help_text=_("The id of hook rule type")) + op = serializers.ChoiceField(OP_CHOICES, + required=False, + help_text=_("The state of the hook rule")) + scope = serializers.ChoiceField(SCOPE_CHOICES, + required=False, + help_text=_("The scope of the hook rule")) + language_id = serializers.IntegerField(required=False, + help_text=_("The language_id")) + hook_rule_type = serializers.IntegerField( + required=False, help_text=_("The type of hook rule")) + + +class EngineHookRuleStatusPostBodySerializer(serializers.Serializer): + ids = 
serializers.CharField(help_text=_( + 'The id corresponding to the hook type, use"," for segmentation.')) + op = serializers.ChoiceField(OP_CHOICES, + help_text=_("The state of the hook rule")) + + +_GetResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Operation success')), ''), + ((202, _('Operation type does not exist')), ''), + ((202, _('Strategy does not exist')), ''), +)) + +_PostResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Operation success')), ''), + ((202, _('Operation type does not exist')), ''), + ((202, _('Incorrect parameter')), ''), +)) + +class EngineHookRuleEnableEndPoint(UserEndPoint): + def parse_args(self, request): + rule_id = request.query_params.get('rule_id') + rule_type = request.query_params.get('type') + scope = request.query_params.get('scope') + op = request.query_params.get('op') + return rule_id, rule_type, scope, op, request.query_params.get( + 'language_id'), request.query_params.get('hook_rule_type') + + @staticmethod + def set_strategy_status(strategy_id, strategy_ids, user_id, enable_status): + if strategy_id: + rule = HookStrategy.objects.filter(id=strategy_id, created_by=user_id).first() + if rule: + rule.enable = enable_status + rule.save() + return 1 + elif strategy_ids: + count = HookStrategy.objects.filter(id__in=strategy_ids, created_by=user_id).update(enable=enable_status) + return count + return 0 + + @staticmethod + def check_op(op): + if op == 'enable': + op = const.ENABLE + elif op == 'disable': + op = const.DISABLE + elif op == 'delete': + op = const.DELETE + else: + op = None + return op + + @extend_schema_with_envcheck( + [EngineHookRuleStatusGetQuerySerializer], + tags=[_('Hook Rule')], + summary=_('Hook Rule Status Modify'), + description=_("Modify the status of the rule corresponding to the specified id."), + response_schema=_GetResponseSerializer, + ) + def get(self, request): + rule_id, rule_type, scope, op, language_id, hook_rule_type = self.parse_args( + request) + try: + if rule_id: + rule_id = int(rule_id) + if rule_type: + rule_type = int(rule_type) + except BaseException: + return R.failure(_("Parameter error")) + user_id = request.user.id + status = False + + op = self.check_op(op) + if op is None: + return R.failure(msg=_('Operation type does not exist')) + if rule_type is not None and scope == 'all': + count = HookStrategy.objects.filter(type__id=rule_type, created_by=user_id).update(enable=op) + logger.info(_('Policy type {} operation success, total of {} Policy types').format(rule_type, count)) + status = True + if hook_rule_type is not None and language_id is not None and scope == 'all': + users = self.get_auth_users(request.user) + user_ids = (user.id for user in users) + hook_type_ids = HookType.objects.filter( + language_id=language_id, + type=hook_rule_type).values_list('id', flat=True).all() + count = HookStrategy.objects.filter( + type__id__in=hook_type_ids, + created_by__in=user_ids).update(enable=op) + logger.info(_('total of {} Policy types').format(count)) + status = True + elif rule_id is not None: + status = self.set_strategy_status(strategy_id=rule_id, strategy_ids=None, user_id=user_id, + enable_status=op) + logger.info(_('Policy {} succeed').format(rule_id)) + + if status: + return R.success(msg=_('Operation success')) + else: + return R.failure(msg=_('Strategy does not exist')) + + @extend_schema_with_envcheck( + request=EngineHookRuleStatusPostBodySerializer, + tags=[_('Hook Rule')], + summary=_('Hook Rule Status Modify (Batch)'), + 
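# The body validated by EngineHookRuleStatusPostBodySerializer above is a
# comma-separated string of rule ids (matched against HookStrategy ids in the
# post() handler below) plus an op of 'enable', 'disable' or 'delete', e.g.
# (illustrative values only):
#
#     {"ids": "3,4,5", "op": "disable"}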
description=_("Batch modify the status of the rule corresponding to the specified id"), + response_schema=_PostResponseSerializer, + ) + def post(self, request): + op = request.data.get('op') + op = self.check_op(op) + if op is None: + return R.failure(msg=_('Operation type does not exist')) + + strategy_ids = request.data.get('ids') + try: + strategy_ids = [int(i) for i in strategy_ids.split(',')] + except BaseException: + return R.failure(_("Parameter error")) + if strategy_ids: + count = self.set_strategy_status(strategy_id=None, strategy_ids=strategy_ids, user_id=request.user.id, + enable_status=op) + logger.info(_('Strategy operation success, total {}').format(count)) + return R.success(msg=_('Operation success')) + else: + return R.failure(msg=_('Incorrect parameter')) diff --git a/dongtai_web/views/engine_hook_rule_summary.py b/dongtai_web/views/engine_hook_rule_summary.py new file mode 100644 index 000000000..ea0442ecd --- /dev/null +++ b/dongtai_web/views/engine_hook_rule_summary.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.hook_strategy import HookStrategy +from dongtai_common.models.hook_type import HookType +from dongtai_common.utils import const +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ +from rest_framework.serializers import ValidationError + + +class EngineHookRuleSummarySerializer(serializers.Serializer): + typeCount = serializers.IntegerField( + help_text=_("Total number of rule types")) + ruleCount = serializers.IntegerField(help_text=_("Total number of rules")) + sinkCount = serializers.IntegerField( + help_text=_("Total number of sink type rules")) + + +class _EngineHookRuleSummaryQuerySerializer(serializers.Serializer): + language_id = serializers.IntegerField( + help_text=_('The id of programming language'), + required=False,allow_null=True) + + +_ResponseSerializer = get_response_serializer( + EngineHookRuleSummarySerializer(many=True)) + + +class EngineHookRuleSummaryEndPoint(UserEndPoint): + @extend_schema_with_envcheck( + [_EngineHookRuleSummaryQuerySerializer], + tags=[_('Hook Rule')], + summary=_('Hook Rule Summary'), + description=_("Statistics on the number of hook rules"), + response_schema=_ResponseSerializer, + ) + def get(self, request): + ser = _EngineHookRuleSummaryQuerySerializer(data=request.GET) + try: + ser.is_valid(True) + except ValidationError as e: + return R.failure(msg=_('Parameter error')) + rule_type_queryset = HookType.objects.filter(created_by__in=[request.user.id, const.SYSTEM_USER_ID]) + if ser.validated_data.get('language_id', None): + rule_type_queryset = rule_type_queryset.filter( + language_id=ser.validated_data['language_id'], enable__gt=0) + rule_type_count = rule_type_queryset.values('id').count() + + sink_type_queryset = rule_type_queryset.filter(type=const.RULE_SINK) + sink_count = HookStrategy.objects.values('id').filter(type__in=sink_type_queryset,enable__gt=0).count() + + rule_count = HookStrategy.objects.values('id').filter(type__in=rule_type_queryset,enable__gt=0).count() + return R.success(data={ + 'typeCount': rule_type_count, + 'ruleCount': rule_count, + 'sinkCount': sink_count + }) diff --git a/dongtai_web/views/engine_hook_rule_type_add.py b/dongtai_web/views/engine_hook_rule_type_add.py new file mode 100644 index 
000000000..0c437c7ff --- /dev/null +++ b/dongtai_web/views/engine_hook_rule_type_add.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import time +import logging + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.hook_type import HookType +from dongtai_common.utils import const +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers +from dongtai_web.serializers.hook_strategy import HOOK_TYPE_CHOICE +from rest_framework.serializers import ValidationError + +ENABLE_CHOICE = (const.ENABLE, const.DISABLE) +logger = logging.getLogger('dongtai-webapi') + + +class _EngineHookRuleTypeAddSerializer(serializers.Serializer): + type = serializers.ChoiceField( + HOOK_TYPE_CHOICE, + help_text= + _("type of hook rule \n 1 represents the propagation method, 2 represents the source method, 3 represents the filter method, and 4 represents the taint method" + ), + required=True) + enable = serializers.ChoiceField( + ENABLE_CHOICE, + help_text=_( + "The enabled state of the hook strategy: 0-disabled, 1-enabled"), + required=True) + name = serializers.CharField(help_text=_("The name of hook type"), + max_length=255, + required=True) + short_name = serializers.CharField( + help_text=_("The short name of hook type"), + max_length=255, + required=True) + language_id = serializers.ChoiceField( + (1, 2, 3, 4), + default=1, + help_text= + _('The id of programming language,find it in the programming language api' + ), + ) + + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Rule type successfully saved')), ''), + ((202, _('Incomplete data')), ''), +)) + + +class EngineHookRuleTypeAddEndPoint(UserEndPoint): + def parse_args(self, request): + try: + ser = _EngineHookRuleTypeAddSerializer(data=request.data) + try: + ser.is_valid(True) + except ValidationError as e: + return None, None, None, None, None + rule_type = ser.validated_data.get('type') + rule_type = int(rule_type) + if rule_type not in ( + const.RULE_SOURCE, + const.RULE_ENTRY_POINT, + const.RULE_PROPAGATOR, + ): + rule_type = None + + name = ser.validated_data.get('name') + + short_name = ser.validated_data.get('short_name') + + enable = ser.validated_data.get('enable') + enable = int(enable) + language_id = ser.validated_data.get('language_id', 1) + if enable not in ENABLE_CHOICE: + return None, None, None, None, None + return rule_type, name, short_name, enable, language_id + except Exception as e: + logger.error( + _("Parameter parsing failed, error message: {}").format(e)) + return None, None, None, None, None + + @extend_schema_with_envcheck( + request=_EngineHookRuleTypeAddSerializer, + tags=[_('Hook Rule')], + summary=_('Hook Rule Type Add'), + description=_("Create hook rule type based on incoming parameters"), + response_schema=_ResponseSerializer, + ) + def post(self, request): + rule_type, name, short_name, enable, language_id = self.parse_args( + request) + if all((rule_type, name, short_name, language_id)) is False: + return R.failure(msg=_('Incomplete data')) + timestamp = int(time.time()) + hook_type = HookType(enable=enable, + type=rule_type, + name=short_name, + value=name, + create_time=timestamp, + update_time=timestamp, + created_by=request.user.id, + language_id=language_id, + vul_strategy_id=-1,) + hook_type.save() + return R.success(msg=_('Rule type successfully 
saved')) diff --git a/dongtai_web/views/engine_hook_rule_type_disable.py b/dongtai_web/views/engine_hook_rule_type_disable.py new file mode 100644 index 000000000..27c0a9d27 --- /dev/null +++ b/dongtai_web/views/engine_hook_rule_type_disable.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.hook_strategy import HookStrategy +from dongtai_common.utils import const +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + + +class EngineHookRuleTypeEnableSerializer(serializers.Serializer): + rule_id = serializers.IntegerField(help_text=_("The id of hook type"), + default=const.RULE_PROPAGATOR) + + +_GetResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Forbidden success')), ''), + ((202, _('Strategy type does not exist')), ''), + ((202, _('Strategy does not exist')), ''), +)) + +class EngineHookRuleTypeDisableEndPoint(UserEndPoint): + def parse_args(self, request): + try: + rule_id = request.query_params.get('rule_id', const.RULE_PROPAGATOR) + rule_type = int(rule_id) + return rule_type + except Exception as e: + + return None + + @extend_schema_with_envcheck( + [EngineHookRuleTypeEnableSerializer], + tags=[_('Hook Rule')], + summary=_('Hook Rule Status Disable'), + description=_( + "Disable the status of the rule corresponding to the specified id." + ), + response_schema=_GetResponseSerializer, + ) + def get(self, request): + rule_id = self.parse_args(request) + if rule_id is None: + return R.failure(msg=_('Strategy does not exist')) + + rule = HookStrategy.objects.filter(id=rule_id, created_by=request.user.id).first() + if rule: + rule_type = rule.type.first() + if rule_type: + rule_type.enable = const.DISABLE + rule.save() + return R.success(msg=_('Forbidden success')) + return R.failure(msg=_('Strategy type does not exist')) diff --git a/dongtai_web/views/engine_hook_rule_type_enable.py b/dongtai_web/views/engine_hook_rule_type_enable.py new file mode 100644 index 000000000..327488fe5 --- /dev/null +++ b/dongtai_web/views/engine_hook_rule_type_enable.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import logging + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.hook_strategy import HookStrategy +from dongtai_common.utils import const +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + +logger = logging.getLogger('dongtai-webapi') + + +class EngineHookRuleTypeEnableSerializer(serializers.Serializer): + rule_id = serializers.IntegerField(help_text=_("The id of hook type"), + default=const.RULE_PROPAGATOR) + + +_GetResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Enable successfully')), ''), + ((202, _('Operation type does not exist')), ''), + ((202, _('Strategy type does not exist')), ''), +)) + + +class EngineHookRuleTypeEnableEndPoint(UserEndPoint): + def parse_args(self, request): + try: + rule_id = request.query_params.get('rule_id', const.RULE_PROPAGATOR) + rule_type = int(rule_id) + return rule_type + except Exception as e: + logger.error( + _("Parameter processing failed, error message: 
{}").format(e)) + return None + + @extend_schema_with_envcheck( + [EngineHookRuleTypeEnableSerializer], + tags=[_('Hook Rule')], + summary=_('Hook Rule Status Enable'), + description=_( + "Enable the status of the rule corresponding to the specified id." + ), + response_schema=_GetResponseSerializer, + ) + def get(self, request): + rule_id = self.parse_args(request) + if rule_id is None: + return R.failure(msg=_('Strategy does not exist')) + + rule = HookStrategy.objects.filter(id=rule_id, created_by=request.user.id).first() + if rule: + rule_type = rule.type.first() + if rule_type: + rule_type.enable = const.ENABLE + rule.save() + return R.success(msg=_('Enable successfully')) + return R.failure(msg=_('Strategy type does not exist')) diff --git a/dongtai_web/views/engine_hook_rule_types.py b/dongtai_web/views/engine_hook_rule_types.py new file mode 100644 index 000000000..35a440982 --- /dev/null +++ b/dongtai_web/views/engine_hook_rule_types.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import logging + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.hook_type import HookType +from dongtai_common.utils import const + +from dongtai_web.serializers.hook_type_strategy import HookTypeSerialize +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.text import format_lazy +from rest_framework.serializers import ValidationError +from dongtai_web.serializers.hook_strategy import HOOK_TYPE_CHOICE +logger = logging.getLogger('dongtai-webapi') + + +class _EngineHookRuleTypeArgsSerializer(serializers.Serializer): + pageSize = serializers.IntegerField(default=20, + help_text=_('Number per page')) + page = serializers.IntegerField(default=1, help_text=_('Page index')) + type = serializers.ChoiceField( + HOOK_TYPE_CHOICE, + help_text= + _("type of hook rule \n 1 represents the propagation method, 2 represents the source method, 3 represents the filter method, and 4 represents the taint method" + )) + language_id = serializers.IntegerField( + default=1, + help_text=_('The id of programming language'), + required=False) + + +_SuccessSerializer = get_response_serializer( + HookTypeSerialize(many=True, allow_null=True)) + + +class EngineHookRuleTypesEndPoint(UserEndPoint): + def parse_args(self, request): + try: + ser = _EngineHookRuleTypeArgsSerializer(data=request.GET) + try: + ser.is_valid(True) + except ValidationError as e: + return None, None, None, None + rule_type = ser.validated_data.get('type', const.RULE_PROPAGATOR) + rule_type = int(rule_type) + if rule_type not in (const.RULE_SOURCE, const.RULE_ENTRY_POINT, + const.RULE_PROPAGATOR, const.RULE_FILTER, + const.RULE_SINK): + rule_type = None + + page = ser.validated_data.get('page', 1) + page = int(page) + + page_size = ser.validated_data.get('pageSize', 20) + page_size = int(page_size) + if page_size > const.MAX_PAGE_SIZE: + page_size = const.MAX_PAGE_SIZE + language_id = ser.validated_data.get('language_id', 1) + return rule_type, page, page_size, language_id + except Exception as e: + logger.error( + _("Parameter parsing failed, error message: {}").format(e)) + return None, None, None, None + + @extend_schema_with_envcheck([_EngineHookRuleTypeArgsSerializer], + response_schema=_SuccessSerializer, + summary=_('Hook Types List'), + description=_("Get Hook Types List"), + tags=[_('Hook 
Rule')]) + def get(self, request): + rule_type, page, page_size, language_id = self.parse_args(request) + if all( + map(lambda x: x is not None, [rule_type, page, page_size, + language_id])) is False: + return R.failure(msg=_('Parameter error')) + if rule_type is None: + return R.failure(msg=_('Strategy type does not exist')) + + queryset = HookType.objects.filter( + created_by__in=[request.user.id, const.SYSTEM_USER_ID], + type=rule_type, + language_id=language_id) + data = HookTypeSerialize(queryset, many=True).data + return R.success(data=data) diff --git a/dongtai_web/views/engine_hook_rules.py b/dongtai_web/views/engine_hook_rules.py new file mode 100644 index 000000000..654d7c775 --- /dev/null +++ b/dongtai_web/views/engine_hook_rules.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import logging + +from dongtai_common.endpoint import UserEndPoint, R +from django.db.models import Q +from dongtai_common.models.hook_strategy import HookStrategy +from dongtai_common.models.hook_type import HookType +from dongtai_common.utils import const + +from dongtai_web.serializers.hook_strategy import HookRuleSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.serializers.hook_strategy import HOOK_TYPE_CHOICE +from rest_framework.serializers import ValidationError + +from rest_framework import serializers + +class _EngineHookRulesQuerySerializer(serializers.Serializer): + type = serializers.ChoiceField( + HOOK_TYPE_CHOICE, + help_text= + _("type of hook rule \n 1 represents the propagation method, 2 represents the source method, 3 represents the filter method, and 4 represents the taint method" + )) + pageSize = serializers.IntegerField(default=20, + help_text=_('number per page')) + page = serializers.IntegerField(default=1, help_text=_('page index')) + strategy_type = serializers.IntegerField( + help_text=_("The id of hook_type"), required=False) + language_id = serializers.IntegerField( + default=1, + help_text=_('The id of programming language'), + required=False) + keyword = serializers.CharField(help_text=_('The keyword for search'), + required=False) + + +_ResponseSerializer = get_response_serializer( + data_serializer=HookRuleSerializer(many=True), ) + +logger = logging.getLogger('dongtai-webapi') + + +class EngineHookRulesEndPoint(UserEndPoint): + def parse_args(self, request): + try: + ser = _EngineHookRulesQuerySerializer(data=request.GET) + try: + ser.is_valid(True) + except ValidationError as e: + return None, None, None, None, None, None + rule_type = ser.validated_data.get('type', const.RULE_PROPAGATOR) + rule_type = int(rule_type) + if rule_type not in ( + const.RULE_SOURCE, const.RULE_ENTRY_POINT, const.RULE_PROPAGATOR, const.RULE_FILTER, + const.RULE_SINK): + rule_type = None + + page = ser.validated_data.get('page', 1) + page = int(page) + + page_size = ser.validated_data.get('pageSize', 20) + page_size = int(page_size) + if page_size > const.MAX_PAGE_SIZE: + page_size = const.MAX_PAGE_SIZE + language_id = ser.validated_data.get('language_id', 1) + keyword = ser.validated_data.get('keyword', None) + + strategy_type = ser.validated_data.get('strategy_type') + return rule_type, page, page_size, strategy_type, language_id, keyword + except Exception as e: + logger.error(_("Parameter parsing failed, error message: {}").format(e)) + return None, None, None, None, None, None + + 
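# parse_args() above returns (rule_type, page, page_size, strategy_type,
# language_id, keyword); get() below turns those into a Q filter over the user's
# own strategies. A rough sketch of the resulting query, using the models imported
# at the top of this file (the optional keyword adds Q(value__icontains=keyword)):
#
#     hook_types = HookType.objects.filter(
#         created_by__in=(user_id, const.SYSTEM_USER_ID),
#         type=rule_type, language_id=language_id)
#     rules = HookStrategy.objects.filter(
#         Q(type__in=hook_types) & Q(created_by=user_id)
#         & Q(enable__in=(const.ENABLE, const.DISABLE)))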
@extend_schema_with_envcheck( + querys=[_EngineHookRulesQuerySerializer], + tags=[_('Hook Rule')], + summary=_('Hook Rule List'), + description=_("Get the list of hook strategies"), + response_schema=_ResponseSerializer, + ) + def get(self, request): + res = self.parse_args(request) + if res is None: + return R.failure(msg=_('Parameter error')) + rule_type, page, page_size, strategy_type, language_id, keyword = res + if all( + map(lambda x: x is not None, + [rule_type, page, page_size, language_id + ])) is False: + return R.failure(msg=_('Parameter error')) + if rule_type is None: + return R.failure(msg=_('Strategy type does not exist')) + + try: + user_id = request.user.id + if strategy_type: + rule_type_queryset = HookType.objects.filter( + id=strategy_type, + created_by__in=(user_id, const.SYSTEM_USER_ID), + type=rule_type, + language_id=language_id) + else: + rule_type_queryset = HookType.objects.filter( + created_by__in=(user_id, const.SYSTEM_USER_ID), + type=rule_type, + language_id=language_id) + q = Q(type__in=rule_type_queryset) & Q(created_by=user_id) & Q(enable__in=(const.ENABLE,const.DISABLE)) + if keyword: + q = Q(value__icontains=keyword) & q + rule_queryset = HookStrategy.objects.filter(q) + page_summary, queryset = self.get_paginator(rule_queryset, page=page, page_size=page_size) + data = HookRuleSerializer(queryset, many=True).data + return R.success(data=data, page=page_summary) + except Exception as e: + logger.error(_("Rule read error, error message: {}").format(e)) + return R.failure() diff --git a/dongtai_web/views/engine_method_pool_detail.py b/dongtai_web/views/engine_method_pool_detail.py new file mode 100644 index 000000000..61b561ea9 --- /dev/null +++ b/dongtai_web/views/engine_method_pool_detail.py @@ -0,0 +1,188 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import json +import logging + +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint +from dongtai_common.engine.vul_engine import VulEngine +from dongtai_common.models.agent_method_pool import MethodPool + +from dongtai_web.serializers.method_pool import MethodPoolListSerialize +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +logger = logging.getLogger('dongtai-webapi') + + +class MethodPoolDetailProxy(AnonymousAndUserEndPoint): + name = "api-engine-search" + description = _("Engine - search data according to policy") + + def post(self, request): + """ + :param request: + :return: + token: agent-ip-port-path + """ + + + + try: + latest_id, page_size, rule_name, rule_msg, rule_level, source_set, sink_set, propagator_set = \ + self.parse_search_condition(request) + auth_agents = self.get_auth_and_anonymous_agents(request.user).values('id') + + auth_agent_ids = [agent['id'] for agent in auth_agents] + method_pool_ids = self.get_match_methods( + agents=auth_agent_ids, + source_set=source_set, + propagator_set=propagator_set, + sink_set=sink_set, + latest_id=latest_id, + page_size=page_size, + size=page_size * 5) + if method_pool_ids is None: + return R.success(msg=_('Not queried'), data=list(), latest=0) + + return R.success( + data=self.get_result_data(method_pool_ids, rule_name, rule_level, source_set, sink_set, propagator_set), + latest=method_pool_ids[-1] + ) + except Exception as e: + return R.failure(msg=_("Acquisition fail")) + + @staticmethod + def parse_search_condition(request): + """ + :param request: + :return: + """ + 
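# parse_search_condition() reads paging hints from the query string ('latest',
# 'pageSize') and the match rules from the request body. An illustrative body,
# using only the keys consumed below; the method signatures are made-up examples
# in the 'class.method' form produced by convert_to_set():
#
#     {"name": "rule-1", "msg": "", "level": 1,
#      "sources": ["javax.servlet.ServletRequest.getParameter"],
#      "propagators": [],
#      "sinks": ["java.lang.Runtime.exec"]}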
latest_id = int(request.query_params.get('latest', 0)) + page_size = int(request.query_params.get('pageSize', 20)) + if page_size > 100: + page_size = 100 + + rule_id = request.data.get('name', _('Temporary search')) + rule_msg = request.data.get('msg') + rule_level = request.data.get('level') + rule_sources = request.data.get('sources') + rule_sinks = request.data.get('sinks') + rule_propagators = request.data.get('propagators') + + sink_set = set(rule_sinks) if rule_sinks else set() + source_set = set(rule_sources) if rule_sources else set() + propagator_set = set(rule_propagators) if rule_propagators else set() + + return latest_id, page_size, rule_id, rule_msg, rule_level, source_set, sink_set, propagator_set + + def get_match_methods(self, agents, source_set, propagator_set, sink_set, latest_id=0, page_size=20, index=0, + size=20): + queryset = MethodPool.objects.order_by('id') + if latest_id == 0: + queryset = queryset.filter(agent_id__in=agents) + else: + queryset = queryset.filter(id__gt=latest_id, agent_id__in=agents) + if queryset.values('id').exists() is False: + return None + + + matches = list() + while True: + logger.debug(_('Searching, current {} page').format(index +1)) + page = queryset.values('id', 'method_pool')[index * size:(index + 1) * size - 1] + if page: + if len(matches) == page_size: + break + for method_pool in page: + if len(matches) == page_size: + break + method_caller_set = self.convert_method_pool_to_set(method_pool['method_pool']) + if self.check_match(method_caller_set, source_set, propagator_set, sink_set): + matches.append(method_pool['id']) + else: + break + index = index + 1 + return matches + + def convert_method_pool_to_set(self, method_pool): + method_callers = json.loads(method_pool) + return self.convert_to_set(method_callers) + + @staticmethod + def convert_to_set(method_callers): + def signature_concat(method_caller): + return f'{method_caller.get("className").replace("/", ".")}.{method_caller.get("methodName")}' + + method_caller_set = set() + for method_caller in method_callers: + if isinstance(method_caller, list): + for node in method_caller: + method_caller_set.add(signature_concat(node)) + elif isinstance(method_caller, dict): + method_caller_set.add(signature_concat(method_caller)) + return method_caller_set + + def check_match(self, method_caller_set, sink_set=None, source_set=None, propagator_set=None): + """ + :param method_caller_set: + :param sink_set: + :param source_set: + :param propagator_set: + :return: + """ + status = True + if sink_set: + result = method_caller_set & sink_set + status = status and result is not None and len(result) > 0 + if source_set: + result = method_caller_set & source_set + status = status and result is not None and len(result) > 0 + if propagator_set: + result = method_caller_set & propagator_set + status = status and result is not None and len(result) > 0 + return status + + def get_result_data(self, method_pool_ids, rule_name, rule_level, source_set, sink_set, propagator_set): + data = list() + + method_pools = MethodPool.objects.filter(id__in=method_pool_ids) + if method_pools.values('id').exists() is False: + return data + + if len(sink_set) == 0: + return MethodPoolListSerialize(rule=rule_name, level=rule_level, instance=method_pools, many=True).data + + engine = VulEngine() + for method_pool in method_pools: + for sink in sink_set: + engine.search( + method_pool=json.loads(method_pool.method_pool), + vul_method_signature=sink + ) + status, links, source, sink = engine.result() + if status is 
False: + continue + + method_caller_set = self.convert_to_set(links) + if self.check_match(method_caller_set, source_set, propagator_set) is False: + continue + + top_link = links[0] + data.append({ + 'id': method_pool.id, + 'url': method_pool.url, + 'req_params': method_pool.req_params, + 'language': method_pool.agent.language, + 'update_time': method_pool.update_time, + 'rule': rule_name, + 'level': rule_level, + 'agent_name': method_pool.agent.token, + 'top_stack': f"{top_link[0]['className'].replace('/', '.')}.{top_link[0]['methodName']}", + 'bottom_stack': f"{top_link[-1]['className'].replace('/', '.')}.{top_link[-1]['methodName']}", + 'link_count': len(links) + }) + + return data diff --git a/dongtai_web/views/engine_method_pool_sca.py b/dongtai_web/views/engine_method_pool_sca.py new file mode 100644 index 000000000..cf62639ae --- /dev/null +++ b/dongtai_web/views/engine_method_pool_sca.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi +from dongtai_common.endpoint import AnonymousAndUserEndPoint, R +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.asset import Asset + +from dongtai_web.serializers.sca import ScaSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_common.models.sca_maven_db import ScaMavenDb + +_ResponseSerializer = get_response_serializer( + data_serializer=ScaSerializer(many=True), ) + + +class EngineMethodPoolSca(AnonymousAndUserEndPoint): + @extend_schema_with_envcheck( + [{ + 'name': "method_pool_id", + 'type': int, + 'required': True + }], + tags=[_('Method Pool')], + summary=_('Method Pool Component'), + description=_("Get the component information list of the tainted call chain."), + response_schema=_ResponseSerializer, + ) + def get(self, request): + method_pool_id = request.query_params.get('method_pool_id') + + if method_pool_id is None: + return R.failure(msg=_('method_pool_id is empty')) + + method_pool = MethodPool.objects.filter( + id=method_pool_id).values('agent_id').first() + if method_pool is None: + return R.failure(msg=_('method_pool does not exist')) + + agent_id = method_pool['agent_id'] + auth_agents = self.get_auth_and_anonymous_agents(request.user) + if auth_agents is None or auth_agents.filter( + id=agent_id).values('id').exists() is False: + return R.failure(msg=_('method_pool has no permission')) + + project_data = auth_agents.filter(id=agent_id).values( + 'bind_project_id', 'project_version_id').first() + project_id = project_data['bind_project_id'] + project_version_id = project_data['project_version_id'] + + queryset = Asset.objects.filter(agent_id=agent_id) + license_dict = { + i['sha_1']: i['license'] + for i in ScaMavenDb.objects.filter(sha_1__in=queryset.values( + 'signature_value')).values('license', 'sha_1') + } + return R.success( + data=ScaSerializer(queryset.select_related('level', 'agent'), + context={ + 'license_dict': license_dict + }, + many=True).data) diff --git a/dongtai_web/views/engine_method_pool_search.py b/dongtai_web/views/engine_method_pool_search.py new file mode 100644 index 000000000..e9cc779a0 --- /dev/null +++ b/dongtai_web/views/engine_method_pool_search.py @@ -0,0 +1,419 @@ +from functools import reduce + +from django.db.models import Q +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint +from dongtai_common.models.agent import IastAgent +from 
dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.project import IastProject +from dongtai_common.models.user import User +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.strategy import IastStrategyModel + +from dongtai_web.utils import get_model_field, assemble_query,assemble_query_2 +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy +from django.db.utils import OperationalError +import re +import operator +import time +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ +from django.utils.translation import gettext_lazy +from dongtai_conf.settings import ELASTICSEARCH_STATE + +class MethodPoolSearchProxySer(serializers.Serializer): + page_size = serializers.IntegerField(min_value=1, + help_text=_("number per page")) + highlight = serializers.IntegerField( + default=1, + help_text= + _("Whether to enable highlighting, the text where the regular expression matches will be highlighted" + )) + exclude_ids = serializers.ListField( + child=serializers.IntegerField(), + help_text= + _("Exclude the method_pool entry with the following id, this field is used to obtain the data of the entire project in batches." + ), + required=False) + time_range = serializers.ListField( + child=serializers.IntegerField( + min_value=1, help_text=_('time format such as 1,1628190947242')), + min_length=2, + max_length=2, + help_text= + _("Time range, the default is the current time to the previous seven days, separated by',', format such as 1,1628190947242" + )) + url = serializers.CharField( + help_text=_("The url of the method pool, search using regular syntax"), + required=False) + res_header = serializers.CharField(help_text=_( + "The response header of the method pood, search using regular syntax"), + required=False) + res_body = serializers.CharField(help_text=_( + "The response body of the calling chain, search using regular syntax"), + required=False) + req_header_fs = serializers.CharField(help_text=_( + "The request header of the calling chain, search using regular syntax" + ), + required=False) + req_data = serializers.CharField(help_text=_( + "The request data of the calling chain, search using regular syntax"), + required=False) + sinkvalues = serializers.CharField(help_text=_( + "The sinkvalues of the calling chain, search using regular syntax"), + required=False) + signature = serializers.CharField(help_text=_( + "The signature of the calling chain, search using regular syntax"), + required=False) + update_time = serializers.CharField(help_text=_( + "The filter field will return the method call chain with the update time after this time, which can be combined with the exclude_ids field to handle paging" + ), + required=False) + search_mode = serializers.IntegerField( + help_text=_("the search_mode , 1-regex match ,2-regex not match "), + default=1, + required=False) + + +class MethodPoolSearchResponseRelationVulnerablitySer(serializers.Serializer): + vulnerablity_type = serializers.CharField() + vulnerablity_hook_type_id = serializers.IntegerField() + vulnerablity_id = serializers.IntegerField() + level_id = serializers.IntegerField() + + +class MethodPoolSearchResponseMethodPoolSer(serializers.Serializer): + id = serializers.IntegerField() + agent_id = serializers.IntegerField() + url = serializers.CharField() + uri = 
serializers.CharField() + http_method = serializers.CharField() + http_scheme = serializers.CharField() + http_protocol = serializers.CharField() + req_header = serializers.CharField() + req_header_fs = serializers.CharField() + req_params = serializers.CharField() + req_data = serializers.CharField() + res_header = serializers.CharField() + res_body = serializers.CharField() + context_path = serializers.CharField() + method_pool = serializers.CharField() + pool_sign = serializers.CharField() + client_ip = serializers.CharField() + update_time = serializers.IntegerField() + create_time = serializers.IntegerField() + uri_sha1 = serializers.CharField() + uri_highlight = serializers.CharField() + res_header_highlight = serializers.CharField() + res_body_highlight = serializers.CharField() + req_header_fs_highlight = serializers.CharField() + req_data_highlight = serializers.CharField() + + +class MethodPoolSearchResponseRelationSer(serializers.Serializer): + method_pool_id = serializers.IntegerField() + agent_id = serializers.IntegerField() + agent_name = serializers.CharField() + agent_is_running = serializers.IntegerField() + project_name = serializers.CharField() + user_id = serializers.IntegerField() + user_name = serializers.CharField() + vulnerablities = MethodPoolSearchResponseRelationVulnerablitySer(many=True) + + +class MethodPoolSearchResponseAggregationSer(serializers.Serializer): + method_pool_id = serializers.IntegerField() + count = serializers.IntegerField() + + +class MethodPoolSearchResponseAfterkeySer(serializers.Serializer): + update_time = serializers.IntegerField() + + +class MethodPoolSearchResponseSer(serializers.Serializer): + method_pools = MethodPoolSearchResponseMethodPoolSer(many=True) + relations = MethodPoolSearchResponseRelationSer(many=True) + aggregation = MethodPoolSearchResponseAggregationSer(many=True) + afterkeys = MethodPoolSearchResponseAfterkeySer(many=True) + + +_GetResponseSerializer = get_response_serializer(MethodPoolSearchResponseSer()) + + +class MethodPoolSearchProxy(AnonymousAndUserEndPoint): + @extend_schema_with_envcheck( + request=MethodPoolSearchProxySer, + tags=[_('Method Pool')], + summary=_('Method Pool Search'), + description= + _("Search for the method pool information according to the following conditions, the default is regular expression input, regular specifications refer to REGEX POSIX 1003.2" + ), + response_schema=_GetResponseSerializer, + ) + def post(self, request): + page_size = int(request.data.get('page_size', 1)) + page = request.data.get('page_index', 1) + highlight = request.data.get('highlight', 1) + fields = ['url', 'res_body'] + model_fields = [ + 'url', 'res_header', 'res_body', 'req_header_fs', 'req_data' + ] + fields = get_model_field( + MethodPool, + include=model_fields, + ) + #fields.extend(['sinkvalues', 'signature']) + search_after_keys = ['update_time'] + exclude_ids = request.data.get('exclude_ids', None) + time_range = request.data.get('time_range', None) + try: + search_mode = int(request.data.get('search_mode', 1)) + if page_size <= 0: + return R.failure(gettext_lazy("Parameter error")) + [start_time, + end_time] = time_range if time_range is not None and len( + time_range) == 2 else [ + int(time.time()) - 60 * 60 * 24 * 7, + int(time.time()) + # and 0 < time_range[1] - time_range[ + # 0] <= 60 * 60 * 24 * 7 else [ + # int(time.time()) - 60 * 60 * 24 * 7, + # int(time.time()) + ] + ids = exclude_ids if isinstance(exclude_ids, list) and all( + map(lambda x: isinstance(x, int), exclude_ids)) else [] + except 
BaseException: + return R.failure(gettext_lazy("Parameter error")) + search_fields = dict( + filter(lambda k: k[0] in fields, request.data.items())) + search_fields_ = [] + for k, v in search_fields.items(): + search_fields_.append((k, v)) + search_after_fields = list( + filter( + lambda x: x[0] in search_after_keys, + map( + lambda x: (x[0].replace('search_after_', ''), x[1]), + filter(lambda x: x[0].startswith('search_after_'), + request.data.items())))) + if 'id' in request.data.keys(): + q = q if 'q' in vars() else Q() + q = assemble_query(search_after_fields, 'lte', q, operator.and_) + if search_mode == 1: + q = assemble_query(search_fields_, 'regex', Q(), operator.or_) + elif search_mode == 2: + q = assemble_query_2(search_fields_, 'regex', Q(), + operator.and_) + if 'id' in request.data.keys(): + q = q & Q(pk=request.data['id']) + q = q & Q(agent_id__in=[ + item['id'] for item in list( + self.get_auth_agents_with_user(request.user).values('id')) + ]) + if time_range: + q = (q & (Q(update_time__gte=start_time) + & Q(update_time__lte=end_time))) + q = (q & (~Q(pk__in=ids))) if ids is not None and ids != [] else q + queryset = MethodPool.objects.filter(q).order_by( + '-update_time')[:page_size] + try: + method_pools = list(queryset.values()) + except OperationalError as e: + return R.failure(msg=gettext_lazy( + "The regular expression format is wrong, please use REGEX POSIX 1003.2" + )) + elif ELASTICSEARCH_STATE: + method_pools = search_generate( + search_fields_, time_range, + self.get_auth_users(request.user).values_list('id', flat=True), + search_after_fields, exclude_ids, page_size, search_mode) + method_pools = [i._d_ for i in method_pools] + for method_pool in method_pools: + method_pool['req_header_fs'] = method_pool['req_header_for_search'] + method_pools = list( + MethodPool.objects.filter( + agent_id__in=[i['agent_id'] for i in method_pools], + pool_sign__in=[i['pool_sign'] for i in method_pools + ]).order_by('-update_time').values().all()) + else: + q = q if 'q' in vars() else Q() + q = assemble_query(search_after_fields, 'lte', q, operator.and_) + if search_mode == 1: + q = assemble_query(search_fields_, 'regex', Q(), operator.or_) + elif search_mode == 2: + q = assemble_query_2(search_fields_, 'regex', Q(), + operator.and_) + if 'id' in request.data.keys(): + q = q & Q(pk=request.data['id']) + q = q & Q(agent_id__in=[ + item['id'] for item in list( + self.get_auth_agents_with_user(request.user).values('id')) + ]) + if time_range: + q = (q & (Q(update_time__gte=start_time) + & Q(update_time__lte=end_time))) + q = (q & (~Q(pk__in=ids))) if ids is not None and ids != [] else q + queryset = MethodPool.objects.filter(q).order_by( + '-update_time')[:page_size] + try: + method_pools = list(queryset.values()) + except OperationalError as e: + return R.failure(msg=gettext_lazy( + "The regular expression format is wrong, please use REGEX POSIX 1003.2" + )) + afterkeys = {} + for i in method_pools[-1:]: + afterkeys['update_time'] = i['update_time'] + agents = IastAgent.objects.filter( + pk__in=[i['agent_id'] for i in method_pools]).all().values( + 'bind_project_id', 'token', 'id', 'user_id', 'online') + projects = IastProject.objects.filter( + pk__in=[i['bind_project_id'] + for i in agents]).values('id', 'name', 'user_id') + vulnerablity = IastVulnerabilityModel.objects.filter( + method_pool_id__in=[i['id'] for i in method_pools]).all().values( + 'id', 'hook_type_id','hook_type__name', 'strategy__vul_name','strategy_id','method_pool_id', 'level_id').distinct() + users = 
User.objects.filter(pk__in=[_['user_id'] + for _ in agents]).values( + 'id', 'username') + vulnerablities = list(vulnerablity) + relations = [] + [agents, projects, users] = _transform([agents, projects, users], 'id') + for method_pool in method_pools: + item = {} + item['method_pool_id'] = method_pool['id'] + agent = agents.get(method_pool['agent_id'], None) + if agent: + item['agent_id'] = agent['id'] + item['agent_name'] = agent['token'] + item['agent_is_running'] = agent['online'] + project = projects.get(agent['bind_project_id'], None) + if project: + item['project_id'] = project['id'] + item['project_name'] = project['name'] + user = users.get(agent['user_id'], None) + if user: + item['user_id'] = user['id'] + item['user_name'] = user['username'] + item['vulnerablities'] = [] + for vulnerablity in list( + filter(lambda _: _['method_pool_id'] == method_pool['id'], + vulnerablities)): + _ = {} + type_ = list( + filter(lambda x: x is not None, [vulnerablity['strategy__vul_name'], vulnerablity['hook_type__name']])) + _['vulnerablity_type'] = type_[0] if type_ else '' + _['vulnerablity_id'] = vulnerablity['id'] + _['vulnerablity_hook_type_id'] = vulnerablity['hook_type_id'] + _['level_id'] = vulnerablity['level_id'] + item['vulnerablities'].append(_) + relations.append(item) + aggregation = {} + aggregation['vulnerablities_count'] = aggregation_count( + relations, 'method_pool_id', 'vulnerablities') + if highlight: + for method_pool in method_pools: + for field in model_fields: + if field in search_fields.keys() and request.data.get( + field, None) and search_mode == 1: + if method_pool[field] is None: + continue + method_pool['_'.join([field, 'highlight' + ])] = highlight_matches( + request.data[field], + method_pool[field], + "{0}") + elif field in fields: + if method_pool[field] is None: + continue + method_pool['_'.join([field, 'highlight' + ])] = method_pool[field].replace( + '<', '<') + else: + if method_pool[field] is None: + continue + method_pool['_'.join([field, 'highlight' + ])] = method_pool[field].replace( + '<', '<') + return R.success( + data={ + 'method_pools': method_pools, + 'relations': relations, + 'aggregation': aggregation, + 'afterkeys': afterkeys + }) + + +def _transform(models: list, reindex_id: str): + return [{_[reindex_id]: _ for _ in model} for model in models] + + +def aggregation_count(list_, primary_key, count_key): + """ + params + list_ : [{},{}] + """ + return list( + map( + lambda x: { + primary_key: x[primary_key], + 'count': len(x[count_key]) + }, list_)) + + + +def highlight_matches(query, text, html): + text = text.replace('<', '<') + + def span_matches(match): + return html.format(match.group(0)) + return re.sub(query, span_matches, text, flags=re.I) + + +def search_generate(search_fields, time_range, user_ids, search_after_fields, filter_ids, + size, search_mode): + from elasticsearch_dsl import Q, Search + from elasticsearch import Elasticsearch + from dongtai_common.models.agent_method_pool import MethodPoolDocument + from copy import deepcopy + start_time, end_time = time_range + must_query = [ + Q('range', update_time={ + 'gte': start_time, + 'lte': end_time + }), + ] + must_not_query = [Q('terms', ids=filter_ids)] + should_query = [] + search_fields = dict(search_fields) + if 'req_header_fs' in search_fields.keys(): + search_fields['req_header_for_search'] = search_fields['req_header_fs'] + del search_fields['req_header_fs'] + search_fields = [(k, v) for k, v in dict(search_fields).items()] + if search_mode == 1: + should_query = [ + 
Q('match', **(dict([search_field]))) for search_field in search_fields + ] + elif search_mode == 2: + must_not_query.extend([ + Q('match', **(dict([search_field]))) + for search_field in search_fields + ]) + if user_ids: + must_query.append(Q('terms', user_id=list(user_ids))) + if search_after_fields: + must_query.extend( + [Q('range', **{k: { + 'gte': v, + }}) for k, v in search_after_fields]) + a = Q('bool', + must=must_query, + must_not=must_not_query, + should=should_query, + minimum_should_match=1) + from dongtai_conf import settings + return list(MethodPoolDocument.search( + ).query(a).sort('-update_time')[:size].using( + Elasticsearch( + settings.ELASTICSEARCH_DSL['default']['hosts']))) diff --git a/dongtai_web/views/engine_method_pool_time_range.py b/dongtai_web/views/engine_method_pool_time_range.py new file mode 100644 index 000000000..59e8c034c --- /dev/null +++ b/dongtai_web/views/engine_method_pool_time_range.py @@ -0,0 +1,51 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : engine_method_pool_time_range +# @created : 星期四 10月 21, 2021 17:57:16 CST +# +# @description : +###################################################################### + + + +from functools import reduce + +from django.db.models import Q +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.project import IastProject +from dongtai_common.models.user import User +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.hook_type import HookType + +from dongtai_web.utils import get_model_field, assemble_query +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy +from django.db.utils import OperationalError +import re +import operator +import time +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ +from django.utils.translation import gettext_lazy + +_GetResponseSerializer = get_response_serializer( + serializers.IntegerField(help_text=_('the eariest time of method_pool'))) + + +class MethodPoolTimeRangeProxy(AnonymousAndUserEndPoint): + @extend_schema_with_envcheck(tags=[_('Method Pool')], + summary=_('Method Pool Time Range'), + description=_("get method_pool eariest time"), + response_schema=_GetResponseSerializer) + def get(self, request): + q = Q(agent_id__in=[ + item['id'] for item in list( + self.get_auth_agents_with_user(request.user).values('id')) + ]) + mintime = MethodPool.objects.filter(q).values_list( + 'update_time').order_by('-update_time').first() + if mintime is None: + return R.failure() + return R.success(data=mintime) diff --git a/dongtai_web/views/engine_method_pools.py b/dongtai_web/views/engine_method_pools.py new file mode 100644 index 000000000..3294093e6 --- /dev/null +++ b/dongtai_web/views/engine_method_pools.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +from base.endpoint import SessionAuthProxyView +from dongtai_common.endpoint import UserPermission + + +class MethodPoolProxy(SessionAuthProxyView): + permission_classes = (UserPermission,) + source = 'api/engine/method_pools' diff --git a/dongtai_web/views/filereplace.py b/dongtai_web/views/filereplace.py new file mode 100644 index 000000000..11f5c2a6b 
--- /dev/null +++ b/dongtai_web/views/filereplace.py @@ -0,0 +1,74 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : filereplace +# @created : Friday Oct 08, 2021 11:42:57 CST +# +# @description : +###################################################################### + + + + +from dongtai_common.endpoint import R, TalentAdminEndPoint +import logging +from django.utils.translation import gettext_lazy as _ +import os +from dongtai_conf.settings import MEDIA_ROOT +from rest_framework.parsers import FileUploadParser +logger = logging.getLogger('dongtai-webapi') + +FILES_ALLOWED_MODIFIY = ("logo.png", "logo_en.png", "favicon.ico") +FILES_PATH = 'assets/img/' +FILES_PATH_BACKUP = 'backup' +FILES_SIZE_LIMIT = { + "logo.png": 1024 * 1024 * 2, + "favicon.ico": 1024 * 128, + "logo_en.png": 1024 * 1024 * 2 +} +FILES_FORMAT = { + "logo.png": '.png', + "favicon.ico": '.ico', + "logo_en.png": '.png', +} + +FILES_CONTENT_TYPE = { + "logo.png": ['image/png'], + "favicon.ico": [ + 'image/vnd.microsoft.icon', 'image/x-icon', 'image/ico', 'image/icon', + 'text/ico', 'application/ico' + ], + "logo_en.png": ['image/png'], +} + + +class FileReplace(TalentAdminEndPoint): + def post(self, request, filename: str): + if filename not in FILES_ALLOWED_MODIFIY: + return R.failure(msg=_( + "this file is disallowed to modifyupload failed,this file is disallowed to modify." + )) + try: + file_size = FILES_SIZE_LIMIT[filename] + file_format = FILES_FORMAT[filename] + file_content_type = FILES_CONTENT_TYPE[filename] + uploadfile = request.data['file'] + if uploadfile.name.endswith( + file_format + ) and uploadfile.size <= file_size and uploadfile.content_type in file_content_type: + pass + else: + return R.failure(msg=_("upload error")) + filepath = os.path.join(MEDIA_ROOT, FILES_PATH, filename) + with open(filepath, 'wb+') as fp: + for chunk in uploadfile.chunks(): + fp.write(chunk) + return R.success(msg=_("upload sussess")) + except Exception as e: + logger.error(e) + with open(filepath, 'wb+') as fp: + backup_filepath = os.path.join(MEDIA_ROOT, FILES_PATH, + FILES_PATH_BACKUP, filename) + with open(backup_filepath, 'rb+') as backup_fp: + write_obj = backup_fp.read() + fp.write(write_obj) + return R.failure(msg=_("upload error, fail back to default")) diff --git a/dongtai_web/views/github_contributors.py b/dongtai_web/views/github_contributors.py new file mode 100644 index 000000000..19c676ba4 --- /dev/null +++ b/dongtai_web/views/github_contributors.py @@ -0,0 +1,40 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : github_contributors +# @created : Thursday Sep 16, 2021 15:34:42 CST +# +# @description : +###################################################################### + +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint +from dongtai_web.github_contributors import get_github_contributors +import threading +import asyncio +from functools import partial +import os + +async def delay(time): + await asyncio.sleep(time) + +async def timer(time, function): + while True: + future = asyncio.ensure_future(delay(time)) + future.add_done_callback(function) + await future + +_update = partial(get_github_contributors, update=True) + + +def corotheard(): + _update() + asyncio.run(timer(60 * 90, _update)) + + +t1 = threading.Thread(target=corotheard, daemon=True) +t1.start() + + +class GithubContributorsView(AnonymousAndUserEndPoint): + def get(self, request): + 
dic = get_github_contributors() + return R.success(data=dic) diff --git a/dongtai_web/views/health.py b/dongtai_web/views/health.py new file mode 100644 index 000000000..b43416d51 --- /dev/null +++ b/dongtai_web/views/health.py @@ -0,0 +1,64 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : health +# @created : Wednesday Aug 25, 2021 12:12:02 CST +# +# @description : +###################################################################### + + + +from dongtai_common.endpoint import R +from django.utils.translation import gettext_lazy as _ +from dongtai_common.endpoint import UserEndPoint +from dongtai_web.utils import get_openapi, validate_url +import requests +from urllib.parse import urljoin +from rest_framework.authtoken.models import Token +from requests.exceptions import ConnectionError, ConnectTimeout +import json +import logging +from django.utils.translation import get_language +from dongtai_web.utils import checkopenapistatus +logger = logging.getLogger('dongtai-webapi') + +HEALTHPATH = 'api/v1/health' + + +class HealthView(UserEndPoint): + def get(self, request): + openapi = get_openapi() + if openapi is None: + return R.failure(msg=_("Get OpenAPI configuration failed")) + if not validate_url(openapi): + return R.failure(msg=_("OpenAPI service is down, Please check it.")) + + token, success = Token.objects.get_or_create(user=request.user) + openapistatus, openapi_resp = checkopenapistatus( + urljoin(openapi, HEALTHPATH), token.key) + data = {"dongtai_webapi": 1} + if openapistatus: + data.update(openapi_resp) + else: + data.update({ + "dongtai_openapi": { + "status": 0 + }, + "dongtai_engine": { + "status": 0 + }, + "oss": { + "status": 0 + }, + "engine_monitoring_indicators": [], + }) + cur_language = get_language() + for indicator in data['engine_monitoring_indicators']: + cur_language_field = indicator.get( + '_'.join(['name', cur_language]), None) + indicator[ + 'name'] = cur_language_field if cur_language_field else indicator[ + 'name'] + return R.success(data=data) + + diff --git a/dongtai_web/views/log_clear.py b/dongtai_web/views/log_clear.py new file mode 100644 index 000000000..9dac94503 --- /dev/null +++ b/dongtai_web/views/log_clear.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +from django.contrib.admin.models import LogEntry +from dongtai_common.endpoint import UserEndPoint, R +from django.utils.translation import gettext_lazy as _ +import datetime + +class LogClear(UserEndPoint): + name = 'api-v1-log-clear' + description = _('Log clear') + + def get(self, request): + user = request.user + now = datetime.datetime.now() + + if user.is_system_admin(): + LogEntry.objects.filter(action_time__lt=now).delete() + elif user.is_talent_admin(): + users = self.get_auth_users(user) + LogEntry.objects.filter(action_time__lt=now,user__in=users).delete() + else: + return R.failure(status=203, msg=_('no permission')) + return R.success() diff --git a/dongtai_web/views/log_delete.py b/dongtai_web/views/log_delete.py new file mode 100644 index 000000000..7d30ff094 --- /dev/null +++ b/dongtai_web/views/log_delete.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +from django.contrib.admin.models import LogEntry +from dongtai_common.endpoint import UserEndPoint, R +from django.utils.translation import gettext_lazy as _ + + +class 
LogDelete(UserEndPoint): + name = 'api-v1-log-delete' + description = _('Log delete') + + def post(self, request): + ids = request.data.get('ids') + if ids: + ids = [int(id.strip()) for id in ids.split(',')] + user = request.user + if user.is_superuser == 1: + LogEntry.objects.filter(id__in=ids).delete() + elif user.is_superuser == 2: + users = self.get_auth_users(user) + user_ids = list(users.values_list('id', flat=True)) + LogEntry.objects.filter(id__in=ids, user_id__in=user_ids).delete() + else: + return R.failure(msg=_('no permission')) + + return R.success(msg=_('success')) + else: + return R.failure(status=203, msg=_('The data to be deleted should not be empty')) diff --git a/dongtai_web/views/log_download.py b/dongtai_web/views/log_download.py new file mode 100644 index 000000000..8acc859a3 --- /dev/null +++ b/dongtai_web/views/log_download.py @@ -0,0 +1,178 @@ +import os +import logging +import zipfile +from dongtai_common.endpoint import UserEndPoint +from io import BytesIO, StringIO +from dongtai_common.models.agent import IastAgent +from enum import Enum +from django.http import FileResponse,JsonResponse +from rest_framework import viewsets +import logging +from result import Ok, Err, Result +from functools import partial +from wsgiref.util import FileWrapper +from dongtai_common.utils.user import get_auth_users__by_id +from django.http import HttpResponseNotFound +from dongtai_common.models.message import IastMessage +import threading +from django.db.models import Q +from django.db import transaction +from dongtai_conf.settings import TMP_COMMON_PATH +from tempfile import NamedTemporaryFile +from dongtai_conf.settings import TMP_COMMON_PATH +from dongtai_common.endpoint import R + +logger = logging.getLogger('dongtai-webapi') + + +class ResultType(Enum): + OK = 1 + ERR = 2 + +def nothing_resp(): + return HttpResponseNotFound("找不到相关日志数据") + + +class AgentLogDownload(UserEndPoint, viewsets.ViewSet): + + def get_single(self, request, pk): + try: + a = int(pk) > 0 + if not a: + return nothing_resp() + except BaseException: + return nothing_resp() + if IastAgent.objects.filter(pk=pk, + user__in=get_auth_users__by_id( + request.user.id)).exists(): + result = get_newest_log_zip(pk) + if isinstance(result, Err): + return nothing_resp() + file_ = result.value + file_.seek(0) + response = FileResponse(FileWrapper(file_)) + response['content_type'] = 'application/octet-stream' + response['Content-Disposition'] = f"attachment; filename={pk}.zip" + return response + return nothing_resp() + + + def batch_task_add(self, request): + mode = request.data.get('mode', 1) + users = self.get_auth_users(self.request.user) + q = Q(user__in=users) + if mode == 1: + ids = request.data.get('ids', []) + q = q & Q(pk__in=ids) + elif mode == 2: + q = q + + def generate_zip_thread(): + generate_agent_log_zip(q, request.user.id) + + t1 = threading.Thread(target=generate_zip_thread, daemon=True) + t1.start() + return R.success() + + def batch_log_download(self, request, pk): + try: + a = int(pk) > 0 + if not a: + return nothing_resp() + return FileResponse(open( + os.path.join(TMP_COMMON_PATH, f'batchagent/{pk}.zip'), 'rb'), + filename='agentlog.zip') + except FileNotFoundError as e: + logger.info(e) + return nothing_resp() + except Exception as e: + logger.info(e) + return nothing_resp() + + + +def generate_path(agent_id): + return os.path.join(TMP_COMMON_PATH, f'agent/{agent_id}/') + + +def get_newest_log_zip(agent_id: int) -> Result: + path = generate_path(agent_id) + res = 
file_newest_2_file_under_path(path) + if isinstance(res, Err): + return res + res = getzipfilesinmemorty(res.value) + return res + + +def getzipfilesinmemorty(filenames: list) -> Result[int, BytesIO]: + try: + zip_subdir = "logs" + s = BytesIO() + with zipfile.ZipFile(s, "w") as zf: + for fpath in filenames: + fdir, fname = os.path.split(fpath) + zip_path = os.path.join(zip_subdir, fname) + zf.write(fpath, zip_path) + zf.close() + return Ok(s) + except Exception as e: + logger.error(e, exc_info=True) + return Err('unexcept eror') + + +def file_newest_N_file_under_path(path: str, N: int) -> Result[int, str]: + try: + files = [ + f for f in os.listdir(path) + if os.path.isfile(os.path.join(path, f)) + ] + paths = [os.path.join(path, basename) for basename in files] + return Ok(sorted(paths, key=os.path.getctime, reverse=True)[:N]) + except (FileNotFoundError, ValueError) as e: + return Err('file path error') + except Exception as e: + logger.error(e, exc_info=True) + return Err('unexcept error') + + +file_newest_file_under_path = partial(file_newest_N_file_under_path, N=1) +file_newest_2_file_under_path = partial(file_newest_N_file_under_path, N=2) + + +def zip_file_write(msg_id, items): + from zipfile import ZipFile + zipfilepath = os.path.join(TMP_COMMON_PATH, f'batchagent/{msg_id}.zip') + zip_subdir = "logs" + with ZipFile(zipfilepath, 'w') as zipObj: + with NamedTemporaryFile() as tmpfile: + zipObj.write(tmpfile.name) + for i in items: + for k in i: + path1, filename = os.path.split(k) + path2, agent_id = os.path.split(path1) + zipObj.write( + k, os.path.join(zip_subdir, f'/{agent_id}/', filename)) + return zipfilepath + + +def get_zip_together(agents_ids, msg_id): + from zipfile import ZipFile + res = map( + lambda x: x.value, + filter( + lambda x: isinstance(x, Ok), + map(file_newest_2_file_under_path, map(generate_path, + agents_ids)))) + filepath = zip_file_write(msg_id, res) + return filepath + +@transaction.atomic +def generate_agent_log_zip(q, user_id): + agent_ids = IastAgent.objects.filter(q).values_list('id', flat=True) + msg = IastMessage.objects.create(message='AGENT日志导出成功', + message_type_id=2, + relative_url='/api/v1/agent/log/tmp', + to_user_id=user_id) + get_zip_together(agent_ids, msg.id) + msg.relative_url = f'/api/v1/agent/log/batch/{msg.id}' + msg.save() diff --git a/dongtai_web/views/log_export.py b/dongtai_web/views/log_export.py new file mode 100644 index 000000000..81c1b6f2e --- /dev/null +++ b/dongtai_web/views/log_export.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from django.contrib.admin.models import LogEntry +from django.http import HttpResponse +from django.utils.encoding import escape_uri_path +from import_export import resources +from rest_framework.generics import GenericAPIView +from django.utils.translation import gettext_lazy as _ +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.endpoint import R + + +class LogResurce(resources.ModelResource): + def get_export_headers(self): + return [ + u'时间', u'用户', u'操作记录' + ] + + class Meta: + model = LogEntry + fields = ('user', 'action_time', 'change_message') + + +class ExportMixin(object): + @staticmethod + def attachment_response(export_data, filename='download.xls', content_type='application/vnd.ms-excel'): + """ + - https://segmentfault.com/q/1010000009719860 + - https://blog.csdn.net/qq_34309753/article/details/99628474 + + :param export_data: + :param filename: + :param content_type: + :return: 
+ """ + response = HttpResponse(export_data, content_type=content_type) + response['content_type'] = content_type + response['Content-Disposition'] = "attachment; filename*=utf-8''{}".format(escape_uri_path(filename)) + return response + + def get(self, request): + ids = request.query_params.get('ids') + if ids: + ids = [int(id.strip()) for id in ids.split(',')] + user = request.user + if user.is_system_admin(): + queryset = LogEntry.objects.filter(id__in=ids).filter() + elif user.is_talent_admin(): + auth_users = UserEndPoint.get_auth_users(user) + queryset = LogEntry.objects.filter(id__in=ids, user__in=auth_users).filter() + else: + return R.failure(msg=_('no permission')) + resources = self.resource_class() + export_data = resources.export(queryset, False) + return ExportMixin.attachment_response(getattr(export_data, 'xls'), filename='用户操作日志.xls') + else: + return R.failure(status=202, msg=_('Export failed, error message: Log id should not be empty')) + + +class LogExport(ExportMixin, GenericAPIView): + resource_class = LogResurce diff --git a/dongtai_web/views/logs.py b/dongtai_web/views/logs.py new file mode 100644 index 000000000..4bf0b6f57 --- /dev/null +++ b/dongtai_web/views/logs.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: webapi +import logging +from django.contrib.admin.models import LogEntry +from dongtai_common.endpoint import UserEndPoint, R +from django.utils.translation import gettext_lazy as _ +from django.core.cache import cache + +logger = logging.getLogger('dongtai-webapi') + + +class LogsEndpoint(UserEndPoint): + name = 'api-v1-logs' + description = _('Log list') + + def make_key(self, request): + self.cache_key = f"{request.user.id}_total_logs_id" + self.cache_key_max_id = f"{request.user.id}_max_logs_id" + + def get_query_cache(self): + total = cache.get(self.cache_key) + max_id = cache.get(self.cache_key_max_id) + return total, max_id + + def set_query_cache(self, queryset): + total = queryset.values('id').count() + if total > 0: + max_id = queryset.values_list('id', flat=True).order_by('-id')[0] + else: + max_id = 0 + cache.set(self.cache_key, total, 60 * 60) + cache.set(self.cache_key_max_id, max_id, 60 * 60) + return total, max_id + + def parse_args(self, request): + page = int(request.query_params.get('page', 1)) + page_size = int(request.query_params.get('pageSize', 20)) + page_size = page_size if page_size < 50 else 50 + return page, page_size, request.user + + def get(self, request): + try: + page, page_size, user = self.parse_args(request) + + if user.is_system_admin(): + queryset = LogEntry.objects.all() + elif user.is_talent_admin(): + users = self.get_auth_users(user) + user_ids = list(users.values_list('id', flat=True)) + queryset = LogEntry.objects.filter(user_id__in=user_ids) + else: + queryset = LogEntry.objects.filter(user=user) + # set cache key + self.make_key(request) + if page == 1: + total, max_id = self.set_query_cache(queryset) + else: + total, max_id = self.get_query_cache() + if not total or not max_id: + total, max_id = self.set_query_cache(queryset) + # only read log_id + cur_data = queryset.filter(id__lte=max_id).values_list('id', flat=True).order_by('-id')[(page -1) *page_size: page *page_size] + cur_ids = [] + for item in cur_data: + cur_ids.append(item) + # read log detail + page_data = LogEntry.objects.filter(id__in=cur_ids).order_by('-id').select_related('content_type', 'user') + if page_data: + data = [] + for item in page_data: + data.append({ + "log_id": 
item.id, + "user_id": item.user.id, + "username": item.user.username, + "action_time": item.action_time.strftime('%Y-%m-%d %H:%M:%S'), + "content_type": item.content_type.app_labeled_name, + "object_id": item.object_id, + "object_repr": item.object_repr, + "action_flag": item.action_flag, + "change_message": item.change_message, + }) + return R.success(data=data, total=total) + else: + return R.failure(msg=_('No permission to access'), status=203) + except Exception as e: + logger.error(e,exc_info=True) + return R.success(data=list(), msg=_('failure')) diff --git a/dongtai_web/views/messages_del.py b/dongtai_web/views/messages_del.py new file mode 100644 index 000000000..0d682282f --- /dev/null +++ b/dongtai_web/views/messages_del.py @@ -0,0 +1,55 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : messages_del +# @created : 星期三 10月 13, 2021 15:47:31 CST +# +# @description : +###################################################################### + + + +from dongtai_common.utils import const +from dongtai_common.models.message import IastMessage +from dongtai_common.endpoint import R +from dongtai_common.utils import const +from dongtai_common.endpoint import UserEndPoint +from django.forms.models import model_to_dict +from django.db.models import Q +from rest_framework import serializers +from rest_framework.serializers import ValidationError +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ + + + +class _MessagesDelArgsSerializer(serializers.Serializer): + id = serializers.IntegerField(required=False, + default=None, + help_text=_('The id of Message')) + all = serializers.NullBooleanField( + required=False, + default=False, + help_text=_('delete all messages when all is True')) + + +class MessagesDelEndpoint(UserEndPoint): + @extend_schema_with_envcheck( + request=_MessagesDelArgsSerializer, + summary=_('Messages Delete'), + description=_("Used by the user to delete the corresponding message"), + tags=[_('Messages')]) + def post(self, request): + ser = _MessagesDelArgsSerializer(data=request.data) + try: + if ser.is_valid(True): + id_ = ser.validated_data['id'] + all_ = ser.validated_data['all'] + except ValidationError as e: + return R.failure(data=e.detail) + if all_ is True: + IastMessage.objects.filter( + to_user_id=request.user.id).all().delete() + else: + IastMessage.objects.filter(to_user_id=request.user.id, + pk=id_).delete() + return R.success(msg='success') diff --git a/dongtai_web/views/messages_list.py b/dongtai_web/views/messages_list.py new file mode 100644 index 000000000..421b2bed3 --- /dev/null +++ b/dongtai_web/views/messages_list.py @@ -0,0 +1,76 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : messages_list +# @created : 星期三 10月 13, 2021 14:34:14 CST +# +# @description : +###################################################################### + +from dongtai_common.utils import const +from dongtai_common.models.message import IastMessage +from dongtai_common.endpoint import R +from dongtai_common.utils import const +from dongtai_common.endpoint import UserEndPoint +from django.forms.models import model_to_dict +from django.db.models import Q +from rest_framework import serializers +from rest_framework.serializers import ValidationError +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from 
django.utils.translation import gettext_lazy as _ + + +class _MessagesArgsSerializer(serializers.Serializer): + page_size = serializers.IntegerField(default=20, + help_text=_('Number per page')) + page = serializers.IntegerField(default=1, help_text=_('Page index')) + + +class MessageSerializer(serializers.ModelSerializer): + class Meta: + model = IastMessage + fields = [ + 'id', 'message', 'relative_url', 'create_time', 'read_time', + 'is_read', 'message_type' + ] + + +class PageSerializer(serializers.Serializer): + alltotal = serializers.IntegerField(help_text=_('total_number')) + num_pages = serializers.IntegerField(help_text=_('the number of pages')) + page_size = serializers.IntegerField(help_text=_('Number per page')) + + +class ResponseDataSerializer(serializers.Serializer): + messages = MessageSerializer(many=True) + + +_SuccessSerializer = get_response_serializer(ResponseDataSerializer()) + + +class MessagesEndpoint(UserEndPoint): + @extend_schema_with_envcheck( + [_MessagesArgsSerializer], + response_schema=_SuccessSerializer, + summary=_('Get Messages List'), + description=_( + "Used to get the message list corresponding to the user"), + tags=[_('Messages')]) + def get(self, request): + ser = _MessagesArgsSerializer(data=request.GET) + try: + if ser.is_valid(True): + page_size = ser.validated_data['page_size'] + page = ser.validated_data['page'] + except ValidationError as e: + return R.failure(data=e.detail) + queryset = IastMessage.objects.filter( + to_user_id=request.user.id).order_by('-create_time').all() + page_summary, messages = self.get_paginator(queryset, page, page_size) + messages_data = MessageSerializer(messages, many=True).data + for message in messages: + message.is_read = 1 + message.save(update_fields=['is_read']) + return R.success(data={ + 'messages': messages_data, + 'page': page_summary + }, ) diff --git a/dongtai_web/views/messages_new.py b/dongtai_web/views/messages_new.py new file mode 100644 index 000000000..445ddf2d0 --- /dev/null +++ b/dongtai_web/views/messages_new.py @@ -0,0 +1,43 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : messages_new +# @created : 星期三 10月 13, 2021 15:30:46 CST +# +# @description : +###################################################################### + + + +from dongtai_common.utils import const +from dongtai_common.models.message import IastMessage +from dongtai_common.endpoint import R +from dongtai_common.utils import const +from dongtai_common.endpoint import UserEndPoint +from django.forms.models import model_to_dict +from django.db.models import Q +from rest_framework import serializers +from rest_framework.serializers import ValidationError +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ + + +class ResponseDataSerializer(serializers.Serializer): + new_message_count = serializers.IntegerField( + help_text=_('total number of new messages')) + + +_SuccessSerializer = get_response_serializer(ResponseDataSerializer()) + + +class MessagesNewEndpoint(UserEndPoint): + @extend_schema_with_envcheck( + response_schema=_SuccessSerializer, + summary=_('Messages Count'), + description=_( + "Used to get the number of messages corresponding to the user"), + tags=[_('Messages')]) + def get(self, request): + return R.success(data={ + 'new_message_count': + IastMessage.objects.filter(to_user_id=request.user.id, ).count() + }, ) diff --git 
a/dongtai_web/views/messages_send.py b/dongtai_web/views/messages_send.py new file mode 100644 index 000000000..4d24c789f --- /dev/null +++ b/dongtai_web/views/messages_send.py @@ -0,0 +1,50 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : messages_send +# @created : 星期四 10月 14, 2021 16:11:22 CST +# +# @description : +###################################################################### + + + +from dongtai_common.utils import const +from dongtai_common.models.message import IastMessage +from dongtai_common.endpoint import R +from dongtai_common.utils import const +from dongtai_common.endpoint import TalentAdminEndPoint +from django.forms.models import model_to_dict +from django.db.models import Q +from rest_framework import serializers +from rest_framework.serializers import ValidationError +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ + + + + +class MessageSerializer(serializers.ModelSerializer): + class Meta: + model = IastMessage + fields = [ + 'id', 'message', 'relative_url', 'create_time', 'read_time', + 'is_read', 'message_type_id', 'to_user_id' + ] + + +class MessagesSendEndpoint(TalentAdminEndPoint): + @extend_schema_with_envcheck( + request=MessageSerializer, + summary=_('Send Message'), + description=_( + "Used to get the message list corresponding to the user"), + tags=[_('Messages')]) + def post(self, request): + ser = MessageSerializer(data=request.data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + IastMessage.create(**ser.validated_data) + return R.success() diff --git a/dongtai_web/views/method_graph.py b/dongtai_web/views/method_graph.py new file mode 100644 index 000000000..1dc39c69b --- /dev/null +++ b/dongtai_web/views/method_graph.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi + +import json +import logging + +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint +from dongtai_common.engine.vul_engine import VulEngine +from dongtai_common.engine.vul_engine_v2 import VulEngineV2 +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.replay_method_pool import IastAgentMethodPoolReplay +from dongtai_common.utils import const +from dongtai_common.utils.validate import Validate +from django.utils.translation import gettext_lazy as _ + +logger = logging.getLogger('dongtai-webapi') + + +class MethodGraph(AnonymousAndUserEndPoint): + def get(self, request): + try: + method_pool_id = int(request.query_params.get('method_pool_id')) + method_pool_type = request.query_params.get('method_pool_type') + replay_id = request.query_params.get('method_pool_replay_id', None) + replay_type = request.query_params.get('replay_type', None) + if replay_type is not None and int(replay_type) not in [ + const.API_REPLAY, const.REQUEST_REPLAY + ]: + return R.failure(msg="replay_type error") + replay_type = const.REQUEST_REPLAY if replay_type is None else int( + replay_type) + if Validate.is_empty(method_pool_id) and replay_id is None: + return R.failure(msg=_('Method pool ID is empty')) + + auth_agents = self.get_auth_and_anonymous_agents(request.user).values('id') + auth_agent_ids = auth_agents.values_list('id', flat=True) + + cur_ids = [] + for item in auth_agent_ids: + cur_ids.append(int(item)) + + if method_pool_type == 'normal' and 
MethodPool.objects.filter( + agent_id__in=cur_ids, id=method_pool_id).exists(): + method_pool = MethodPool.objects.filter( + id=method_pool_id).first() + elif method_pool_type == 'replay' and replay_id: + method_pool = IastAgentMethodPoolReplay.objects.filter( + id=replay_id, replay_type=replay_type).first() + elif method_pool_type == 'replay' and MethodPool.objects.filter( + agent_id__in=cur_ids, + id=method_pool_id).exists(): + method_pool = IastAgentMethodPoolReplay.objects.filter( + relation_id=method_pool_id, + replay_type=replay_type).first() + else: + return R.failure(msg=_('Stain call map type does not exist')) + + if method_pool is None: + return R.failure(msg=_('Data does not exist or no permission to access')) + + data, link_count, method_count = self.search_all_links(method_pool.method_pool) + return R.success(data=data) + + except Exception as e: + logger.error(e,exc_info=True) + return R.failure(msg=_('Page and PageSize can only be numeric')) + + def get_method_pool(self, user, method_pool_id): + """ + :param user: + :param method_pool_id: + :return: + """ + return MethodPool.objects.filter( + agent__in=self.get_auth_and_anonymous_agents(user), + id=method_pool_id + ).first() + + def search_all_links(self, method_pool): + engine = VulEngineV2() + engine.prepare(method_pool=json.loads(method_pool), vul_method_signature='') + engine.search_all_link() + return engine.get_taint_links() + + def search_taint_link(self, method_pool, sources, sinks, propagators): + """ + :param method_pool: + :param sources: + :param sinks: + :param propagators: + :return: + """ + engine = VulEngine() + links = list() + if sinks: + for sink in sinks: + engine.search( + method_pool=json.loads(method_pool.method_pool), + vul_method_signature=sink + ) + status, stack, source, sink = engine.result() + if status is False: + continue + + method_caller_set = MethodGraph.convert_to_set(stack) + if self.check_match( + method_caller_set=method_caller_set, + source_set=sources, + propagator_set=propagators, + sink_set=sinks + ) is False: + continue + + links.append(stack) + else: + method_caller_set = self.convert_method_pool_to_set(method_pool.method_pool) + if self.check_match(method_caller_set, source_set=sources, propagator_set=propagators): + links.append([json.loads(method_pool.method_pool)]) + return links + + def add_taint_links_to_all_links(self, taint_links, all_links): + if taint_links: + for links in taint_links: + for link in links: + left = None + edges = list() + for node in link: + if node['source']: + left = node['invokeId'] + elif left is not None: + right = node['invokeId'] + edges.append({ + 'source': str(left), + 'target': str(right) + }) + left = right + for edge in edges: + for _edge in all_links['edges']: + if 'selected' not in _edge and _edge['source'] == edge['source'] and _edge['target'] == \ + edge['target']: + _edge['selected'] = True + + def convert_method_pool_to_set(self, method_pool): + method_callers = json.loads(method_pool) + return MethodGraph.convert_to_set(method_callers) + + def check_match(self, method_caller_set, sink_set=None, source_set=None, propagator_set=None): + """ + :param method_caller_set: + :param sink_set: + :param source_set: + :param propagator_set: + :return: + """ + status = True + if sink_set: + result = method_caller_set & sink_set + status = status and result is not None and len(result) > 0 + if source_set: + result = method_caller_set & source_set + status = status and result is not None and len(result) > 0 + if propagator_set: + result = 
method_caller_set & propagator_set + status = status and result is not None and len(result) > 0 + return status + + @staticmethod + def convert_to_set(method_callers): + def signature_concat(method_caller): + return f'{method_caller.get("className").replace("/", ".")}.{method_caller.get("methodName")}' + + method_caller_set = set() + for method_caller in method_callers: + if isinstance(method_caller, list): + for node in method_caller: + method_caller_set.add(signature_concat(node)) + elif isinstance(method_caller, dict): + method_caller_set.add(signature_concat(method_caller)) + return method_caller_set diff --git a/dongtai_web/views/openapi.py b/dongtai_web/views/openapi.py new file mode 100644 index 000000000..700331508 --- /dev/null +++ b/dongtai_web/views/openapi.py @@ -0,0 +1,77 @@ +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_conf.settings import config +from dongtai_common.models.profile import IastProfile +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from urllib.parse import urlparse +from rest_framework import serializers +from dongtai_web.utils import get_openapi + +_PostResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Created success')), ''), + ((202, _('Current users have no permission to modify')), ''), + ((202, _('Parameter error')), ''), +)) + + +class OpenApiEndpointSerializer(serializers.Serializer): + value = serializers.CharField(help_text='The openapi url') + + +class OpenApiEndpointGetResponseSerializer(serializers.Serializer): + url = serializers.CharField(help_text='The openapi url') + + +_GetResponseSerializer = get_response_serializer( + data_serializer=OpenApiEndpointGetResponseSerializer(), + status_msg_keypair=( + ((201, _('success')), ''), + ((202, _('Get OpenAPI configuration failed')), ''), + )) + + +class OpenApiEndpoint(UserEndPoint): + @extend_schema_with_envcheck( + tags=[_('Profile')], + summary=_('Profile DongTai-OpenApi Retrieve'), + description=_("Get the uri of DongTai-OpenApi"), + response_schema=_GetResponseSerializer, + ) + def get(self, request): + profilefromdb = IastProfile.objects.filter( + key='apiserver').values_list('value', flat=True).first() + profilefromini = None + profiles = list( + filter(lambda x: x is not None, [profilefromini, profilefromdb])) + if profiles == [] or not profiles[0]: + return R.failure(msg=_("Get OpenAPI configuration failed")) + return R.success(data={'url': get_openapi()}) + + @extend_schema_with_envcheck( + request=OpenApiEndpointSerializer, + tags=[_('Profile')], + summary=_('Profile DongTai-OpenApi Modify'), + description= + _("To set the url address of DongTai-OpenApi, administrator rights are required" + ), + response_schema=_PostResponseSerializer, + ) + def post(self, request): + if not request.user.is_talent_admin(): + return R.failure( + msg=_("Current users have no permission to modify")) + value = request.data.get('value', '') + parse_re = urlparse(value) + if parse_re.scheme not in ('http', 'https') or parse_re.hostname in ( + '127.0.0.1', 'localhost'): + return R.failure(msg=_("Parameter error")) + profilefromdb = IastProfile.objects.filter(key='apiserver').first() + if profilefromdb: + profilefromdb.value = value + profilefromdb.save() + return R.success(msg=_("Created success")) + profilefromdb = IastProfile.objects.create(key='apiserver', + value=value) + profilefromdb.save() + return R.success(msg=_("Created success")) 
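
The POST handler in openapi.py above accepts a new DongTai-OpenApi base URL only when the parsed scheme is http or https and the hostname is not a loopback address, before persisting it to the IastProfile row with key 'apiserver'. A minimal standalone sketch of that acceptance rule is shown below; the helper name is_acceptable_openapi_url is illustrative only and is not part of this diff, but the checks mirror the ones performed in OpenApiEndpoint.post.

from urllib.parse import urlparse

def is_acceptable_openapi_url(value: str) -> bool:
    """Illustrative mirror of the check in OpenApiEndpoint.post:
    require an http(s) scheme and reject loopback hostnames."""
    parsed = urlparse(value)
    if parsed.scheme not in ('http', 'https'):
        return False
    if parsed.hostname in ('127.0.0.1', 'localhost'):
        return False
    return True

# Example verdicts matching the endpoint's behaviour:
assert is_acceptable_openapi_url('https://openapi.example.com/') is True
assert is_acceptable_openapi_url('http://127.0.0.1:8000') is False   # loopback rejected
assert is_acceptable_openapi_url('ftp://openapi.example.com') is False  # non-http(s) scheme rejected

If the value passes this check, the endpoint updates the existing 'apiserver' profile when one exists and otherwise creates it, so subsequent calls to get_openapi() resolve to the newly configured address.
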
diff --git a/dongtai_web/views/oss_health.py b/dongtai_web/views/oss_health.py new file mode 100644 index 000000000..a37733389 --- /dev/null +++ b/dongtai_web/views/oss_health.py @@ -0,0 +1,33 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : oss_health +# @created : Thursday Aug 26, 2021 10:51:06 CST +# +# @description : +###################################################################### +from dongtai_web.utils import get_openapi, validate_url +from urllib.parse import urljoin +from rest_framework.authtoken.models import Token +from dongtai_web.utils import checkopenapistatus +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.endpoint import R +from django.utils.translation import gettext_lazy as _ + +OSSHEALTHPATH = 'api/v1/oss/health' + + +class OssHealthView(UserEndPoint): + def get(self, request): + openapi = get_openapi() + if openapi is None: + return R.failure(msg=_("Get OpenAPI configuration failed")) + if not validate_url(openapi): + return R.failure(msg=_("OpenAPI configuration error")) + + token, success = Token.objects.get_or_create(user=request.user) + openapistatus, openapi_resp = checkopenapistatus( + urljoin(openapi, OSSHEALTHPATH), token.key) + if openapistatus: + return R.success(data=openapi_resp) + else: + return R.success(data={"oss": {"status": 0}}) diff --git a/dongtai_web/views/profile.py b/dongtai_web/views/profile.py new file mode 100644 index 000000000..dc88c57a0 --- /dev/null +++ b/dongtai_web/views/profile.py @@ -0,0 +1,116 @@ +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_conf.settings import config +from dongtai_common.models.profile import IastProfile +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from rest_framework.serializers import ValidationError +from django.forms.models import model_to_dict + +class ProfilepostArgsSer(serializers.Serializer): + value = serializers.CharField(help_text=_('profile value')) + +class ProfileEndpoint(UserEndPoint): + @extend_schema_with_envcheck(summary=_('Get Profile'), + description=_("Get Profile with key"), + tags=[_('Profile')]) + def get(self, request, key): + profile = IastProfile.objects.filter(key=key).values_list( + 'value', flat=True).first() + if profile is None: + return R.failure( + msg=_("Failed to get {} configuration").format(key)) + return R.success(data={key: profile}) + + @extend_schema_with_envcheck(summary=_('Profile modify'), + request=ProfilepostArgsSer, + description=_("Modifiy Profile with key"), + tags=[_('Profile')]) + def post(self, request, key): + if not request.user.is_talent_admin(): + return R.failure( + msg=_("Current users have no permission to modify")) + ser = ProfilepostArgsSer(data=request.data) + try: + if ser.is_valid(True): + value = ser.validated_data['value'] + except ValidationError as e: + return R.failure(data=e.detail) + try: + obj, created = IastProfile.objects.update_or_create( + { + 'key': key, + 'value': value + }, key=key) + except Exception as e: + print(e) + return R.failure(msg=_("Update {} failed").format(key)) + return R.success(data={key: obj.value}) + +class ProfileBatchGetArgsSer(serializers.Serializer): + keys = serializers.ListField(help_text=_('profile key')) + + +class ProfileBatchGetResSer(serializers.Serializer): + 
id = serializers.IntegerField(help_text=_('profile id')) + key = serializers.CharField(help_text=_('profile key')) + value = serializers.CharField(help_text=_('profile value')) + + +class ProfileBatchGetEndpoint(UserEndPoint): + @extend_schema_with_envcheck( + summary=_('GetProfileBatch'), + request=ProfileBatchGetArgsSer, + description=_("Get Profile with key batch"), + response_schema=ProfileBatchGetResSer(many=True), + tags=[_('Profile')]) + def post(self, request): + keys = request.data.get('keys', None) + profiles = IastProfile.objects.filter(key__in=keys).all() + if profiles is None: + return R.failure( + msg=_("Failed to get configuration")) + return R.success(data=[model_to_dict(profile) for profile in profiles]) + + +class ProfileBatchPostArgsSer(serializers.Serializer): + value = serializers.CharField(help_text=_('profile value')) + key = serializers.CharField(help_text=_('profile key')) + + +class ProfileBatchModifiedEndpoint(UserEndPoint): + @extend_schema_with_envcheck(summary=_('Profile modify'), + request=ProfileBatchPostArgsSer(many=True), + description=_("Modifiy Profile with key"), + response_schema=ProfileBatchPostArgsSer, + tags=[_('Profile')]) + def post(self, request): + if not request.user.is_talent_admin(): + return R.failure( + msg=_("Current users have no permission to modify")) + ser = ProfileBatchPostArgsSer(data=request.data, many=True) + try: + if ser.is_valid(True): + data = ser.validated_data + except ValidationError as e: + return R.failure(data=e.detail) + try: + for i in data: + obj, created = IastProfile.objects.update_or_create( + i, key=i['key']) + except Exception as e: + print(e) + return R.failure(msg=_("Update configuration failed")) + return R.success(data=data) + + +def get_model_field(model, exclude=[], include=[]): + fields = [field.name for field in model._meta.fields] + if include: + return [ + include for field in list(set(fields) - set(exclude)) + if field in include + ] + return list(set(fields) - set(exclude)) diff --git a/dongtai_web/views/program_language.py b/dongtai_web/views/program_language.py new file mode 100644 index 000000000..a5d50798b --- /dev/null +++ b/dongtai_web/views/program_language.py @@ -0,0 +1,41 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : program_language +# @created : Tuesday Sep 28, 2021 17:35:55 CST +# +# @description : +###################################################################### + + + +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers +from dongtai_common.models.program_language import IastProgramLanguage +from django.utils.translation import gettext_lazy as _ + + +class IastProgramLanguageSerializers(serializers.ModelSerializer): + id = serializers.IntegerField(help_text=_('The id of program language')) + name = serializers.CharField(help_text=_('The name of program language')) + + class Meta: + model = IastProgramLanguage + fields = ['id', 'name'] + + +_ResponseSerializer = get_response_serializer( + data_serializer=IastProgramLanguageSerializers(many=True), ) + + +class ProgrammingLanguageList(AnonymousAndUserEndPoint): + @extend_schema_with_envcheck( + tags=[_('Program Language')], + summary=_('Program Language List'), + description=_("Get a list of program language."), + response_schema=_ResponseSerializer, + ) + def get(self, request): + queryset = 
IastProgramLanguage.objects.all() + return R.success( + data=IastProgramLanguageSerializers(queryset, many=True).data) diff --git a/dongtai_web/views/project_add.py b/dongtai_web/views/project_add.py new file mode 100644 index 000000000..4339cf717 --- /dev/null +++ b/dongtai_web/views/project_add.py @@ -0,0 +1,246 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import logging +import time + +from django.db.models import Q + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project_version import IastProjectVersion +from dongtai_common.models.project import (IastProject, VulValidation) +from dongtai_common.models.strategy_user import IastStrategyUser +from dongtai_web.base.project_version import version_modify, ProjectsVersionDataSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers +from django.db import transaction +from urllib.parse import urlparse, urlunparse +import ipaddress +import requests +from dongtai_common.models.server import IastServer +logger = logging.getLogger("django") + + +class _ProjectsAddBodyArgsSerializer(serializers.Serializer): + name = serializers.CharField(help_text=_('The name of project')) + agent_ids = serializers.CharField(help_text=_( + 'The id corresponding to the agent, use, for segmentation.')) + scan_id = serializers.IntegerField( + help_text=_("The id corresponding to the scanning strategy.")) + version_name = serializers.CharField( + help_text=_("The version name of the project")) + pid = serializers.IntegerField(help_text=_("The id of the project")) + description = serializers.CharField( + help_text=_("Description of the project")) + vul_validation = serializers.IntegerField( + help_text="vul validation switch") + base_url = serializers.CharField() + test_req_header_key = serializers.CharField() + test_req_header_value = serializers.CharField() + + + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((202, _('Parameter error')), ''), + ((201, _('Created success')), ''), + ((202, _('Agent has been bound by other application')), ''), + ((203, _('Failed to create, the application name already exists')), ''), +)) + + +class ProjectAdd(UserEndPoint): + name = "api-v1-project-add" + description = _("New application") + + @extend_schema_with_envcheck( + request=_ProjectsAddBodyArgsSerializer, + tags=[_('Project')], + summary=_('Projects Add'), + description=_( + """Create a new project according to the given conditions; + when specifying the project id, update the item corresponding to the id according to the given condition.""" + ), + response_schema=_ResponseSerializer, + ) + def post(self, request): + try: + with transaction.atomic(): + name = request.data.get("name") + mode = "插桩模式" + scan_id = int(request.data.get("scan_id",0)) + auth_users = self.get_auth_users(request.user) + if scan_id == 5: + scan = IastStrategyUser.objects.filter(id=scan_id).first() + else: + scan = IastStrategyUser.objects.filter(id=scan_id, user__in=auth_users).first() + agent_ids = request.data.get("agent_ids", None) + base_url = request.data.get('base_url', None) + test_req_header_key = request.data.get('test_req_header_key',None) + test_req_header_value = request.data.get('test_req_header_value', None) + description = 
request.data.get('description', None) + pid = request.data.get("pid", 0) + accessable_ips = [] + if pid and base_url: + ips = filter(lambda x: ip_validate(x), [ + i[0] for i in IastServer.objects.filter( + pid=pid).values_list('ip').distinct().all() + ]) + accessable_ips = _accessable_ips(base_url, ips) + if accessable_ips: + parsed_url = urlparse(base_url) + if parsed_url.netloc not in parsed_url: + return R.failure(status=202, msg=_('base_url validate failed')) + if base_url and not url_validate(base_url): + return R.failure(status=202, msg=_('base_url validate failed')) + if agent_ids: + try: + agents = [int(i) for i in agent_ids.split(',')] + except Exception as e: + print(e) + return R.failure(status=202, msg=_('Agent parse error')) + else: + agents = [] + if not scan_id or not name or not mode: + logger.error('require base scan_id and name') + return R.failure(status=202, msg=_('Required scan strategy and name')) + + version_name = request.data.get("version_name", "") + if not version_name: + version_name = "V1.0" + vul_validation = request.data.get("vul_validation", None) + + if pid: + project = IastProject.objects.filter(id=pid, user__in=auth_users).first() + project.name = name + else: + + project = IastProject.objects.filter(name=name, user=request.user).first() + if not project: + project = IastProject.objects.create(name=name, user=request.user) + else: + return R.failure(status=203, msg=_('Failed to create, the application name already exists')) + versionInfo = IastProjectVersion.objects.filter( + project_id=project.id, + current_version=1, + status=1).first() + if versionInfo: + project_version_id = versionInfo.id + else: + project_version_id = 0 + current_project_version = { + "project_id": project.id, + "version_id": project_version_id, + "version_name": version_name, + "description": request.data.get("description", ""), + "current_version": 1 + } + if not versionInfo or not ( + versionInfo.version_name == version_name + and (versionInfo.description == description or not description)): + result = version_modify(project.user,auth_users, + current_project_version) + if result.get("status", "202") == "202": + logger.error('version update failure') + return R.failure(status=202, + msg=result.get('msg', + _("Version Update Error"))) + else: + project_version_id = result.get("data", {}).get("version_id", 0) + + if agents: + haveBind = IastAgent.objects.filter( + ~Q(bind_project_id=project.id), + id__in=agents, + bind_project_id__gt=0, + user__in=auth_users).exists() + if haveBind: + return R.failure(status=202, msg=_('Agent has been bound by other application')) + + project.scan = scan + project.mode = mode + project.agent_count = len(agents) + # project.user = request.user + project.latest_time = int(time.time()) + if vul_validation is not None: + project.vul_validation = vul_validation + if agents: + project.agent_count = IastAgent.objects.filter( + Q(id__in=agents) | Q(project_name=name), + user__in=auth_users, + ).update(bind_project_id=project.id, project_version_id=project_version_id) + else: + project.agent_count = IastAgent.objects.filter( + project_name=name, user=request.user).update( + bind_project_id=-1, + project_version_id=project_version_id) + + if base_url: + project.base_url = replace_ending(base_url, '/', '') + if test_req_header_key: + project.test_req_header_key = test_req_header_key + if test_req_header_value: + project.test_req_header_value = test_req_header_value + project.save(update_fields=[ + 'name', 'scan_id', 'mode', 'agent_count', + 'latest_time', 
'vul_validation', 'base_url', + 'test_req_header_key', 'test_req_header_value' + ]) + return R.success(msg='操作成功') + except Exception as e: + logger.error(e) + return R.failure(status=202, msg=_('Parameter error')) + + +def _accessable_ips(url, ips): + parse_re = urlparse(url) + return list( + filter( + lambda x: url_accessable(urlunparse(parse_re._replace(netloc=x))), + ips)) + + +def url_accessable(url): + try: + requests.get(url, timeout=2) + except Exception as e: + return False + return True + + +def url_validate(url): + parse_re = urlparse(url) + if parse_re.scheme not in ('http', + 'https') or parse_re.hostname in ('127.0.0.1', + 'localhost'): + return False + return ip_validate(parse_re.hostname) if is_ip(parse_re.hostname) else True + + + +def ip_validate(ip): + try: + ipadrs = ipaddress.IPv4Address(ip) + if int(ipaddress.IPv4Address('127.0.0.1')) < int(ipadrs) < int( + ipaddress.IPv4Address('127.255.255.255')): + logger.error('127.x.x.x address not allowed') + return False + if int(ipaddress.IPv4Address('10.0.0.1')) < int(ipadrs) < int( + ipaddress.IPv4Address('10.255.255.255')): + logger.error('10.x.x.x address not allowed') + return False + except (ipaddress.AddressValueError) as e: + pass + return True + +def is_ip(address): + return not address.split('.')[-1].isalpha() + +def replace_ending(sentence, old, new): + if sentence.endswith(old): + return sentence[:-len(old)] + new + return sentence diff --git a/dongtai_web/views/project_delete.py b/dongtai_web/views/project_delete.py new file mode 100644 index 000000000..15ddf47ef --- /dev/null +++ b/dongtai_web/views/project_delete.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +import logging +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project import IastProject +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + +class _ProjectsDelBodyArgsSerializer(serializers.Serializer): + id = serializers.IntegerField(help_text=_("The id of the project")) + + +logger = logging.getLogger("django") +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Application has been deleted successfully')), ''), + ((202, _('Failed to delete the project.')), ''), +)) + + +class ProjectDel(UserEndPoint): + name = "api-v1-project-del" + description = _("Delete application") + + @extend_schema_with_envcheck( + request=_ProjectsDelBodyArgsSerializer, + tags=[_('Project')], + summary=_('Projects Delete'), + description=_("Delete the agent by specifying the id."), + response_schema=_ResponseSerializer, + ) + def post(self, request): + try: + project_id = request.data.get('id', None) + if project_id: + auth_users = self.get_auth_users(request.user) + IastAgent.objects.filter( + bind_project_id=project_id, + user__in=auth_users).update(bind_project_id=-1) + IastProject.objects.filter(id=project_id, + user__in=auth_users).delete() + + return R.success(msg=_('Application has been deleted successfully')) + except Exception as e: + logger.error(e) + return R.failure(msg=_('Failed to delete the project.')) diff --git a/dongtai_web/views/project_detail.py b/dongtai_web/views/project_detail.py new file mode 100644 index 000000000..46d159013 --- /dev/null +++ b/dongtai_web/views/project_detail.py @@ -0,0 +1,84 
@@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project import (IastProject, VulValidation) +from dongtai_common.utils.systemsettings import get_vul_validate +from dongtai_common.utils import const +from django.utils.translation import gettext_lazy as _ + +from dongtai_web.base.project_version import get_project_version, ProjectsVersionDataSerializer +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + + +class ProjectsResponseDataSerializer(serializers.Serializer): + name = serializers.CharField(help_text=_('The name of project')) + agent_ids = serializers.CharField(help_text=_( + 'The id corresponding to the agent, use, for segmentation.')) + mode = serializers.ChoiceField(['插桩模式'], + help_text=_('The mode of project')) + scan_id = serializers.IntegerField( + help_text=_("The id corresponding to the scanning strategy.")) + versionData = ProjectsVersionDataSerializer( + help_text=_('Version information about the project')) + id = serializers.IntegerField(help_text=_("The id of the project")) + vul_validation = serializers.IntegerField(help_text="vul validation switch") + + +_ResponseSerializer = get_response_serializer( + ProjectsResponseDataSerializer(help_text=''), + status_msg_keypair=( + ((201, _('success')), ''), + ((203, _('no permission')), ''), + )) + + +class ProjectDetail(UserEndPoint): + name = "api-v1-project-" + description = _("View item details") + + @extend_schema_with_envcheck( + tags=[_('Project')], + summary=_('Projects Detail'), + description= + _("Get project information by project id, including the current version information of the project." 
+ ), + response_schema=_ResponseSerializer, + ) + def get(self, request, id): + auth_users = self.get_auth_users(request.user) + project = IastProject.objects.filter(user__in=auth_users, id=id).first() + + if project: + relations = IastAgent.objects.filter(bind_project_id=project.id, online=const.RUNNING) + agents = [{"id": relation.id, "name": relation.token} for relation in relations] + if project.scan: + scan_id = project.scan.id + scan_name = project.scan.name + else: + scan_id = 0 + scan_name = '' + + current_project_version = get_project_version(project.id, auth_users) + return R.success(data={ + "name": project.name, + "id": project.id, + "mode": project.mode, + "scan_id": scan_id, + "scan_name": scan_name, + "agents": agents, + "versionData": current_project_version, + "vul_validation": project.vul_validation, + 'base_url':project.base_url, + "test_req_header_key":project.test_req_header_key, + "test_req_header_value":project.test_req_header_value, + }) + else: + return R.failure(status=203, msg=_('no permission')) + diff --git a/dongtai_web/views/project_engines.py b/dongtai_web/views/project_engines.py new file mode 100644 index 000000000..7d8725a01 --- /dev/null +++ b/dongtai_web/views/project_engines.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +from dongtai_common.endpoint import R +from dongtai_common.utils import const +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ + +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + + +class _ProjectEnginesDataSerializer(serializers.Serializer): + id = serializers.IntegerField(help_text=_("The id of the agent")) + token = serializers.CharField(help_text=_('The name of agent')) + shortname = serializers.CharField( + help_text=_("The short name of the agent")) + + class Meta: + model = IastAgent + fields = ['id', 'name'] + + +_ProjectEnginesResponseSerializer = get_response_serializer( + _ProjectEnginesDataSerializer(many=True)) + + +class ProjectEngines(UserEndPoint): + name = "api-v1-project-engines" + description = _("View engine list") + + @extend_schema_with_envcheck( + tags=[_('Project')], + summary=_('Projects Agents'), + description=_("Get the agent list corresponding to the project id."), + response_schema=_ProjectEnginesResponseSerializer, + ) + def get(self, request, pid): + auth_users = self.get_auth_users(request.user) + queryset = IastAgent.objects.filter( + user__in=auth_users, + online=const.RUNNING, + bind_project_id__in=[0, pid]).values("id", "token","alias") + data = [] + if queryset: + for item in queryset: + data.append({ + 'id': + item['id'], + 'token': + item['token'], + 'short_name': + item['alias'] if item.get('alias', None) else '-'.join( + item['token'].split('-')[:-1]), + }) + return R.success(data=data) diff --git a/dongtai_web/views/project_search.py b/dongtai_web/views/project_search.py new file mode 100644 index 000000000..83b17da91 --- /dev/null +++ b/dongtai_web/views/project_search.py @@ -0,0 +1,50 @@ +import logging + +from django.forms.models import model_to_dict +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.project import IastProject +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, 
get_response_serializer +from rest_framework import serializers + +logger = logging.getLogger("django") + + +class _ProjectSearchQuerySerializer(serializers.Serializer): + name = serializers.CharField( + help_text=_("Project name, support fuzzy search.")) +class _ProjectSearchDataSerializer(serializers.Serializer): + id = serializers.IntegerField(help_text=_("The id of the project")) + name = serializers.CharField(help_text=_('The name of project')) + + class Meta: + model = IastProject + fields = ['id', 'name'] + + +_ProjectResponseSerializer = get_response_serializer( + _ProjectSearchDataSerializer(many=True)) + + +class ProjectSearch(UserEndPoint): + + @extend_schema_with_envcheck( + [_ProjectSearchQuerySerializer], + tags=[_('Project')], + summary=_('Projects Search'), + description= + _("Get the id and name of the item according to the search keyword matching the item name, in descending order of time." + ), + response_schema=_ProjectResponseSerializer, + ) + def get(self, request): + name = request.query_params.get('name', '') + users = self.get_auth_users(request.user) + projects = IastProject.objects.filter( + user__in=users, name__icontains=name).order_by('-latest_time') + data = [ + model_to_dict(project, fields=['id', 'name']) + for project in projects + ] + return R.success(data=data) diff --git a/dongtai_web/views/project_summary.py b/dongtai_web/views/project_summary.py new file mode 100644 index 000000000..c7a4cb513 --- /dev/null +++ b/dongtai_web/views/project_summary.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import time +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project import IastProject +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_web.base.project_version import get_project_version, get_project_version_by_id, ProjectsVersionDataSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_web.serializers.project import ProjectSerializer +from dongtai_common.models.hook_type import HookType +from django.db.models import Q +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_web.views.utils.commonstats import get_summary_by_agent_ids +from dongtai_common.utils import const + +class ProjectSummaryQuerySerializer(serializers.Serializer): + version_id = serializers.CharField( + help_text=_("The version id of the project")) + + +class ProjectSummaryDataTypeSummarySerializer(serializers.Serializer): + type_name = serializers.CharField(help_text=_("Name of vulnerability")) + type_count = serializers.IntegerField( + help_text=_("Count of thi vulnerablity type")) + type_level = serializers.IntegerField( + help_text=_("Level of vulnerability")) + + +class ProjectSummaryDataDayNumSerializer(serializers.Serializer): + day_label = serializers.CharField(help_text=_('Timestamp, format %M-%d')) + day_num = serializers.IntegerField( + help_text=_('The number of vulnerabilities corresponding to the time')) + + +class ProjectSummaryDataLevelCountSerializer(serializers.Serializer): + level_name = serializers.CharField( + help_text=_('Level name of 
vulnerability')) + level_id = serializers.IntegerField( + help_text=_('Level id of vulnerability')) + num = serializers.IntegerField(help_text=_( + 'The number of vulnerabilities corresponding to the level')) + + +class _ProjectSummaryDataSerializer(serializers.Serializer): + name = serializers.CharField(help_text=_('The name of project')) + mode = serializers.ChoiceField(['插桩模式'], + help_text=_('The mode of project')) + id = serializers.IntegerField(help_text=_("The id of the project")) + latest_time = serializers.IntegerField(help_text=_("The latest update time of the project")) + versionData = ProjectsVersionDataSerializer( + help_text=_('Version information about the project')) + type_summary = ProjectSummaryDataTypeSummarySerializer( + many=True, + help_text=_('Statistics on the number of types of vulnerabilities')) + agent_language = serializers.ListField( + child=serializers.CharField(), + help_text=_("Agent language currently included in the project")) + level_count = ProjectSummaryDataLevelCountSerializer( + many=True, + help_text=_( + "Statistics on the number of danger levels of vulnerabilities")) + + +_ProjectSummaryResponseSerializer = get_response_serializer( + _ProjectSummaryDataSerializer()) + + +class ProjectSummary(UserEndPoint): + name = "api-v1-project-summary-" + description = _("Item details - Summary") + + @staticmethod + def weeks_ago(week=1): + + weekend = 7 * week + current_timestamp = int(time.time()) + weekend_ago_time = time.localtime(current_timestamp - 86400 * weekend) + weekend_ago_time_str = str(weekend_ago_time.tm_year) + "-" + str(weekend_ago_time.tm_mon) + "-" + str( + weekend_ago_time.tm_mday) + " 00:00:00" + beginArray = time.strptime(weekend_ago_time_str, "%Y-%m-%d %H:%M:%S") + + beginT = int(time.mktime(beginArray)) + return current_timestamp, beginT, weekend + + @extend_schema_with_envcheck( + tags=[_('Project')], + summary=_('Projects Summary'), + description= + _("Get project deatils and its statistics data about vulnerablity." 
+ ), + response_schema=_ProjectSummaryResponseSerializer, + ) + def get(self, request, id): + auth_users = self.get_auth_users(request.user) + project = IastProject.objects.filter(user__in=auth_users, + id=id).first() + + if not project: + return R.failure(status=203, msg=_('no permission')) + version_id = request.GET.get('version_id', None) + data = dict() + data['owner'] = project.user.get_username() + data['name'] = project.name + data['id'] = project.id + data['mode'] = project.mode + data['latest_time'] = project.latest_time + data['type_summary'] = [] + data['day_num'] = [] + data['level_count'] = [] + + if not version_id: + current_project_version = get_project_version( + project.id, auth_users) + else: + current_project_version = get_project_version_by_id(version_id) + data['versionData'] = current_project_version + relations = IastAgent.objects.filter( + user__in=auth_users, + bind_project_id=project.id, + project_version_id=current_project_version.get("version_id", 0) + ).values("id") + + agent_ids = [relation['id'] for relation in relations] + agent_id = request.query_params.get('agent_id') + if agent_id: + agent_ids = [agent_id] + data_stat = get_summary_by_agent_ids(agent_ids) + data.update(data_stat) + data['agent_language'] = ProjectSerializer( + project).data['agent_language'] + data['agent_alive'] = IastAgent.objects.filter( + bind_project_id=project.id, online=const.RUNNING).count() + return R.success(data=data) diff --git a/dongtai_web/views/project_version_add.py b/dongtai_web/views/project_version_add.py new file mode 100644 index 000000000..60820cdc4 --- /dev/null +++ b/dongtai_web/views/project_version_add.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: lingzhi-webapi +import logging + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint + +from dongtai_web.base.project_version import version_modify, VersionModifySerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +logger = logging.getLogger("django") + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((202, _('Parameter error')), ''), + ((201, _('Created success')), ''), +)) + + +class ProjectVersionAdd(UserEndPoint): + name = "api-v1-project-version-add" + description = _("New application version information") + + @extend_schema_with_envcheck( + request=VersionModifySerializer, + tags=[_('Project')], + summary=_('Projects Version Add'), + description= + _("""Add project version information according to the given conditions; + if the version id is specified, the corresponding version information is updated according to the given conditions.""" + ), + response_schema=_ResponseSerializer, + ) + def post(self, request): + try: + auth_users = self.get_auth_users(request.user) + result = version_modify(request.user, auth_users, request.data) + if result.get("status", "202") == "202": + return R.failure(status=202, + msg=result.get("msg", _("Parameter error"))) + else: + return R.success(msg=_('Created success'), data=result.get("data", {})) + + except Exception as e: + logger.error(e,exc_info=True) + return R.failure(status=202, msg=_("Parameter error")) diff --git a/dongtai_web/views/project_version_current.py b/dongtai_web/views/project_version_current.py new file mode 100644 index 000000000..a2fc9417a --- /dev/null +++ b/dongtai_web/views/project_version_current.py @@ -0,0 +1,72 @@ +#!/usr/bin/env 
python +# -*- coding:utf-8 -*- +# author:sjh + +# software: PyCharm +# project: lingzhi-webapi +import logging, time +from dongtai_common.endpoint import R +from django.db.models import Q +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.project_version import IastProjectVersion +from dongtai_common.models.agent import IastAgent +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + +logger = logging.getLogger("django") + +class _ProjectVersionCurrentSerializer(serializers.Serializer): + version_id = serializers.CharField( + help_text=_("The version id of the project")) + project_id = serializers.IntegerField(help_text=_("The id of the project")) + + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((202, _('Version does not exist')), ''), + ((202, _('Version setting failed')), ''), + ((201, _('Version setting success')), ''), +)) + + +class ProjectVersionCurrent(UserEndPoint): + name = "api-v1-project-version-current" + description = _("Set to the current application version") + + @extend_schema_with_envcheck( + request=_ProjectVersionCurrentSerializer, + tags=[_('Project')], + summary=_('Projects Version Current'), + description= + _("Specify the selected version as the current version of the project according to the given conditions." + ), + response_schema=_ResponseSerializer, + ) + def post(self, request): + try: + project_id = request.data.get("project_id", 0) + version_id = request.data.get("version_id", 0) + if not version_id or not project_id: + return R.failure(status=202, msg=_('Parameter error')) + + users = self.get_auth_users(request.user) + users_id = [user.id for user in users] + version = IastProjectVersion.objects.filter(project_id=project_id, id=version_id, user_id__in=users_id).first() + if version: + version.current_version = 1 + version.update_time = int(time.time()) + version.save(update_fields=["current_version", "update_time"]) + IastProjectVersion.objects.filter( + ~Q(id=version_id), + project_id=project_id, + current_version=1, + status=1 + ).update(current_version=0, update_time=int(time.time())) + + return R.success(msg=_('Version setting success')) + else: + return R.failure(status=202, msg=_('Version does not exist')) + + except Exception as e: + logger.error(e) + return R.failure(status=202, msg=_("Version setting failed")) diff --git a/dongtai_web/views/project_version_delete.py b/dongtai_web/views/project_version_delete.py new file mode 100644 index 000000000..2004d86a6 --- /dev/null +++ b/dongtai_web/views/project_version_delete.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: lingzhi-webapi +import logging, time +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.project_version import IastProjectVersion +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + +logger = logging.getLogger("django") + + +class _ProjectVersionDeleteSerializer(serializers.Serializer): + version_id = serializers.CharField( + help_text=_("The version id of the project")) + project_id = serializers.IntegerField(help_text=_("The id of the project")) + + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((202, _('Parameter error')), ''), 
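+    # Note: ProjectVersionDelete.post below returns 'Version does not exist'
+    # via R.failure(status=202), so the 201 on the next entry appears to be
+    # documentation-only and may not match the actual response status.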
+ ((201, _('Version does not exist')), ''), + ((201, _('Deleted Successfully')), ''), +)) + + +class ProjectVersionDelete(UserEndPoint): + name = "api-v1-project-version-delete" + description = _("Delete application version information") + + @extend_schema_with_envcheck( + request=_ProjectVersionDeleteSerializer, + tags=[_('Project')], + summary=_('Projects Version Delete'), + description=_( + "Delete the specified project version according to the conditions." + ), + response_schema=_ResponseSerializer, + ) + def post(self, request): + try: + version_id = request.data.get("version_id", 0) + project_id = request.data.get("project_id", 0) + if not version_id or not project_id: + return R.failure(status=202, msg=_('Parameter error')) + version = IastProjectVersion.objects.filter(id=version_id, project_id=project_id, user=request.user, status=1).first() + if version: + version.status = 0 + version.update_time = int(time.time()) + version.save(update_fields=['status']) + return R.success(msg=_('Deleted Successfully')) + else: + return R.failure(status=202, msg=_('Version does not exist')) + + except Exception as e: + logger.error(e) + return R.failure(status=202, msg=_('Parameter error')) diff --git a/dongtai_web/views/project_version_list.py b/dongtai_web/views/project_version_list.py new file mode 100644 index 000000000..01bf5532b --- /dev/null +++ b/dongtai_web/views/project_version_list.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: lingzhi-webapi +import logging, time +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.project_version import IastProjectVersion +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +logger = logging.getLogger("django") + + +class _VersionListDataSerializer(serializers.ModelSerializer): + version_id = serializers.IntegerField( + source='IastProjectVersion.id', help_text=_("The version id of the project")) + version_name = serializers.CharField( + help_text=_("The version name of the project")) + description = serializers.CharField( + help_text=_("Description of the project versoin")) + current_version = serializers.IntegerField(help_text=_( + "Whether it is the current version, 1 means yes, 0 means no.")) + + class Meta: + model = IastProjectVersion + fields = ['version_id', 'version_name', 'current_version', 'description'] + + +_ProjectVersionListResponseSerializer = get_response_serializer( + _VersionListDataSerializer(many=True)) + +class ProjectVersionList(UserEndPoint): + name = "api-v1-project-version-list" + description = _("View application version list") + + @extend_schema_with_envcheck( + tags=[_('Project')], + summary=_('Projects Version List'), + description=_("Get the version information list of the item corresponding to the id"), + response_schema=_ProjectVersionListResponseSerializer, + ) + def get(self, request, project_id): + try: + auth_users = self.get_auth_users(request.user) + versionInfo = IastProjectVersion.objects.filter(project_id=project_id, user__in=auth_users, status=1).order_by("-id") + data = [] + if versionInfo: + for item in versionInfo: + data.append({ + "version_id": item.id, + "version_name": item.version_name, + "current_version": item.current_version, + "description": item.description, + }) + return R.success(msg=_('Search successful'), data=data) + except Exception 
as e: + logger.error(e) + return R.failure(status=202, msg=_('Parameter error')) diff --git a/dongtai_web/views/project_version_update.py b/dongtai_web/views/project_version_update.py new file mode 100644 index 000000000..560dd7386 --- /dev/null +++ b/dongtai_web/views/project_version_update.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:sjh +# software: PyCharm +# project: lingzhi-webapi +import logging + +from dongtai_common.endpoint import R +from dongtai_web.base.project_version import version_modify, VersionModifySerializer +from dongtai_common.endpoint import UserEndPoint +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +logger = logging.getLogger("django") + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((202, _('Parameter error')), ''), + ((201, _('Update completed')), ''), +)) + + +class ProjectVersionUpdate(UserEndPoint): + name = "api-v1-project-version-update" + description = _("Update application version information") + + @extend_schema_with_envcheck( + request=VersionModifySerializer, + tags=[_('Project')], + summary=_('Projects Version Update'), + description=_( + "Update the version information of the corresponding version id."), + response_schema=_ResponseSerializer, + ) + def post(self, request): + try: + version_id = request.data.get("version_id", 0) + auth_users = self.get_auth_users(request.user) + result = version_modify(request.user, auth_users, request.data) + if not version_id or result.get("status", "202") == "202": + return R.failure(status=202, msg=_("Parameter error")) + else: + return R.success(msg=_('Update completed')) + + except Exception as e: + logger.error(e) + return R.failure(status=202, msg=_('Parameter error')) diff --git a/dongtai_web/views/projects.py b/dongtai_web/views/projects.py new file mode 100644 index 000000000..f9cdf4382 --- /dev/null +++ b/dongtai_web/views/projects.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +import logging + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.project import IastProject +from dongtai_web.serializers.project import ProjectSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + +logger = logging.getLogger("django") + +class _ProjectsArgsSerializer(serializers.Serializer): + page_size = serializers.IntegerField(default=20, + help_text=_('Number per page')) + page = serializers.IntegerField(default=1, help_text=_('Page index')) + name = serializers.CharField( + default=None, + help_text=_( + "The name of the item to be searched, supports fuzzy search.")) + + +_SuccessSerializer = get_response_serializer(ProjectSerializer(many=True)) + + +class Projects(UserEndPoint): + name = "api-v1-projects" + description = _("View item list") + + @extend_schema_with_envcheck( + [_ProjectsArgsSerializer], + tags=[_('Project')], + summary=_('Projects List'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." 
+ ), + response_schema=_SuccessSerializer, + ) + def get(self, request): + page = request.query_params.get('page', 1) + page_size = request.query_params.get('pageSize', 20) + name = request.query_params.get('name') + + users = self.get_auth_users(request.user) + queryset = IastProject.objects.filter( + user__in=users).order_by('-latest_time') + + if name: + queryset = queryset.filter(name__icontains=name) + + page_summary, page_data = self.get_paginator(queryset, page, page_size) + return R.success(data=ProjectSerializer(page_data, many=True).data, + page=page_summary) diff --git a/dongtai_web/views/sca_details.py b/dongtai_web/views/sca_details.py new file mode 100644 index 000000000..7a76ae4a2 --- /dev/null +++ b/dongtai_web/views/sca_details.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/8/26 11:47 +# software: PyCharm +# project: webapi +import logging + +from dongtai_common.endpoint import R, UserEndPoint +from dongtai_common.models.asset import Asset +from dongtai_common.models.vul_level import IastVulLevel +from django.utils.translation import get_language +from dongtai_web.serializers.sca import ScaSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers +from dongtai_conf import settings +import requests +import json +from dongtai_web.dongtai_sca.models import VulCveRelation + +logger = logging.getLogger('dongtai-webapi') + + +class ScaDetailResponseDataVulsSerializers(serializers.Serializer): + safe_version = serializers.CharField() + vulcve = serializers.CharField() + vulcwe = serializers.CharField() + vulname = serializers.CharField() + overview = serializers.CharField() + teardown = serializers.CharField() + reference = serializers.CharField() + level = serializers.CharField() + + +class ScaDetailResponseDataSerializers(ScaSerializer): + vuls = ScaDetailResponseDataVulsSerializers(many=True) + + class Meta: + model = ScaSerializer.Meta.model + fields = ScaSerializer.Meta.fields + ['vuls'] + + +_ResponseSerializer = get_response_serializer( + ScaDetailResponseDataSerializers()) + + +class ScaDetailView(UserEndPoint): + name = "api-v1-scas" + description = "" + + @extend_schema_with_envcheck( + [], + [], + [{ + 'name': + _('Get data sample'), + 'description': + _("The aggregation results are programming language, risk level, vulnerability type, project" + ), + 'value': { + "status": 201, + "msg": "success", + "data": { + "id": 12897, + "package_name": "log4j-to-slf4j-2.14.1.jar", + "version": "2.14.1", + "project_name": "demo", + "project_id": 67, + "project_version": "V1.0", + "language": "JAVA", + "agent_name": + "Mac OS X-localhost-v1.0.0-d24bf703ca62499ebdd12770708296f5", + "signature_value": + "ce8a86a3f50a4304749828ce68e7478cafbc8039", + "level": "INFO", + "level_type": 4, + "vul_count": 0, + "dt": 1631088844, + "vuls": [] + } + } + }], + tags=[_('Component')], + summary=_("Component Detail"), + description= + _("Get the details of the corresponding component by specifying the id." 
+ ), + response_schema=_ResponseSerializer + ) + def get(self, request, id): + user = request.user + + try: + agents = self.get_auth_agents_with_user(user) + asset = Asset.objects.filter(agent__in=agents, id=id).first() + + if asset is None: + return R.failure(msg=_( + 'Components do not exist or no permission to access')) + data = ScaSerializer(asset).data + data['vuls'] = list() + + search_query = "" + if asset.agent.language == "JAVA": + search_query = "hash=" + asset.signature_value + elif asset.agent.language == "PYTHON": + version = asset.version + name = asset.package_name.replace("-" + version, "") + search_query = "ecosystem={}&name={}&version={}".format("PyPI", name, version) + if search_query != "": + try: + url = settings.SCA_BASE_URL + "/package_vul/?" + search_query + resp = requests.get(url=url) + resp = json.loads(resp.content) + maven_model = resp.get("data", {}).get("package", {}) + if maven_model is None: + maven_model = {} + vul_list = resp.get("data", {}).get("vul_list", []) + + levels = IastVulLevel.objects.all() + level_dict = {} + language = get_language() + for level in levels: + if language == "zh": + level_dict[level.name] = level.name_value_zh + if language == "en": + level_dict[level.name] = level.name_value_en + + for vul in vul_list: + _level = vul.get("vul_package", {}).get("severity", "none") + _vul = vul.get("vul", {}) + _fixed_versions = vul.get("fixed_versions", []) + cwe_ids = vul.get('vul_package', {}).get('cwe_ids', []) + vul = { + 'safe_version': ",".join(_fixed_versions) if len(_fixed_versions) > 0 else _( + 'Current version stopped for maintenance or it is not a secure version'), + 'vulcve': _vul.get('aliases', [])[0] if len(_vul.get('aliases', [])) > 0 else "", + 'vulcwe': ",".join(cwe_ids), + 'vulname': _vul.get("summary", ""), + 'overview': _vul.get("summary", ""), + 'teardown': _vul.get("details", ""), + 'reference': _vul.get('references', []), + 'level': level_dict.get(_level, _level) + } + cverelation = VulCveRelation.objects.filter( + cve=vul['vulcve']).first() + vul['vulcve_url'] = f"https://cve.mitre.org/cgi-bin/cvename.cgi?name={vul['vulcve']}" if vul[ + 'vulcve'] else "" + vul['vulcnnvd_url'] = "" + vul['vulcnvd_url'] = "" + vul['vulcnnvd'] = "" + vul['vulcnvd'] = "" + if cverelation: + vul['vulcnnvd_url'] = f"http://www.cnnvd.org.cn/web/xxk/ldxqById.tag?CNNVD={cverelation.cnnvd}" if cverelation.cnnvd else "" + vul['vulcnvd_url'] = f"https://www.cnvd.org.cn/flaw/show/{cverelation.cnvd}" if cverelation.cnvd else "" + vul['vulcnnvd'] = cverelation.cnnvd if cverelation.cnnvd else "" + vul['vulcnvd'] = cverelation.cnvd if cverelation.cnvd else "" + data['vuls'].append(vul) + + except Exception as e: + logger.info("get package_vul failed:{}".format(e)) + return R.success(data=data) + except Exception as e: + logger.error(e) + return R.failure(msg=_('Component information query failed')) diff --git a/dongtai_web/views/sca_sidebar_index.py b/dongtai_web/views/sca_sidebar_index.py new file mode 100644 index 000000000..9fa21c32b --- /dev/null +++ b/dongtai_web/views/sca_sidebar_index.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from django.db.models import Q + +from dongtai_common.endpoint import R, UserEndPoint +from dongtai_common.models.asset import Asset +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck +from django.utils.text import format_lazy + +from dongtai_web.utils import 
get_model_order_options + +class ScaSidebarList(UserEndPoint): + @extend_schema_with_envcheck( + [ + { + 'name': "language", + 'type': str, + 'description': _("programming language"), + }, + { + 'name': "level", + 'type': str, + 'description': _('Level of vulnerability'), + }, + { + 'name': "app", + 'type': str, + }, + { + 'name': + "order", + 'type': + str, + 'description': + format_lazy( + "{} : {}", _('Sorted index'), ",".join([ + 'package_name', 'version', 'language', 'level', 'dt' + ])) + }, + ], + tags=[_('Component')], + summary=_("Component List"), + description= + _("Use the specified project information to obtain the corresponding component." + ), + ) + def get(self, request): + """ + :param request: + :return: + """ + language = request.query_params.get('language', None) + level = request.query_params.get('level', None) + app_name = request.query_params.get('app', None) + order = request.query_params.get('order', None) + + condition = Q() + if language: + condition = condition & Q(language=language) + if level: + condition = condition & Q(level=level) + if app_name: + condition = condition & Q(app_name=app_name) + + if order and order in get_model_order_options(Asset): + queryset = Asset.objects.values( + 'package_name', + 'version', + 'level', + 'dt' + ).filter(condition).order_by(order) + else: + queryset = Asset.objects.values( + 'package_name', + 'version', + 'level', + 'dt' + ).filter(condition).order_by('-dt') + + page_size = 10 + page_summary, queryset = self.get_paginator(queryset, + page_size=page_size) + return R.success(data=[obj for obj in queryset], + page=page_summary, + total=page_summary['alltotal']) diff --git a/dongtai_web/views/sca_summary.py b/dongtai_web/views/sca_summary.py new file mode 100644 index 000000000..fcb30a21d --- /dev/null +++ b/dongtai_web/views/sca_summary.py @@ -0,0 +1,366 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import pymysql +from dongtai_common.endpoint import R, UserEndPoint +from dongtai_common.models.vul_level import IastVulLevel +from django.db import connection +from dongtai_web.base.agent import get_project_vul_count, get_agent_languages, initlanguage +from dongtai_web.base.project_version import get_project_version, get_project_version_by_id +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.text import format_lazy +from dongtai_web.serializers.vul import VulSummaryTypeSerializer, VulSummaryProjectSerializer, VulSummaryLevelSerializer, \ + VulSummaryLanguageSerializer +from rest_framework import serializers + + +class _ScaSummaryResponseDataSerializer(serializers.Serializer): + language = VulSummaryLanguageSerializer(many=True) + level = VulSummaryLevelSerializer(many=True) + projects = VulSummaryProjectSerializer(many=True) + + +_ResponseSerializer = get_response_serializer( + _ScaSummaryResponseDataSerializer()) + + +class ScaSummary(UserEndPoint): + name = "rest-api-dongtai_sca-summary" + description = _("Three-party components overview") + + @extend_schema_with_envcheck( + [ + { + 'name': "page", + 'type': int, + 'default': 1, + 'required': False, + 'description': _('Page index'), + }, + { + 'name': "pageSize", + 'type': int, + 'default': 20, + 'required': False, + 'description': _('Number per page'), + }, + { + 'name': "language", + 'type': str, + 'description': _("programming language"), + }, + { + 'name': "project_name", + 'type': str, + 
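+            # Deprecated (see the flag below); the project_id parameter that
+            # follows is what ScaSummary.post actually filters on.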
'deprecated': True, + 'description': _('Name of Project'), + }, + { + 'name': "project_id", + 'type': int, + 'description': _('Id of Project'), + }, + { + 'name': "level", + 'type': int, + 'description': _('The id level of vulnerability'), + }, + { + 'name': + "version_id", + 'type': + int, + 'description': + _("The default is the current version id of the project.") + }, + { + 'name': "keyword", + 'type': str, + 'description': + _("Fuzzy keyword search field for package_name.") + }, + { + 'name': + "order", + 'type': + str, + 'description': + format_lazy( + "{} : {}", _('Sorted index'), ",".join([ + 'version', 'level', 'vul_count', 'language', + 'package_name' + ])) + }, + ], [], [ + { + 'name': + _('Get data sample'), + 'description': + _("The aggregation results are programming language, risk level, vulnerability type, project" + ), + 'value': { + "status": 201, + "msg": "success", + "data": { + "language": [ + { + "language": "JAVA", + "count": 17 + }, { + "language": "PYTHON", + "count": 0 + } + ], + "level": [ + { + "level": "HIGH", + "count": 0, + "level_id": 1 + }, { + "level": "MEDIUM", + "count": 0, + "level_id": 2 + }, { + "level": "LOW", + "count": 0, + "level_id": 3 + }, { + "level": "INFO", + "count": 17, + "level_id": 4 + } + ], + "projects": [ + { + "project_name": "demo", + "count": 17, + "id": 67 + } + ] + } + } + } + ], + tags=[_('Component')], + summary=_("Component Summary (with project)"), + description= + _("Use the specified project information to get the corresponding component summary" + ), + response_schema=_ResponseSerializer) + def post(self, request): + """ + :param request: + :return: + """ + + end = { + "status": 201, + "msg": "success", + "data": {} + } + + auth_users = self.get_auth_users(request.user) + request_data = request.data + + auth_user_ids = [str(_i.id) for _i in auth_users] + base_query_sql = " LEFT JOIN iast_asset_aggr ON iast_asset.signature_value = iast_asset_aggr.signature_value WHERE iast_asset.user_id in %s and iast_asset.is_del=0 " + sql_params = [auth_user_ids] + asset_aggr_where = " and iast_asset.is_del=0 " + package_kw = request_data.get('keyword', "") + es_query = {} + if package_kw: + es_query['search_keyword'] = package_kw + package_kw = pymysql.converters.escape_string(package_kw) + + if package_kw and package_kw.strip() != '': + package_kw = '%%{}%%'.format(package_kw) + asset_aggr_where = asset_aggr_where + " and iast_asset.package_name like %s" + sql_params.append(package_kw) + project_id = request_data.get('project_id', None) + if project_id and project_id != '': + version_id = request.GET.get('version_id', None) + if not version_id: + current_project_version = get_project_version(project_id, auth_users) + else: + current_project_version = get_project_version_by_id(version_id) + #base_query_sql = base_query_sql + " and iast_asset.project_id=%s and iast_asset.project_version_id=%s " + asset_aggr_where = asset_aggr_where + " and iast_asset.project_id=%s and iast_asset.project_version_id=%s " + sql_params.append(project_id) + sql_params.append(current_project_version.get("version_id", 0)) + es_query["bind_project_id"] = project_id + es_query["project_version_id"] = current_project_version.get("version_id", 0) + +# if ELASTICSEARCH_STATE: +# resp, _ = self.get_data_from_es(request.user.id, es_query) +# return R.success(data=resp) +# + levelInfo = IastVulLevel.objects.filter(id__lt=5).all() + levelNameArr = {} + levelIdArr = {} + DEFAULT_LEVEL = {} + if levelInfo: + for level_item in levelInfo: + 
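+                # Build the level lookup tables used below: DEFAULT_LEVEL
+                # starts every level count at 0, levelNameArr maps level
+                # name -> id, and levelIdArr maps id -> name.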
DEFAULT_LEVEL[level_item.name_value] = 0 + levelNameArr[level_item.name_value] = level_item.id + levelIdArr[level_item.id] = level_item.name_value + + _temp_data = dict() + # 漏洞等级汇总 + level_summary_sql = "SELECT iast_asset.level_id,count(DISTINCT(iast_asset.signature_value)) as total FROM iast_asset {base_query_sql} {where_sql} GROUP BY iast_asset.level_id " + level_summary_sql = level_summary_sql.format(base_query_sql=base_query_sql, where_sql=asset_aggr_where) + + with connection.cursor() as cursor: + cursor.execute(level_summary_sql, sql_params) + level_summary = cursor.fetchall() + if level_summary: + for item in level_summary: + level_id, total = item + _temp_data[levelIdArr[level_id]] = total + + DEFAULT_LEVEL.update(_temp_data) + end['data']['level'] = [{ + 'level': _key, 'count': _value, 'level_id': levelNameArr[_key] + } for _key, _value in DEFAULT_LEVEL.items()] + + default_language = initlanguage() + language_summary_sql = "SELECT iast_asset.language,count(DISTINCT(iast_asset.signature_value)) as total FROM iast_asset {base_query_sql} {where_sql} GROUP BY iast_asset.language " + language_summary_sql = language_summary_sql.format(base_query_sql=base_query_sql, where_sql=asset_aggr_where) + + with connection.cursor() as cursor: + cursor.execute(language_summary_sql, sql_params) + language_summary = cursor.fetchall() + if language_summary: + for _l in language_summary: + language, total = _l + if default_language.get(language, None): + default_language[language] = total + default_language[language] + else: + default_language[language] = total + + end['data']['language'] = [{ + 'language': _key, 'count': _value + } for _key, _value in default_language.items()] + + end, base_query_sql, asset_aggr_where, sql_param = self.get_extend_data( + end, base_query_sql, asset_aggr_where, sql_params) + + + return R.success(data=end['data']) + + def get_extend_data(self, end: dict, base_query_sql: str, + asset_aggr_where: str, sql_params: tuple): + return end, base_query_sql, asset_aggr_where, sql_params + + def get_data_from_es(self, user_id, es_query): + resp, origin_resp = get_vul_list_from_elastic_search( + user_id, **es_query) + return resp, origin_resp + + +from elasticsearch_dsl import Q, Search +from elasticsearch import Elasticsearch +from elasticsearch_dsl import A +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_common.models.program_language import IastProgramLanguage +from dongtai_common.models.project import IastProject +from dongtai_common.models.vul_level import IastVulLevel +from django.core.cache import cache +from dongtai_conf import settings +from dongtai_common.common.utils import make_hash +from dongtai_conf.settings import ELASTICSEARCH_STATE +from dongtai_common.models.asset_aggr import AssetAggrDocument +from dongtai_web.aggregation.aggregation_common import auth_user_list_str +from dongtai_common.models import LANGUAGE_DICT +from dongtai_common.models.asset import IastAssetDocument +from dongtai_web.utils import dict_transfrom + + +def get_vul_list_from_elastic_search(user_id, + bind_project_id=None, + project_version_id=None, + search_keyword="", + extend_aggs_buckets={}): + user_id_list = [user_id] + auth_user_info = auth_user_list_str(user_id=user_id) + user_id_list = auth_user_info['user_list'] + must_query = [ + Q('terms', user_id=user_id_list), + Q('terms', is_del=[0]), + ] + if bind_project_id: + must_query.append(Q('terms', project_id=[bind_project_id])) + if 
project_version_id: + must_query.append(Q('terms', project_version_id=[project_version_id])) + if search_keyword: + must_query.append( + Q("wildcard", + **{"package_name.keyword": { + "value": f"*{search_keyword}*" + }})) + a = Q('bool', + must=must_query) + search = IastAssetDocument.search().query(Q('bool', must=must_query))[:0] + buckets = { + 'level': A('terms', field='level_id', size=2147483647), + "language": A('terms', + field='language.keyword', + size=2147483647), + **extend_aggs_buckets + } + for k, v in buckets.items(): + search.aggs.bucket(k, v).bucket( + "distinct_signature_value", + A("cardinality", field="signature_value.keyword")) + res = search.using( + Elasticsearch( + settings.ELASTICSEARCH_DSL['default']['hosts'])).execute() + dic = {} + for key in buckets.keys(): + origin_buckets = res.aggs[key].to_dict()['buckets'] + for i in origin_buckets: + i['id'] = i['key'] + del i['key'] + i['count'] = i['distinct_signature_value']["value"] + del i['distinct_signature_value'] + del i['doc_count'] + if key == 'language': + for i in origin_buckets: + i['language'] = i['id'] + del i['id'] + language_names = [i['language'] for i in origin_buckets] + for i in origin_buckets: + i['id'] = LANGUAGE_DICT.get(i['language']) + for language_key in LANGUAGE_DICT.keys(): + if language_key not in language_names: + origin_buckets.append({ + 'id': LANGUAGE_DICT[language_key], + 'language': language_key, + 'count': 0, + }) + if key == 'level': + for i in origin_buckets: + i['level_id'] = i['id'] + del i['id'] + level_ids = [i['level_id'] for i in origin_buckets] + level = IastVulLevel.objects.values( + 'id', 'name_value').all() + level_dic = dict_transfrom(level, 'id') + for i in origin_buckets: + i['level'] = level_dic[i['level_id']]['name_value'] + for level_id in level_dic.keys(): + if level_id not in level_ids: + origin_buckets.append({ + 'level_id': + level_id, + 'level': + level_dic[level_id]['name_value'], + 'count': + 0, + }) + + dic[key] = list(origin_buckets) + return dic, res diff --git a/dongtai_web/views/scan_strategys.py b/dongtai_web/views/scan_strategys.py new file mode 100644 index 000000000..7a51aab58 --- /dev/null +++ b/dongtai_web/views/scan_strategys.py @@ -0,0 +1,271 @@ +import logging + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.utils import const + +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.text import format_lazy +from rest_framework.serializers import ValidationError +from rest_framework import viewsets + +from django.db import models +from dongtai_common.models.strategy_user import IastStrategyUser +from dongtai_common.models.user import User +import time +from django.db.models import Q +from dongtai_common.permissions import TalentAdminPermission +from dongtai_common.models.project import IastProject +from dongtai_web.serializers.project import ProjectSerializer +from dongtai_web.views.utils.commonview import ( + BatchStatusUpdateSerializerView, + AllStatusUpdateSerializerView, +) + +logger = logging.getLogger('dongtai-webapi') + + +class ScanStrategySerializer(serializers.ModelSerializer): + content = serializers.SerializerMethodField() + class Meta: + model = IastStrategyUser + fields = ['id', 'name', 'content', 'user', 'status', 'created_at'] + def get_content(self, obj): + try: + return [int(i) for i in obj.content.split(',')] + except Exception as e: + print(e) + return [] + 
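+# Illustration only (not part of the module): the scan strategy 'content' is
+# persisted as a comma-separated string of rule ids and exposed to clients as
+# a list of integers, mirroring get_content above and the create/update views
+# below:
+#
+#   stored = ','.join(str(i) for i in [1, 2, 3])    # "1,2,3"   (create/update)
+#   exposed = [int(i) for i in stored.split(',')]   # [1, 2, 3] (get_content)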
+class _ScanStrategyArgsSerializer(serializers.Serializer): + page_size = serializers.IntegerField(default=20, + help_text=_('Number per page')) + page = serializers.IntegerField(default=1, help_text=_('Page index')) + name = serializers.CharField( + required=False, + help_text=_( + "The name of the item to be searched, supports fuzzy search.")) + + +class _ProjectSerializer(ProjectSerializer): + class Meta: + model = IastProject + fields = ['id', 'name'] + + +class ScanCreateSerializer(serializers.Serializer): + name = serializers.CharField(required=True) + status = serializers.ChoiceField((-1, 0, 1), required=True) + content = serializers.ListField(child=serializers.IntegerField(), + required=True) + + +class _ScanStrategyRelationProjectArgsSerializer(serializers.Serializer): + size = serializers.IntegerField(default=5, + max_value=50, + min_value=1, + required=False, + help_text=_('Number per page')) + + +class ScanStrategyRelationProject(UserEndPoint): + @extend_schema_with_envcheck( + request=ScanCreateSerializer, + tags=[_('ScanStrategy')], + summary=_('ScanStrategy Relation Projects'), + description= + _("Get scan strategy relation projects" + ), + ) + def get(self, request, pk): + ser = _ScanStrategyRelationProjectArgsSerializer(data=request.GET) + try: + if ser.is_valid(True): + size = ser.validated_data['size'] + except ValidationError as e: + return R.failure(data=e.detail) + user = self.get_auth_users(request.user) + scan_strategy = IastStrategyUser.objects.filter(pk=pk, + user__in=user).first() + projects = IastProject.objects.filter( + scan=scan_strategy).order_by('-latest_time')[::size] + return R.success(data=_ProjectSerializer(projects, many=True).data) + + +class ScanStrategyViewSet(UserEndPoint, viewsets.ViewSet): + + permission_classes_by_action = {} + + def get_permissions(self): + try: + return [ + permission() for permission in + self.permission_classes_by_action[self.action] + ] + except KeyError: + return [permission() for permission in self.permission_classes] + + @extend_schema_with_envcheck( + [_ScanStrategyArgsSerializer], + tags=[_('ScanStrategy')], + summary=_('ScanStrategy List'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." 
+ ), + ) + def list(self, request): + ser = _ScanStrategyArgsSerializer(data=request.GET) + try: + if ser.is_valid(True): + name = ser.validated_data.get('name', None) + page = ser.validated_data['page'] + page_size = ser.validated_data['page_size'] + except ValidationError as e: + return R.failure(data=e.detail) + users = self.get_auth_users(request.user) + q = Q(user__in=users) & ~Q(status=-1) + if name: + q = Q(name__icontains=name) & q + queryset = IastStrategyUser.objects.filter(q).order_by('-created_at') + if name: + queryset = queryset.filter(name__icontains=name) + page_summary, page_data = self.get_paginator(queryset, page, page_size) + return R.success(data=ScanStrategySerializer(page_data, + many=True).data, + page=page_summary) + + @extend_schema_with_envcheck( + request=ScanCreateSerializer, + tags=[_('ScanStrategy')], + summary=_('ScanStrategy Create'), + description= + _("Create ScanStrategy" + ), + ) + def create(self, request): + ser = ScanCreateSerializer(data=request.data) + try: + if ser.is_valid(True): + name = ser.validated_data['name'] + content = ser.validated_data['content'] + status = ser.validated_data['status'] + except ValidationError as e: + return R.failure(data=e.detail) + try: + ser.validated_data['content'] = ','.join([str(i) for i in content]) + obj = IastStrategyUser.objects.create(**ser.validated_data, + user=request.user) + return R.success(msg=_('create success'), + data=ScanStrategySerializer(obj).data) + except Exception as e: + logger.error(e) + return R.failure() + + @extend_schema_with_envcheck( + request=ScanCreateSerializer, + tags=[_('ScanStrategy')], + summary=_('ScanStrategy Update'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." + ), + ) + def update(self, request, pk): + ser = ScanCreateSerializer(data=request.data, partial=True) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + if ser.validated_data.get('content', None): + ser.validated_data['content'] = ','.join( + [str(i) for i in ser.validated_data['content']]) + users = self.get_auth_users(request.user) + obj = IastStrategyUser.objects.filter( + pk=pk, user__in=users).update(**ser.validated_data) + return R.success(msg=_('update success')) + + @extend_schema_with_envcheck( + tags=[_('ScanStrategy')], + summary=_('ScanStrategy delete'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." 
+          ),
+    )
+    def destory(self, request, pk):
+        scan = IastStrategyUser.objects.filter(pk=pk, user__in=self.get_auth_users(request.user)).first()
+        if not scan:
+            return R.failure(msg='No scan strategy found')
+        if checkusing(scan):
+            return R.failure(msg='Some project is using this scan strategy')
+        try:
+            IastStrategyUser.objects.filter(
+                pk=pk,
+                user__in=self.get_auth_users(request.user)).update(status=-1)
+            return R.success(msg=_('delete success'))
+        except Exception as e:
+            logger.error(e)
+            return R.failure()
+
+    @extend_schema_with_envcheck(
+        tags=[_('ScanStrategy')],
+        summary=_('ScanStrategy get'),
+        description=_("Get the item with pk"),
+    )
+    def retrieve(self, request, pk):
+        obj = IastStrategyUser.objects.filter(pk=pk, user__in=self.get_auth_users(request.user)).first()
+        return R.success(data=ScanStrategySerializer(obj).data)
+
+class ScanStrategyBatchView(BatchStatusUpdateSerializerView):
+    status_field = 'status'
+    model = IastStrategyUser
+
+    @extend_schema_with_envcheck(
+        request=BatchStatusUpdateSerializerView.serializer,
+        tags=[_('ScanStrategy')],
+        summary=_('ScanStrategy batch status'),
+        description=_("batch update status."),
+    )
+    def post(self, request):
+        data = self.get_params(request.data)
+        user = request.user
+        data['ids'] = filter_using(data['ids'], [user])
+        self.update_model(request, data)
+        return R.success(msg=_('update success'))
+
+class ScanStrategyAllView(AllStatusUpdateSerializerView):
+    status_field = 'status'
+    model = IastStrategyUser
+
+    @extend_schema_with_envcheck(
+        request=BatchStatusUpdateSerializerView.serializer,
+        tags=[_('ScanStrategy')],
+        summary=_('ScanStrategy all status'),
+        description=_("all update status."),
+    )
+    def post(self, request):
+        data = self.get_params(request.data)
+        self.update_model(request, data)
+        return R.success(msg=_('update success'))
+
+    def update_model(self, request, validated_data):
+        ids = self.model.objects.values_list('id', flat=True).all()
+        filter_ids = filter_using(ids, [request.user])
+        self.model.objects.filter(pk__in=filter_ids,
+                                  user__in=[request.user]).update(**{
+                                      self.status_field:
+                                      validated_data['status']
+                                  })
+
+
+def filter_using(ids, users):
+    after_filter_ids = []
+    for obj in IastStrategyUser.objects.filter(pk__in=ids, user__in=users).all():
+        if checkusing(obj):
+            continue
+        after_filter_ids.append(obj.id)
+    return after_filter_ids
+
+
+def checkusing(scanstrategy):
+    return IastProject.objects.filter(scan=scanstrategy).exists()
diff --git a/dongtai_web/views/scas.py b/dongtai_web/views/scas.py
new file mode 100644
index 000000000..9cb93ccdd
--- /dev/null
+++ b/dongtai_web/views/scas.py
@@ -0,0 +1,551 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 -*-
+# author:owefsad
+# software: PyCharm
+# project: lingzhi-webapi
+import logging
+
+import pymysql
+from django.db import connection
+
+from dongtai_common.endpoint import R, UserEndPoint
+
+from dongtai_common.models.asset_aggr import AssetAggr
+
+from dongtai_web.base.project_version import get_project_version, get_project_version_by_id
+from dongtai_web.serializers.sca import ScaAssetSerializer
+from django.utils.translation import gettext_lazy as _
+from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer
+from django.utils.text import format_lazy
+from dongtai_common.models.asset import Asset
+from itertools import chain
+
+WINDOW_SIZE = 5
+
+logger = logging.getLogger(__name__)
+_ResponseSerializer = get_response_serializer(ScaAssetSerializer(many=True))
+
+
+def get_order_params(order_fields, order_by):
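+    # Resolve the client-supplied sort key: a leading "-" requests descending
+    # order, "level" is mapped to the "level_id" column with the direction
+    # flipped, and any field outside order_fields falls back to vul_count DESC.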
+ order_type = 'asc' + + if '-' in order_by: + order_by = order_by.split('-')[1] + order_type = 'desc' + + if order_by and order_by in order_fields: + if order_by == 'level': + order_by = "{}_id".format(order_by) + if order_type == 'asc': + order_type = 'desc' + else: + order_type = 'asc' + order = order_by + else: + order = 'vul_count' + order_type = 'desc' + + return order, order_type + +def intersperse(lst, item): + result = [item] * (len(lst) * 2 - 1) + result[0::2] = lst + return result + +class ScaList(UserEndPoint): + @extend_schema_with_envcheck( + [ + { + 'name': "page", + 'type': int, + 'default': 1, + 'required': False, + 'description': _('Page index'), + }, + { + 'name': "pageSize", + 'type': int, + 'default': 20, + 'required': False, + 'description': _('Number per page'), + }, + { + 'name': "language", + 'type': str, + 'description': _("programming language"), + }, + { + 'name': "project_name", + 'type': str, + 'deprecated': True, + 'description': _('Name of Project'), + }, + { + 'name': "level", + 'type': int, + 'description': _('The id of level of vulnerability'), + }, + { + 'name': "project_id", + 'type': int, + 'description': _('Id of Project'), + }, + { + 'name': + "version_id", + 'type': + int, + 'description': + _("The default is the current version id of the project.") + }, + { + 'name': "keyword", + 'type': str, + 'description': + _("Fuzzy keyword search field for package_name.") + }, + { + 'name': + "order", + 'type': + str, + 'description': + format_lazy( + "{} : {}", _('Sorted index'), ",".join([ + 'version', 'level', 'vul_count', 'language', + 'package_name' + ])) + }, + ], [], [ + { + 'name': + _('Get data sample'), + 'description': + _("The aggregation results are programming language, risk level, vulnerability type, project" + ), + 'value': { + "status": + 201, + "msg": + "success", + "data": [ + { + "id": 13293, + "package_name": "message-business-7.1.0.Final.jar", + "version": "7.1.0.Final", + "project_name": "No application", + "project_id": 0, + "project_version": "No application version", + "language": "JAVA", + "agent_name": + "Mac OS X-bogon-v1.0.0-0c864ba2a60b48aaa1a8b49a53a6749b", + "signature_value": + "f744df92326c4bea7682fd16004bec184148db07", + "level": "INFO", + "level_type": 4, + "vul_count": 0, + "dt": 1631189450 + } + ], + "page": { + "alltotal": 795, + "num_pages": 795, + "page_size": 1 + } + } + } + ], + tags=[_('Component')], + summary=_("Component List (with project)"), + description= + _("use the specified project information to obtain the corresponding component." 
+ ), + response_schema=_ResponseSerializer) + def post(self, request): + """ + :param request: + :return: + """ + auth_users = self.get_auth_users(request.user) + request_data = request.data + page = request_data.get('page', 1) + page_size = request_data.get('pageSize', 20) + + page_size = min(50, int(page_size)) + + query_start = (page - 1) * page_size + + auth_user_ids = [str(_i.id) for _i in auth_users] + base_query_sql = " LEFT JOIN iast_asset ON iast_asset.signature_value = iast_asset_aggr.signature_value WHERE iast_asset.user_id in %s and iast_asset.is_del=0 " + list_sql_params = [auth_user_ids] + count_sql_params = [auth_user_ids] + es_query = {} + es_query['page_size'] = page_size + es_query['page'] = page + es_query['user_id'] = request.user.id + asset_aggr_where = " and iast_asset_aggr.id>0 " + where_conditions = [] + where_conditions_dict = {} + user_ids = [_i.id for _i in auth_users] + if len(user_ids) == 1: + where_conditions.append('user_id = %(user_ids)s') + where_conditions_dict['user_ids'] = user_ids[0] + else: + where_conditions.append('user_id IN %(user_ids)s') + where_conditions_dict['user_ids'] = user_ids + + project_id = request_data.get('project_id', None) + if project_id and project_id != '': + es_query['bind_project_id'] = project_id + version_id = request.GET.get('version_id', None) + if not version_id: + current_project_version = get_project_version(project_id, auth_users) + else: + current_project_version = get_project_version_by_id(version_id) + + base_query_sql = base_query_sql + " and iast_asset.project_id=%s and iast_asset.project_version_id=%s " + project_version_id = current_project_version.get("version_id", 0) + es_query['project_version_id'] = project_version_id + list_sql_params.append(project_id) + count_sql_params.append(project_id) + list_sql_params.append(project_version_id) + count_sql_params.append(project_version_id) + where_conditions.append('project_id = %(project_id)s ') + where_conditions_dict['project_id'] = project_id + where_conditions.append('project_version_id = %(project_version_id)s') + where_conditions_dict['project_version_id'] = project_version_id + + total_count_sql = "SELECT count(distinct(iast_asset_aggr.id)) as alltotal FROM iast_asset_aggr {base_query_sql} {where_sql} limit 1 " + list_query_sql = "SELECT iast_asset_aggr.signature_value FROM iast_asset_aggr {base_query_sql} {where_sql} GROUP BY iast_asset_aggr.id {order_sql} {page_sql} " + + language = request_data.get('language', None) + if language: + asset_aggr_where = asset_aggr_where + " and iast_asset_aggr.language in %s" + count_sql_params.append(language) + list_sql_params.append(language) + es_query['languages'] = language + if len(language) == 1: + where_conditions.append('language = %(languages)s') + where_conditions_dict['languages'] = language[0] + else: + where_conditions.append('language IN %(languages)s') + where_conditions_dict['languages'] = language + asset_aggr_where, count_sql_params, list_sql_params = self.extend_sql( + request_data, asset_aggr_where, count_sql_params, list_sql_params) + + level_ids = request_data.get('level_id', None) + if level_ids: + es_query['level_ids'] = level_ids + level_ids = [str(x) for x in level_ids] + asset_aggr_where = asset_aggr_where + " and iast_asset_aggr.level_id in %s" + count_sql_params.append(level_ids) + list_sql_params.append(level_ids) + if len(level_ids) == 1: + where_conditions.append('level_id = %(level_ids)s') + where_conditions_dict['level_ids'] = level_ids[0] + else: + where_conditions.append('level_id IN 
%(level_ids)s') + where_conditions_dict['level_ids'] = level_ids + + package_kw = request_data.get('keyword', None) + if package_kw: + es_query['search_keyword'] = package_kw + # package_kw = pymysql.converters.escape_string(package_kw) + if package_kw and package_kw.strip() != '': + package_kw = '%{}%'.format(package_kw) + asset_aggr_where = asset_aggr_where + " and iast_asset_aggr.package_name like %s " + list_sql_params.append(package_kw) + count_sql_params.append(package_kw) + where_conditions.append('package_name LIKE %(package_kw)s') + where_conditions_dict['package_kw'] = package_kw + + order_by = '-vul_count' + order = request.data.get('order', None) + if not order or order == "-": + order = '-vul_count' + + order_fields = [ + 'level', 'license', 'vul_count', 'project_count' + ] + order, order_type = get_order_params(order_fields, order) + es_query['order'] = order + es_query['order_type'] = order_type +# if ELASTICSEARCH_STATE: +# data = get_vul_list_from_elastic_searchv2(**es_query) +# else: + data = mysql_search(where_conditions, where_conditions_dict, + page_size, order_type, order, page) + query_data = ScaAssetSerializer(data, many=True).data + + return R.success(data=query_data) + order_sql = " order by {} {},iast_asset_aggr.id DESC ".format(order, order_type) + page_sql = " limit %s,%s" + list_sql_params.append(query_start) + list_sql_params.append(page_size) + + total_count_sql = total_count_sql.format(base_query_sql=base_query_sql, where_sql=asset_aggr_where) + list_query_sql = list_query_sql.format(base_query_sql=base_query_sql, where_sql=asset_aggr_where, + order_sql=order_sql, page_sql=page_sql) + + total_count = 0 + sca_ids = [] + try: + with connection.cursor() as cursor: + cursor.execute(total_count_sql, count_sql_params) + total_count_query = cursor.fetchone() + total_count = total_count_query[0] + + with connection.cursor() as cursor: + cursor.execute(list_query_sql, list_sql_params) + list_query = cursor.fetchall() + if list_query: + for _l in list_query: + sca_ids.append(_l[0]) + except Exception as e: + logger.warning("sca list error:{}".format(e)) + +# if ELASTICSEARCH_STATE : +# query_data = ScaAssetSerializer(get_vul_list_from_elastic_search( +# sca_ids, order_by), +# many=True).data +# else: + query_data = ScaAssetSerializer( + AssetAggr.objects.filter(signature_value__in=sca_ids).order_by(order_by).select_related('level'), + many=True).data + + return R.success(data=query_data) + + def extend_sql(self, request_data, asset_aggr_where, count_sql_params, + list_sql_params): + return asset_aggr_where, count_sql_params, list_sql_params + + +from elasticsearch_dsl import Q, Search +from elasticsearch import Elasticsearch +from elasticsearch_dsl import A +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_common.models.program_language import IastProgramLanguage +from dongtai_common.models.project import IastProject +from dongtai_common.models.vul_level import IastVulLevel +from django.core.cache import cache +from dongtai_conf import settings +from dongtai_common.common.utils import make_hash +from dongtai_conf.settings import ELASTICSEARCH_STATE +from dongtai_common.models.asset_aggr import AssetAggrDocument + + +def get_vul_list_from_elastic_search(sca_ids=[], order=None): + must_query = [ + Q('terms', **{"signature_value": sca_ids}), + ] + a = Q('bool', + must=must_query) + extra_dict = {} + order_list = [] + if order: + order_list.insert(0, order) + res = 
AssetAggrDocument.search().query(a).extra( + **extra_dict).sort(*order_list)[:len(sca_ids)] + resp = res.execute() + vuls = [i._d_ for i in list(resp)] + for i in vuls: + if '@timestamp' in i.keys(): + del i['@timestamp'] + res_vul = [AssetAggr(**i) for i in vuls] + return res_vul + + +from dongtai_web.aggregation.aggregation_common import auth_user_list_str +from dongtai_common.models.asset import IastAssetDocument + + +def get_vul_list_from_elastic_searchv2(user_id, + bind_project_id=None, + project_version_id=None, + level_ids=[], + languages=[], + order="", + order_type="", + page=1, + page_size=10, + search_keyword="", + extend_filter={}): + user_id_list = [user_id] + auth_user_info = auth_user_list_str(user_id=user_id) + user_id_list = auth_user_info['user_list'] + must_query = [ + Q('terms', user_id=user_id_list), + Q('terms', is_del=[0]), + ] + order_list = ['-signature_value.keyword'] + if order: + order_list.insert(0, {order: {'order': order_type}}) + if bind_project_id: + must_query.append(Q('terms', project_id=[int(bind_project_id)])) + if project_version_id: + must_query.append(Q('terms', project_version_id=[project_version_id])) + if languages: + must_query.append(Q('terms', **{"language.keyword": languages})) + if level_ids: + must_query.append(Q('terms', level_id=level_ids)) + if search_keyword: + must_query.append( + Q("wildcard", + **{"package_name.keyword": { + "value": f"*{search_keyword}*" + }})) + hashkey = f"{__name__}_es" + str( + make_hash([ + user_id, level_ids, languages, search_keyword, page_size, + bind_project_id, project_version_id + ])) + after_table = cache.get(hashkey, {}) + after_key = after_table.get(page, None) + if page != 1 and not after_key: + return [] + extra_dict = {'collapse': {'field': 'signature_value.keyword'}} + after_fields = [] + for info in order_list: + field = '' + if isinstance(info, dict): + field = list(info.keys())[0] + if isinstance(info, str): + if info.startswith('-'): + field = info[1::] + else: + field = info + if field == 'package_name.keyword': + field = 'package_name' + after_fields.append(field) + if after_key: + #sub_after_must_query = [] + sub_after_must_not_query = [] + #sub_after_should_query = [] + sub_after_must_not_query.append( + Q('terms', **{"signature_value.keyword": after_key})) + #for info, value in zip(order_list, after_key): + # field = '' + # opt = '' + # if isinstance(info, dict): + # field = list(info.keys())[0] + # if info[field]['order'] == 'desc': + # opt = 'lte' + # else: + # opt = 'gte' + # if isinstance(info, str): + # if info.startswith('-'): + # field = info[1::] + # opt = 'lt' + # else: + # field = info + # opt = 'gt' + # sub_after_must_query.append(Q('range', **{field: {opt: value}})) + must_query.append( + Q( + 'bool', + must_not=sub_after_must_not_query, + #must=sub_after_must_query, + #should=sub_after_should_query, + #minimum_should_match=1 + )) + a = Q('bool', must=must_query) + search = IastAssetDocument.search().query(Q( + 'bool', + must=must_query)).extra(**extra_dict).sort(*order_list)[:page_size * WINDOW_SIZE] + logger.debug(f"search_query : {search.to_dict()}") + resp = search.execute() + vuls = [i._d_ for i in list(resp)] + if not after_key: + after_key = [] + for i in range(WINDOW_SIZE): + chunk = vuls[page_size * i:page_size * (i + 1)] + if len(chunk) != page_size: + break + new_after_key = after_key.copy() + new_after_key.extend([i['signature_value'] for i in chunk]) + #latest_data = chunk[-1] + #after_key = [ + # latest_data.get(after_field) for after_field in after_fields + #] + 
after_table[page + i + 1] = new_after_key + after_key = new_after_key + for i in vuls: + if '@timestamp' in i.keys(): + del i['@timestamp'] + if 'signature_value.keyword' in i.keys(): + del i['signature_value.keyword'] + res_vul = [Asset(**i) for i in vuls] + #if resp.hits: + # afterkey = resp.hits[-1].meta['sort'] + # after_table[page + 1] = afterkey + print(after_table) + cache.set(hashkey, after_table) + return res_vul[:page_size] + + +def mysql_search(where_conditions, where_conditions_dict, page_size, + order_type, order, page): + hashkey = f"{__name__}_mysql" + str( + make_hash([ + where_conditions, where_conditions_dict, page_size, order_type, + order + ])) + after_table = cache.get(hashkey, {}) + after_key = after_table.get(page, None) + if page != 1 and not after_key: + return [] + if after_key: + after_order_value, after_signature = after_key + if order_type == 'desc': + where_conditions.append(f"({order}, signature_value) < %(after_order_value)s ") + where_conditions_dict['after_order_value'] = (after_order_value, after_signature) + else: + where_conditions.append(f"({order}, signature_value) > %(after_order_value)s ") + where_conditions_dict['after_order_value'] = (after_order_value, after_signature) +# where_conditions.append(f"signature_value < %(after_order_id)s ") +# where_conditions_dict['after_order_id'] = after_signature + + order_conditions = ["signature_value DESC", ] + order_conditions_dict = { +# "id": 'id', + } + if order_type == 'desc': + order_conditions.insert(0, f"{order} DESC") + else: + order_conditions.insert(0, f"{order} ASC") + order_conditions_dict["field"] = order + final_sql = """SELECT ia2.* FROM iast_asset ia2 + RIGHT JOIN + (SELECT signature_value as _1, MAX(id) as _2, ANY_VALUE(vul_count) as vul_count, + ANY_VALUE(language) as language , + ANY_VALUE(license) as license , + ANY_VALUE(level_id) as level_id FROM iast_asset ia + WHERE {where_place} + GROUP BY signature_value + ORDER BY {order_place} + ) AS TMP ON + ia2.signature_value = TMP._1 AND ia2.id = TMP._2 + ORDER BY {order_place} LIMIT {size} ;""".format( + where_place=' AND '.join(where_conditions) + if where_conditions else '1 = 1', + order_place=' , '.join(order_conditions) + if order_conditions else 'NULL', + size='%(size)s') + base_dict = {'size': page_size * WINDOW_SIZE} + #base_dict.update(order_conditions_dict) + base_dict.update(where_conditions_dict) + data = Asset.objects.raw(final_sql, params=base_dict) + data = list(data) + for i in range(WINDOW_SIZE): + chunk = data[page_size * i:page_size * (i + 1)] + if len(chunk) != page_size: + break + latest_data = chunk[-1] + after_key = [ + getattr(latest_data, order), + getattr(latest_data, 'signature_value') + ] + after_table[page + i + 1] = after_key + cache.set(hashkey, after_table) + return data[:page_size] diff --git a/dongtai_web/views/sensitive_info_rule.py b/dongtai_web/views/sensitive_info_rule.py new file mode 100644 index 000000000..917b182ea --- /dev/null +++ b/dongtai_web/views/sensitive_info_rule.py @@ -0,0 +1,355 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : sensitive_info_rule +# @created : 星期三 11月 17, 2021 16:15:57 CST +# +# @description : +###################################################################### + + + +import logging + +from dongtai_common.endpoint import UserEndPoint, R +from dongtai_common.models.hook_type import HookType +from dongtai_common.utils import const + +from dongtai_web.serializers.hook_type_strategy import 
HookTypeSerialize +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.text import format_lazy +from rest_framework.serializers import ValidationError +from dongtai_web.serializers.hook_strategy import HOOK_TYPE_CHOICE +from rest_framework import viewsets +from django.db import connection +logger = logging.getLogger('dongtai-webapi') +from django.db import models +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.user import User +import time +from django.db.models import Q +from dongtai_common.models.sensitive_info import IastPatternType,IastSensitiveInfoRule +import jq +import re2 as re +from dongtai_common.permissions import TalentAdminPermission +from dongtai_web.views.utils.commonview import ( + BatchStatusUpdateSerializerView, + AllStatusUpdateSerializerView, +) +from django.core.exceptions import ( + ObjectDoesNotExist, ) + + +class SensitiveInfoRuleSerializer(serializers.ModelSerializer): + strategy_name = serializers.SerializerMethodField() + strategy_id = serializers.SerializerMethodField() + pattern_type_id = serializers.SerializerMethodField() + pattern_type_name = serializers.SerializerMethodField() + class Meta: + model = IastSensitiveInfoRule + fields = [ + 'id', 'strategy_name', 'strategy_id', 'pattern_type_id', + 'pattern_type_name', 'pattern', 'status', 'latest_time' + ] + + def get_strategy_name(self,obj): + try: + return obj.strategy.vul_name + except ObjectDoesNotExist as e: + print(e) + return '' + + def get_strategy_id(self,obj): + try: + return obj.strategy.id + except ObjectDoesNotExist as e: + print(e) + return 0 + + def get_pattern_type_id(self,obj): + try: + return obj.pattern_type.id + except ObjectDoesNotExist as e: + print(e) + return 0 + def get_pattern_type_name(self,obj): + try: + return obj.pattern_type.name + except ObjectDoesNotExist as e: + print(e) + return '' + +class SensitiveInfoPatternTypeSerializer(serializers.ModelSerializer): + url = serializers.SerializerMethodField() + class Meta: + model = IastPatternType + fields = ['id', 'name', 'url'] + + def get_url(self, obj): + url_dict = {1: 'regex', 2: 'json'} + return url_dict.get(obj.id, '') + + +class SensitiveInfoRuleCreateSerializer(serializers.Serializer): + strategy_id = serializers.IntegerField(min_value=1, + max_value=2147483646, + required=True) + pattern_type_id = serializers.IntegerField(min_value=1, + max_value=2147483646, + required=True) + pattern = serializers.CharField(required=True) + status = serializers.ChoiceField(choices=(0, 1), required=True) + + +class _SensitiveInfoArgsSerializer(serializers.Serializer): + page_size = serializers.IntegerField(default=20, + help_text=_('Number per page')) + page = serializers.IntegerField(default=1, help_text=_('Page index')) + name = serializers.CharField( + default=None, + required=False, + help_text=_( + "The name of the item to be searched, supports fuzzy search.")) + +class _RegexPatternValidationSerializer(serializers.Serializer): + pattern = serializers.CharField(help_text=_('regex pattern')) + test_data = serializers.CharField(help_text=_('the data for test regex')) + +class SensitiveInfoRuleViewSet(UserEndPoint,viewsets.ViewSet): + + permission_classes_by_action = {} + + def get_permissions(self): + try: + return [permission() for permission in self.permission_classes_by_action[self.action]] + except KeyError: + return [permission() for permission in 
self.permission_classes] + + @extend_schema_with_envcheck( + [_SensitiveInfoArgsSerializer], + tags=[_('SensitiveInfoRule')], + summary=_('SensitiveInfoRule List'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." + ), + ) + def list(self,request): + ser = _SensitiveInfoArgsSerializer(data=request.GET) + try: + if ser.is_valid(True): + name = ser.validated_data['name'] + page = ser.validated_data['page'] + page_size = ser.validated_data['page_size'] + except ValidationError as e: + return R.failure(data=e.detail) + users = self.get_auth_users(request.user) + q = Q(user__in=users) & ~Q(status=-1) + if name: + strategys = IastStrategyModel.objects.filter( + vul_name__icontains=name).all() + q = Q(strategy__in=strategys) & q + queryset = IastSensitiveInfoRule.objects.filter(q).order_by( + '-latest_time') + page_summary, page_data = self.get_paginator(queryset, page, page_size) + return R.success(data=SensitiveInfoRuleSerializer(page_data, + many=True).data, + page=page_summary) + + @extend_schema_with_envcheck( + request=SensitiveInfoRuleCreateSerializer, + tags=[_('SensitiveInfoRule')], + summary=_('SensitiveInfoRule Create'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." + ), + ) + def create(self,request): + ser = SensitiveInfoRuleCreateSerializer(data=request.data) + try: + if ser.is_valid(True): + strategy_id = ser.validated_data['strategy_id'] + pattern_type_id = ser.validated_data['pattern_type_id'] + pattern = ser.validated_data['pattern'] + status = ser.validated_data['status'] + except ValidationError as e: + return R.failure(data=e.detail) + strategy = IastStrategyModel.objects.filter(pk=strategy_id).first() + pattern_type = IastPatternType.objects.filter(pk=pattern_type_id).first() + pattern_test_dict = {1:regexcompile,2:jqcompile} + test = pattern_test_dict.get(pattern_type_id,None) + if not test: + return R.failure() + status_ = test(pattern) + if strategy and pattern_type and status_: + obj = IastSensitiveInfoRule.objects.create(strategy=strategy, + pattern_type=pattern_type, + pattern=pattern, + status=status, + user=request.user) + return R.success(msg=_('create success'),data=SensitiveInfoRuleSerializer(obj).data) + else: + return R.failure() + @extend_schema_with_envcheck( + request=SensitiveInfoRuleCreateSerializer, + tags=[_('SensitiveInfoRule')], + summary=_('SensitiveInfoRule Update'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." + ), + ) + def update(self, request, pk): + ser = SensitiveInfoRuleCreateSerializer(data=request.data) + try: + if ser.is_valid(True): + strategy_id = ser.validated_data['strategy_id'] + pattern_type_id = ser.validated_data['pattern_type_id'] + pattern = ser.validated_data['pattern'] + status = ser.validated_data['status'] + except ValidationError as e: + return R.failure(data=e.detail) + users = self.get_auth_users(request.user) + obj = IastSensitiveInfoRule.objects.filter( + pk=pk, user__in=users).update(**ser.validated_data, + latest_time=time.time()) + return R.success(msg=_('update success')) + + @extend_schema_with_envcheck( + tags=[_('SensitiveInfoRule')], + summary=_('SensitiveInfoRule delete'), + description= + _("Get the item corresponding to the user, support fuzzy search based on name." 
+          ),
+    )
+    def destory(self, request, pk):
+        users = self.get_auth_users(request.user)
+        IastSensitiveInfoRule.objects.filter(pk=pk,
+                                             user__in=users).update(status=-1)
+        return R.success(msg=_('delete success'))
+
+    @extend_schema_with_envcheck(
+        tags=[_('SensitiveInfoRule')],
+        summary=_('SensitiveInfoRule get'),
+        description=
+        _("Get the sensitive information rule with the given id."
+          ),
+    )
+    def retrieve(self, request, pk):
+        users = self.get_auth_users(request.user)
+        obj = IastSensitiveInfoRule.objects.filter(pk=pk,
+                                                   user__in=users).first()
+        if not obj:
+            return R.failure()
+        return R.success(data=SensitiveInfoRuleSerializer(obj).data)
+
+
+class SensitiveInfoPatternTypeView(UserEndPoint):
+
+    @extend_schema_with_envcheck(
+        tags=[_('SensitiveInfoRule')],
+        summary=_('SensitiveInfoRule Pattern Type List'),
+        description=
+        _("Get all sensitive information pattern types."
+          ),
+    )
+    def get(self,request):
+        objs = IastPatternType.objects.all()
+        return R.success(data=SensitiveInfoPatternTypeSerializer(objs,many=True).data)
+
+
+class SensitiveInfoPatternValidationView(UserEndPoint):
+    @extend_schema_with_envcheck(
+        request=_RegexPatternValidationSerializer,
+        tags=[_('SensitiveInfoRule')],
+        summary=_('SensitiveInfoRule pattern validation'),
+        description=
+        _("Validate a rule pattern against the given test data."
+          ),
+    )
+    def post(self,request,pattern_type):
+        pattern_test_dict = {'regex':regextest,'json':jsontest}
+        ser = _RegexPatternValidationSerializer(data=request.data)
+        try:
+            if ser.is_valid(True):
+                test_data = ser.validated_data['test_data']
+                pattern = ser.validated_data['pattern']
+                if pattern_type not in pattern_test_dict.keys():
+                    return R.failure()
+        except ValidationError as e:
+            return R.failure(data=e.detail)
+        test = pattern_test_dict[pattern_type]
+        data, status = test(test_data,pattern)
+        return R.success(data={'status':status,'data':data})
+
+
+def regexcompile(pattern):
+    try:
+        re.compile(pattern)
+    except Exception as e:
+        logger.error(e)
+        return False
+    return True
+
+def jqcompile(pattern):
+    try:
+        jq.compile(pattern)
+    except Exception as e:
+        logger.error(e)
+        return False
+    return True
+
+def regextest(test_data,pattern):
+    try:
+        regex = re.compile(pattern, re.M)
+    except Exception as e:
+        logger.error(e)
+        data = ''
+        status = 0
+        return data,status
+    result = regex.search(test_data)
+    if result and result.groups():
+        return result.group(0), 1
+    return '', 1
+
+def jsontest(test_data,pattern):
+    try:
+        data = jq.compile(pattern).input(text=test_data).text()
+        status = 1
+    except Exception as e:
+        logger.error(e)
+        data = ''
+        status = 0
+    return data, status
+
+
+class SensitiveInfoRuleBatchView(BatchStatusUpdateSerializerView):
+    status_field = 'status'
+    model = IastSensitiveInfoRule
+
+    @extend_schema_with_envcheck(
+        request=BatchStatusUpdateSerializerView.serializer,
+        tags=[_('SensitiveInfoRule')],
+        summary=_('SensitiveInfoRule batch status'),
+        description=_("batch update status."),
+    )
+    def post(self, request):
+        data = self.get_params(request.data)
+        self.update_model(request, data)
+        return R.success(msg=_('update success'))
+
+class SensitiveInfoRuleAllView(AllStatusUpdateSerializerView):
+    status_field = 'status'
+    model = IastSensitiveInfoRule
+
+    @extend_schema_with_envcheck(
+        request=AllStatusUpdateSerializerView.serializer,
+        tags=[_('SensitiveInfoRule')],
+        summary=_('SensitiveInfoRule all status'),
+        description=_("all update status."),
+    )
+    def post(self, request):
+        data = self.get_params(request.data)
+        self.update_model(request, data)
+        return R.success(msg=_('update success'))
diff --git a/dongtai_web/views/strategy_delete.py b/dongtai_web/views/strategy_delete.py
new file mode 100644
index 000000000..f4adfe127
--- /dev/null
+++ b/dongtai_web/views/strategy_delete.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 -*-
+# author:owefsad
+# software: PyCharm
+# project: lingzhi-webapi
+from rest_framework.request import Request
+
+from dongtai_common.endpoint import R
+from dongtai_common.endpoint import UserEndPoint
+from dongtai_common.models.strategy_user import IastStrategyUser
+from dongtai_common.models.strategy import IastStrategyModel
+from dongtai_common.models.hook_type import HookType
+from dongtai_common.models.hook_strategy import HookStrategy
+from dongtai_common.utils import const
+from django.utils.translation import gettext_lazy as _
+from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer
+from dongtai_common.endpoint import TalentAdminEndPoint
+
+from rest_framework import serializers
+class _StrategyResponseDataStrategySerializer(serializers.Serializer):
+    id = serializers.CharField(help_text=_('The id of strategy'))
+
+
+_ResponseSerializer = get_response_serializer(
+    data_serializer=_StrategyResponseDataStrategySerializer(many=True), )
+
+DELETE = 'delete'
+class StrategyDelete(TalentAdminEndPoint):
+
+    @extend_schema_with_envcheck(
+        tags=[_('Strategy')],
+        summary=_('Strategy Delete'),
+        description=_(
+            "Delete the corresponding strategy according to id"
+        ),
+        response_schema=_ResponseSerializer,
+    )
+    def delete(self, request, id_: int):
+        if id_ <= 0:
+            return R.failure()
+        strategy = IastStrategyModel.objects.filter(pk=id_).first()
+        if not strategy:
+            return R.failure(msg=_('This strategy does not exist'))
+        hook_types = HookType.objects.filter(vul_strategy=strategy).all()
+        strategy.state = DELETE
+        strategy.save()
+        for hook_type in hook_types:
+            # need to check why language_id shows 0
+            if hook_type.language_id == 0:
+                continue
+            hook_strategies = hook_type.strategies.all()
+            for hook_strategy in hook_strategies:
+                hook_strategy.enable = const.DELETE
+                hook_strategy.save()
+            hook_type.enable = const.DELETE
+            hook_type.save()
+        return R.success(data={"id": id_})
diff --git a/dongtai_web/views/strategy_disable.py b/dongtai_web/views/strategy_disable.py
new file mode 100644
index 000000000..2540f0ef5
--- /dev/null
+++ b/dongtai_web/views/strategy_disable.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 -*-
+# author:owefsad
+# software: PyCharm
+# project: lingzhi-webapi
+from dongtai_common.models.hook_type import HookType
+from dongtai_common.models.hook_strategy import HookStrategy
+from dongtai_common.utils import const
+from dongtai_common.models.strategy import IastStrategyModel
+
+from dongtai_common.endpoint import R
+from dongtai_common.endpoint import TalentAdminEndPoint
+from django.utils.translation import gettext_lazy as _
+from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer
+
+_ResponseSerializer = get_response_serializer(status_msg_keypair=(
+    ((201, _('Strategy is disabled, total {} hook rules')), ''),
+    ((202, _('Strategy does not exist')), ''),
+))
+
+DISABLE = 'disable'
+class StrategyDisableEndpoint(TalentAdminEndPoint):
+    @extend_schema_with_envcheck(
+        tags=[_('Strategy')],
+        summary=_('Strategy Disable'),
+        description=_(
+            "Disable the corresponding strategy according to 
id" + ), + response_schema=_ResponseSerializer, + ) + def get(self, request, id): + strategy = IastStrategyModel.objects.filter(id=id).first() + strategy_models = HookType.objects.filter(vul_strategy=strategy).all() + if strategy: + strategy.state = DISABLE + strategy.save() + total_counts = 0 + for strategy_model in strategy_models: + counts = strategy_model.strategies.filter(enable=const.HOOK_TYPE_ENABLE).update( + enable=const.HOOK_TYPE_DISABLE) + strategy_model.enable = const.HOOK_TYPE_DISABLE + strategy_model.save(update_fields=['enable']) + total_counts += counts + return R.success(msg=_('Strategy is disabled, total {} hook rules').format(total_counts)) + else: + return R.failure(status=202, msg=_('Strategy does not exist')) + + +if __name__ == '__main__': + + HookStrategy.objects.values("id").count() diff --git a/dongtai_web/views/strategy_enable.py b/dongtai_web/views/strategy_enable.py new file mode 100644 index 000000000..583e5e0aa --- /dev/null +++ b/dongtai_web/views/strategy_enable.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad + +# software: PyCharm +# project: lingzhi-webapi +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.hook_strategy import HookStrategy +from dongtai_common.utils import const +from dongtai_common.models.strategy import IastStrategyModel + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import TalentAdminEndPoint +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Policy enabled success, total {} hook rules')), ''), + ((202, _('Strategy does not exist')), ''), +)) + +ENABLE = 'enable' + +class StrategyEnableEndpoint(TalentAdminEndPoint): + @extend_schema_with_envcheck( + tags=[_('Strategy')], + summary=_('Strategy Enbale'), + description=_( + "Enable the corresponding strategy according to id" + ), + response_schema=_ResponseSerializer, + ) + def get(self, request, id): + strategy = IastStrategyModel.objects.filter(id=id).first() + strategy_models = HookType.objects.filter(vul_strategy=strategy).all() + if strategy: + strategy.state = ENABLE + strategy.save() + total_counts = 0 + for strategy_model in strategy_models: + counts = strategy_model.strategies.filter(enable=const.HOOK_TYPE_DISABLE).update( + enable=const.HOOK_TYPE_ENABLE) + strategy_model.enable = const.HOOK_TYPE_ENABLE + strategy_model.save(update_fields=['enable']) + total_counts += counts + return R.success(msg=_('Policy enabled success, total {} hook rules').format(total_counts)) + else: + return R.failure(msg=_('Strategy does not exist')) + + +if __name__ == '__main__': + HookStrategy.objects.values("id").count() diff --git a/dongtai_web/views/strategy_modified.py b/dongtai_web/views/strategy_modified.py new file mode 100644 index 000000000..d00b2ede8 --- /dev/null +++ b/dongtai_web/views/strategy_modified.py @@ -0,0 +1,78 @@ +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from dongtai_common.endpoint import TalentAdminEndPoint + +_ResponseSerializer = get_response_serializer() + +class 
StrategyCreateSerializer(serializers.Serializer): + vul_name = serializers.CharField(help_text=_('The name of the vulnerability type targeted by the strategy')) + vul_type = serializers.CharField(help_text=_('Types of vulnerabilities targeted by the strategy')) + state = serializers.CharField(help_text=_('This field indicates whether the vulnerability is enabled, 1 or 0')) + vul_desc = serializers.CharField(help_text=_('Description of the corresponding vulnerabilities of the strategy')) + level_id = serializers.IntegerField( + min_value=1, + max_value=5, + help_text=_('The strategy corresponds to the level of vulnerability')) + vul_fix = serializers.CharField(help_text=_( + "Suggestions for repairing vulnerabilities corresponding to the strategy" + )) + +class StrategyModified(TalentAdminEndPoint): + + @extend_schema_with_envcheck( + request=StrategyCreateSerializer, + tags=[_('Strategy')], + summary=_('Strategy modified'), + description=_( + "Get a list of strategies." + ), + response_schema=_ResponseSerializer, + ) + def put(self, request, id_): + fields = [ + 'vul_type', 'vul_name', 'vul_desc', 'vul_fix', 'state', 'level_id' + ] + # here should refactor with serilizer. + if 'level_id' in request.data.keys() and request.data['level_id'] <= 0: + return R.failure() + data = {k: v for k, v in request.data.items() if k in fields} + strategy = IastStrategyModel.objects.filter( + pk=id_).first() + if not strategy: + return R.failure() + if not HookType.objects.filter(vul_strategy=strategy).exists(): + del data['vul_type'] + _update(strategy, data) + HookType.objects.filter(vul_strategy=strategy, + type=4).update(name=data['vul_name']) + HookType.objects.filter(vul_strategy=strategy, + type=3).update(name=data['vul_name']) + return R.success(data={'id': id_}) + #hook_type = HookType.objects.filter(pk=id_).first() + #_update(hook_type, data) + #strategy = IastStrategyModel.objects.filter( + # hook_type=hook_type.id).first() + #if strategy: + # _update(strategy, data) + #return R.success(data={"id": id_}) + + +def _update(model, dic): + for k, v in dic.items(): + setattr(model, k, v) + model.save() + + +def get_model_field(model, exclude=[], include=[]): + fields = [field.name for field in model._meta.fields] + if include: + return [ + include for field in list(set(fields) - set(exclude)) + if field in include + ] + return list(set(fields) - set(exclude)) diff --git a/dongtai_web/views/strategys.py b/dongtai_web/views/strategys.py new file mode 100644 index 000000000..eed6457ee --- /dev/null +++ b/dongtai_web/views/strategys.py @@ -0,0 +1,216 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad + +# software: PyCharm +# project: lingzhi-webapi +from dongtai_common.utils import const +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.strategy import IastStrategyModel + +from django.db.models import Q +from dongtai_common.endpoint import R +from dongtai_common.utils import const +from dongtai_common.endpoint import UserEndPoint +from dongtai_web.serializers.strategy import StrategySerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_common.models.hook_type import HookType +from rest_framework import serializers +from dongtai_common.models.program_language import IastProgramLanguage +import time +from rest_framework.serializers import ValidationError +from dongtai_common.permissions import TalentAdminPermission +from rest_framework.decorators 
import permission_classes +from dongtai_common.models.vul_level import IastVulLevel + +class _StrategyResponseDataStrategySerializer(serializers.Serializer): + id = serializers.CharField(help_text=_('The id of agent')) + vul_name = serializers.CharField(help_text=_('The name of the vulnerability type targeted by the strategy')) + vul_type = serializers.CharField(help_text=_('Types of vulnerabilities targeted by the strategy')) + enable = serializers.CharField(help_text=_('This field indicates whether the vulnerability is enabled, 1 or 0')) + vul_desc = serializers.CharField(help_text=_('Description of the corresponding vulnerabilities of the strategy')) + level = serializers.IntegerField( + help_text=_('The strategy corresponds to the level of vulnerability')) + dt = serializers.IntegerField( + help_text=_('Strategy update time')) + vul_fix = serializers.CharField(help_text=_( + "Suggestions for repairing vulnerabilities corresponding to the strategy" + )) + + +class StrategyCreateSerializer(serializers.Serializer): + vul_name = serializers.CharField(help_text=_('The name of the vulnerability type targeted by the strategy')) + vul_type = serializers.CharField(help_text=_('Types of vulnerabilities targeted by the strategy')) + state = serializers.CharField(help_text=_('This field indicates whether the vulnerability is enabled, 1 or 0')) + vul_desc = serializers.CharField(help_text=_('Description of the corresponding vulnerabilities of the strategy')) + level_id = serializers.IntegerField( + min_value=1, + help_text=_('The strategy corresponds to the level of vulnerability')) + vul_fix = serializers.CharField( + allow_blank=True, + help_text= + _("Suggestions for repairing vulnerabilities corresponding to the strategy" + )) + + def validate_level_id(self, value): + if not IastVulLevel.objects.filter(pk=value).exists(): + raise serializers.ValidationError("this vul level not exist") + return value + + +_ResponseSerializer = get_response_serializer( + data_serializer=_StrategyResponseDataStrategySerializer(many=True), ) + + +class _StrategyArgsSerializer(serializers.Serializer): + page_size = serializers.IntegerField(default=None, + help_text=_('Number per page')) + page = serializers.IntegerField(default=None, help_text=_('Page index')) + name = serializers.CharField( + default=None, + help_text=_( + "The name of the item to be searched, supports fuzzy search.")) + + +STATUS_DELETE = 'delete' + +class StrategyEndpoint(UserEndPoint): + + @extend_schema_with_envcheck( + tags=[_('Strategy')], + summary=_('Strategy retrieve'), + description=_( + "Get a strategiey by id." + ), + response_schema=_ResponseSerializer, + ) + def get(self, request, pk): + q = ~Q(state=STATUS_DELETE) + q = q & Q(pk=pk) + queryset = IastStrategyModel.objects.filter(q).first() + return R.success(data=StrategySerializer(queryset).data,) + +class StrategysEndpoint(UserEndPoint): + permission_classes_by_action = {'POST':(TalentAdminPermission,)} + + def get_permissions(self): + try: + return [permission() for permission in self.permission_classes_by_action[self.request.method]] + except KeyError: + return [permission() for permission in self.permission_classes] + + @extend_schema_with_envcheck( + [_StrategyArgsSerializer], + tags=[_('Strategy')], + summary=_('Strategy List'), + description=_( + "Get a list of strategies." 
+ ), + response_schema=_ResponseSerializer, + ) + def get(self, request): + ser = _StrategyArgsSerializer(data=request.GET) + try: + if ser.is_valid(True): + page_size = ser.validated_data['page_size'] + page = ser.validated_data['page'] + name = ser.validated_data['name'] + except ValidationError as e: + return R.failure(data=e.detail) + q = ~Q(state=STATUS_DELETE) + if name: + q = q & Q(vul_name__icontains=name) + queryset = IastStrategyModel.objects.filter(q).order_by('-id').all() + if page and page_size: + page_summary, page_data = self.get_paginator(queryset, page, page_size) + return R.success(data=StrategySerializer(page_data, many=True).data, + page=page_summary) + else: + return R.success(data=StrategySerializer(queryset, many=True).data,) + + strategy_models = HookType.objects.filter( + type=const.RULE_SINK + ).values( + 'id', + 'name', + 'value', + 'enable' + ).exclude(enable=const.DELETE) + if strategy_models: + models = dict() + for strategy_model in strategy_models: + models[strategy_model['id']] = { + 'id': + strategy_model['id'], + 'vul_name': + strategy_model['name'], + 'vul_type': + strategy_model['value'], + 'state': + const.STRATEGY_DISABLE + if strategy_model['enable'] is not None + and strategy_model['enable'] == 0 else + const.STRATEGY_ENABLE, + 'vul_desc': + '', + 'level': + 1, + 'dt': + 1 + } + + strategy_ids = models.keys() + profiles = IastStrategyModel.objects.values( + 'level_id', 'vul_desc', 'vul_fix', + 'hook_type_id').filter(hook_type_id__in=strategy_ids) + if profiles: + for profile in profiles: + strategy_id = profile.get('hook_type_id') + models[strategy_id]['vul_desc'] = profile['vul_desc'] + models[strategy_id]['vul_fix'] = profile['vul_fix'] + models[strategy_id]['level'] = profile['level_id'] + return R.success(data=list(models.values())) + else: + return R.success(msg=_('No strategy')) + @extend_schema_with_envcheck( + request=StrategyCreateSerializer, + tags=[_('Strategy')], + summary=_('Strategy Add'), + description=_( + "Generate corresponding strategy group according to the strategy selected by the user." 
+ ), + response_schema=_ResponseSerializer, + ) + def post(self, request): + ser = StrategyCreateSerializer(data=request.data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + print(ser.validated_data) + strategy = IastStrategyModel.objects.create(**ser.validated_data, + user=request.user, + dt=time.time()) + strategy.save() + for language in IastProgramLanguage.objects.all(): + HookType.objects.create(type=3, + name=ser.validated_data['vul_name'], + value=ser.validated_data['vul_type'], + enable=1, + create_time=time.time(), + update_time=time.time(), + created_by=request.user.id, + language=language, + vul_strategy=strategy) + HookType.objects.create(type=4, + name=ser.validated_data['vul_name'], + value=ser.validated_data['vul_type'], + enable=1, + create_time=time.time(), + update_time=time.time(), + created_by=request.user.id, + language=language, + vul_strategy=strategy) + return R.success(data=StrategySerializer(strategy).data) diff --git a/dongtai_web/views/strategys_add.py b/dongtai_web/views/strategys_add.py new file mode 100644 index 000000000..311b98d70 --- /dev/null +++ b/dongtai_web/views/strategys_add.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad + +# software: PyCharm +# project: lingzhi-webapi +from rest_framework.request import Request + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.strategy_user import IastStrategyUser +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +from rest_framework import serializers +class _StrategyResponseDataStrategySerializer(serializers.Serializer): + id = serializers.CharField(help_text=_('The id of strategy')) + +class _StrategyAddBodyargsSerializer(serializers.Serializer): + ids = serializers.CharField(help_text=_('The id corresponding to the strategys, use"," for segmentation.')) + name = serializers.CharField(help_text=_('The name of strategy')) + + + +_ResponseSerializer = get_response_serializer( + data_serializer=_StrategyResponseDataStrategySerializer(many=True), ) + + + +class StrategyAdd(UserEndPoint): + + @extend_schema_with_envcheck( + request=_StrategyAddBodyargsSerializer, + tags=[_('Strategy')], + summary=_('Sacn Strategy Add'), + description=_( + "Generate corresponding strategy group according to the strategy selected by the user." 
+ ), + response_schema=_ResponseSerializer, + ) + def post(self, request): + + ids = request.data.get("ids", None) + + name = request.data.get("name", None) + user = request.user + if not ids or not name: + return R.failure(msg=_('Parameter error')) + new_strategy = IastStrategyUser.objects.create( + name=name, + content=ids, + user=user, + status=1 + ) + return R.success(data={"id": new_strategy.id}) diff --git a/dongtai_web/views/strategys_list.py b/dongtai_web/views/strategys_list.py new file mode 100644 index 000000000..1c54a4d40 --- /dev/null +++ b/dongtai_web/views/strategys_list.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.strategy_user import IastStrategyUser +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +from rest_framework import serializers +class _StrategyResponseDataStrategySerializer(serializers.Serializer): + id = serializers.CharField(help_text=_('The id of agent')) + name = serializers.CharField(help_text=_('The name of the strategy')) + + + +_ResponseSerializer = get_response_serializer( + data_serializer=_StrategyResponseDataStrategySerializer(many=True), ) + + +class StrategyList(UserEndPoint): + + @extend_schema_with_envcheck( + tags=[_('Strategy')], + summary=_('Strategy List (with user)'), + description=_( + "Get a list of strategies." + ), + response_schema=_ResponseSerializer, + ) + def get(self, request): + user = request.user + queryset = IastStrategyUser.objects.filter( + user=user, + status=1 + ).values("id", "name").order_by("-id") + data = [] + if queryset: + for item in queryset: + data.append({ + "id": item['id'], + "name": item['name'], + }) + return R.success(data=data) diff --git a/dongtai_web/views/strategys_type.py b/dongtai_web/views/strategys_type.py new file mode 100644 index 000000000..7c84c6cfe --- /dev/null +++ b/dongtai_web/views/strategys_type.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad + +# software: PyCharm +# project: lingzhi-webapi + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vul_level import IastVulLevel +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +from rest_framework import serializers + +class _StrategyTypeResponseDataTypeValueSerializer(serializers.Serializer): + strategy_id = serializers.CharField(help_text=_('The id of strategy')) + vul_name = serializers.CharField(help_text=_('The name of the vulnerability type targeted by the strategy')) + level_id = serializers.IntegerField( + help_text=_('The strategy corresponds to the level of vulnerability')) + + +class _StrategyTypeResponseDataStrategySerializer(serializers.Serializer): + level_id = serializers.IntegerField( + help_text=_('Level id of vulnerability')) + level_name = serializers.IntegerField( + help_text=_('Level name of vulnerability')) + type_value = _StrategyTypeResponseDataTypeValueSerializer(many=True) + + + +_ResponseSerializer = get_response_serializer( + data_serializer=_StrategyTypeResponseDataStrategySerializer(many=True), ) + + +class StrategyType(UserEndPoint): + + @extend_schema_with_envcheck( + 
tags=[_('Strategy')], + summary=_('Strategy Type'), + description=_( + "Get a list of strategy types." + ), + response_schema=_ResponseSerializer, + ) + def get(self, request): + queryset = IastStrategyModel.objects.filter(state="enable") + allType = IastVulLevel.objects.all().order_by("id") + result = [] + curTyp = {} + if queryset: + for item in queryset: + if not item.level: + continue + if item.level.id not in curTyp.keys(): + curTyp[item.level_id] = [] + curTyp[item.level_id].append({ + "strategy_id": item.id, + "level_id": item.level_id, + "vul_name": item.vul_name + }) + if allType: + for item in allType: + result.append({ + "level_id": item.id, + "level_name": item.name_type, + "type_value": curTyp.get(item.id, []) + }) + return R.success(data=result) diff --git a/dongtai_web/views/system_info.py b/dongtai_web/views/system_info.py new file mode 100644 index 000000000..0fbce9be3 --- /dev/null +++ b/dongtai_web/views/system_info.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +from dongtai_common.endpoint import R +from django.utils.translation import gettext_lazy as _ +from dongtai_common.endpoint import TalentAdminEndPoint + + +class SystemInfo(TalentAdminEndPoint): + name = "api-v1-system-info" + description = _("API - System Information Page") + + def get(self, request): + return R.success() diff --git a/dongtai_web/views/user_detail.py b/dongtai_web/views/user_detail.py new file mode 100644 index 000000000..898d6fcd7 --- /dev/null +++ b/dongtai_web/views/user_detail.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad + +# software: PyCharm +# project: lingzhi-webapi + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import TalentAdminEndPoint +from dongtai_common.models import User +from django.utils.translation import gettext_lazy as _ + + +class UserDetailEndPoint(TalentAdminEndPoint): + def get(self, request, user_id): + try: + user = User.objects.filter(id=user_id).first() + talent = user.get_talent() + + if talent: + current_talent = request.user.get_talent() + if current_talent == talent: + + department = user.get_department() + return R.success(data={ + 'username': user.get_username(), + 'department': department.get_department_name(), + 'talent': talent.get_talent_name() + }) + except BaseException: + pass + return R.failure(status=203, msg=_('no permission')) diff --git a/dongtai_web/views/user_info.py b/dongtai_web/views/user_info.py new file mode 100644 index 000000000..d50626042 --- /dev/null +++ b/dongtai_web/views/user_info.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad + +# software: PyCharm +# project: lingzhi-webapi +import logging + +from django.contrib.auth.models import Group + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from django.utils.translation import gettext_lazy as _ +from dongtai_conf.settings import SCA_SETUP + +logger = logging.getLogger("django") + + +class UserInfoEndpoint(UserEndPoint): + name = "api-v1-user-info" + description = _("User Info") + + def get(self, request): + user = request.user + group = Group.objects.filter(user=user).order_by("-id").first() + + return R.success(data={ + 'userid': user.id if not user.is_anonymous else -1, + 'username': user.get_username(), + 'role': 3 if group is None else 2 if group.name == 'talent_admin' else 1 if group.name == 'system_admin' else 0, + 'role_name': '' if group is None else 
group.name, + 'sca_setup': not SCA_SETUP, + }) diff --git a/dongtai_web/views/user_login.py b/dongtai_web/views/user_login.py new file mode 100644 index 000000000..9dbf1be0e --- /dev/null +++ b/dongtai_web/views/user_login.py @@ -0,0 +1,55 @@ +#!/usr/local/env python +# -*- coding: utf-8 -*- +import logging + +from captcha.models import CaptchaStore +from django.contrib.auth import authenticate, login +from dongtai_web.utils import extend_schema_with_envcheck +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from django.utils.translation import gettext_lazy as _ +import time + +logger = logging.getLogger("dongtai-webapi") + + +class UserLogin(UserEndPoint): + permission_classes = [] + authentication_classes = [] + name = "user_views_login" + description = _("User login") + + @extend_schema_with_envcheck([], { + 'username': "", + 'password': "", + 'captcha_hash_key': "", + 'captcha': "" + }) + def post(self, request): + try: + captcha_hash_key = request.data["captcha_hash_key"] + captcha = request.data["captcha"] + if captcha_hash_key and captcha: + captcha_obj = CaptchaStore.objects.get(hashkey=captcha_hash_key) + if int(captcha_obj.expiration.timestamp()) < int(time.time()): + return R.failure(status=203, msg=_('Captcha timed out')) + if captcha_obj.response == captcha.lower(): + username = request.data["username"] + password = request.data["password"] + user = authenticate(username=username, password=password) + if user is not None and user.is_active: + login(request, user) + return R.success( + msg=_('Login successful'), + data={'default_language': user.default_language}) + else: + logger.warn( + f"user [{username}] login failure, rease: {'user not exist' if user is None else 'user is disable'}") + return R.failure(status=202, msg=_('Login failed')) + else: + return R.failure(status=203, msg=_('Verification code error')) + else: + return R.failure(status=204, msg=_('verification code should not be empty')) + except Exception as e: + logger.error(e) + return R.failure(status=202, msg=_('Login failed')) diff --git a/dongtai_web/views/user_logout.py b/dongtai_web/views/user_logout.py new file mode 100644 index 000000000..6fa80c148 --- /dev/null +++ b/dongtai_web/views/user_logout.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/8/11 15:02 +# software: PyCharm +# project: webapi +import logging + +from django.contrib.auth import logout +from django.http import JsonResponse +from dongtai_common.endpoint import AnonymousAuthEndPoint +from django.utils.translation import gettext_lazy as _ + +from dongtai_conf import settings + +logger = logging.getLogger("django") +from datetime import datetime + +class UserLogout(AnonymousAuthEndPoint): + name = "api-v1-user-logout" + description = _("Sign out") + + def get(self, request): + if request.user.is_active: + logout(request) + response = JsonResponse({ + "status": 201, + "msg": _('Sign out successfully') + }) + response.delete_cookie(key=settings.CSRF_COOKIE_NAME,domain=settings.SESSION_COOKIE_DOMAIN) + response.delete_cookie(key='sessionid',domain=settings.SESSION_COOKIE_DOMAIN) + return response diff --git a/dongtai_web/views/user_passwrd.py b/dongtai_web/views/user_passwrd.py new file mode 100644 index 000000000..28eb045c9 --- /dev/null +++ b/dongtai_web/views/user_passwrd.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad + +# software: PyCharm +# project: lingzhi-webapi +import logging +from django.contrib.auth import 
authenticate + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from django.utils.translation import gettext_lazy as _ + +logger = logging.getLogger("dongtai-webapi") + +class UserPassword(UserEndPoint): + name = "api-v1-user-password" + description = _("Change Password") + + def post(self, request): + user = request.user + try: + if not request.data['old_password'] or not request.data['new_password']: + return R.failure(msg=_('Password should not be empty')) + else: + user_check = authenticate(username=user.username, password=request.data['old_password']) + if user_check is not None and user_check.is_active: + password = request.data['new_password'] + + user.set_password(password) + user.save(update_fields=['password']) + return R.success(msg=_('Password has been changed successfully')) + else: + return R.failure(msg=_('Incorrect old password')) + except Exception as e: + logger.error(e) + return R.failure(msg=_('Incorrect')) + diff --git a/dongtai_web/views/user_passwrd_reset.py b/dongtai_web/views/user_passwrd_reset.py new file mode 100644 index 000000000..24bd4f017 --- /dev/null +++ b/dongtai_web/views/user_passwrd_reset.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import logging + +from dongtai_common.models import User + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import TalentAdminEndPoint +from django.utils.translation import gettext_lazy as _ + +logger = logging.getLogger("dongtai-webapi") + + +class UserPasswordReset(TalentAdminEndPoint): + name = "api-v1-user-password-reset" + description = _("Reset Password") + + def post(self, request): + try: + user_id = request.data.get('userId') + if user_id: + user = User.objects.filter(id=user_id).first() + if user: + username = user.get_username() + user.set_password(f'{username}@123') + user.save(update_fields=['password']) + msg = _('User {} password reset success').format(username) + return R.success(msg=msg) + else: + msg = _('User does not exist') + logger.warning(msg) + return R.failure(msg=msg) + else: + msg = _('UserID is empty') + logger.error(_('UserID is empty')) + return R.failure(msg=msg) + except ValueError as e: + msg = _('UserID must be a numeric') + logger.error(msg, exc_info=True) + except Exception as e: + msg = _('Password reset failed, reasons: {E}').format(e) + logger.error(msg, exc_info=True) + return R.failure(msg="Password reset failed") diff --git a/dongtai_web/views/user_register_batch.py b/dongtai_web/views/user_register_batch.py new file mode 100644 index 000000000..8335a88e1 --- /dev/null +++ b/dongtai_web/views/user_register_batch.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi + +import csv +import logging + +from django.contrib.auth.models import Group +from dongtai_common.models import User +from dongtai_common.models.department import Department + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import SystemAdminEndPoint +from dongtai_web.notify.email import Email +from dongtai_conf import settings +from django.utils.translation import gettext_lazy as _ +import string +import random +logger = logging.getLogger("dongtai-webapi") + +GONG_ZHONG_HAO_IMAGE = 
"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAECAQIDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD9U6KKKACikzRmgBaKTNGaAFopM0ZFAC0UmaWgAopM0ZoAWikoyPWgBaKSloAKKKKACikoBBoAWiikzQAtFJnijNAC0UlGR60ALRSZozQAtFJmjNAC0UUUAFFFFABSHkUtIelAH5Aft6ft5/HX4K/tY+OvBvgzxz/Y3hvTfsP2Wy/siwn8vzLC3lf55YGc5eRzyxxnA4AFeAf8PRv2nf8Aopn/AJQNL/8Akal/4Kj/APJ9nxN/7hn/AKa7Sv3T+KXxT8MfBbwJqfjLxlqf9j+G9N8r7Ve/Z5Z/L8yVIk+SJWc5eRBwpxnJ4BNAH4V/8PRv2nf+imf+UDS//kaj/h6N+07/ANFM/wDKBpf/AMjV+qx/4Ki/sxg4PxLOf+wBqn/yNSf8PRf2Y/8Aoph/8EGqf/I1AH5Vf8PRv2nf+imf+UDS/wD5Gr7/AP8AglN+1F8Tv2lP+Fo/8LH8Tf8ACRf2L/Zf2D/QLW18nzvtfmf6iJN2fKj+9nG3jGTn1b/gqP8A8mJ/E3/uGf8Ap0tK+VP+CGX/ADWz/uCf+39AHKft6ft5/HX4K/tY+OvBvgzxz/Y3hvTfsP2Wy/siwn8vzLC3lf55YGc5eRzyxxnA4AFfsBX4A/8ABUf/AJPs+Jv/AHDP/TXaV8q0AftR/wAFW/2o/id+zWfhd/wrjxN/wjv9tf2p9v8A9AtbrzvJ+yeV/r4n2482T7uM7uc4GPgD/h6N+07/ANFM/wDKBpf/AMjV8rUUAf0Uft6/FHxP8F/2TvHPjLwbqf8AY3iTTPsP2S9+zxT+X5l/bxP8kqshykjjlTjORyAa/ID/AIei/tOYx/wszjpj+wNL/wDkav3U+KXxT8MfBbwJqfjLxlqf9j+G9N8r7Ve/Z5Z/L8yVIk+SJWc5eRBwpxnJ4BNeAH/gqL+zHyP+FlnP/YA1T/5GoA6r9gr4o+J/jR+yd4G8ZeMtT/tnxJqf277Xe/Z4oPM8u/uIk+SJVQYSNBwozjJ5JNfQFfit+1J+y58Tv20Pjt4m+Mnwb8M/8Jj8N/Ev2X+yta+32tj9p+z2sVrN+5upYpk2zW8qfOgztyMqQT9Aft6/t5/Ar40/sneOvBvgzxwdZ8Sal9h+y2X9kX8HmeXf28r/ADywKgwkbnlhnGByQKAP0por+az4G/st/E79pIa2fhz4Z/4SIaL5H2//AE+1tfJ87zPL/wBfKm7PlSfdzjbzjIzyvxS+Fvif4LeO9T8G+MtM/sbxJpvlfarLz4p/L8yJJU+eJmQ5SRDwxxnB5BFAH7+/t6/FHxP8F/2TvHPjLwbqf9jeJNM+w/ZL37PFP5fmX9vE/wAkqshykjjlTjORyAa+f/8AglJ+1F8Tv2k/+FoD4jeJv+EiGi/2X9g/0C1tfJ877X5n+oiTdnyo/vZxt4xk5/Nb9gr4o+GPgv8AtY+BvGXjLU/7H8N6Z9u+13v2eWfy/MsLiJPkiVnOXkQcKcZyeATX7AD/AIKifsyZx/wss56Y/sDVP/kagD6pr8f/ANgv9vP46/Gr9rHwL4N8Z+Of7Z8N6l9u+1WX9kWEHmeXYXEqfPFArjDxoeGGcYPBIr9Vfhb8U/DHxp8CaZ4y8G6n/bHhvUvN+y3v2eWDzPLleJ/klVXGHjccqM4yOCDXlX7evwu8T/Gj9k7xz4N8G6Z/bPiTU/sP2Sy+0RQeZ5d/byv88rKgwkbnlhnGByQKAPf8fLivyA/b0/bz+OvwV/ax8deDfBnjn+xvDem/Yfstl/ZFhP5fmWFvK/zywM5y8jnljjOBwAK8A/4ddftOHkfDPj/sP6X/APJNH/Drn9p3/omf/lf0v/5JoA/X/wDb1+KPif4L/sneOfGXg3U/7G8SaZ9h+yXv2eKfy/Mv7eJ/klVkOUkccqcZyOQDX5Af8PRf2nBwPiZx/wBgDS//AJGr9Vf+Co//ACYn8Tf+4Z/6dLSvlT/ghkcf8LsJ6D+xP/b+gD5V/wCHo37Tv/RTP/KBpf8A8jUf8PRv2nf+imf+UDS//kav2A+KP7enwK+C/jrU/BvjLxwdG8Sab5X2qy/si/n8vzIklT54oGQ5SRDwxxnBwQRXKf8AD0X9mP8A6KYf/BBqn/yNQB+VX/D0b9p3/opn/lA0v/5Go/4ejftO/wDRTP8AygaX/wDI1ftT8Df2pPhj+0n/AG2Phx4m/wCEi/sXyPt/+gXVr5PneZ5X+viTdnypPu5xt5xkZ/Ff/gqP/wAn2fE3/uGf+mu0oA/f6iiigAooooAKQ9KWkPSgD8Av+Co//J9nxN/7hn/prtK/VT/gqP8A8mKfEz/uGf8ApztK/Kv/AIKj/wDJ9nxN/wC4Z/6a7Sv1U/4Kj/8AJifxN/7hn/p0tKAPwBzRmiigD9/v+Co//JifxN/7hn/p0tK+VP8Aghl/zWz/ALgn/t/X1X/wVH/5MT+Jv/cM/wDTpaV8q/8ABDHr8bP+4J/7f0AeqftRf8Epf+GlPjr4m+I//C0f+Ec/tr7L/wAS3/hH/tXk+TaxQf637Um7PlbvujG7HOMnyr/hxl/1Wz/y1P8A7tr9Va+f/wBvX4o+J/gv+yd458ZeDdT/ALG8SaZ9h+yXv2eKfy/Mv7eJ/klVkOUkccqcZyOQDQB8Vf8ADjIf9Fs/8tT/AO7a+AP2o/gZ/wAM2fHXxN8OP7b/AOEi/sX7L/xMvsn2XzvOtYp/9Vvfbjzdv3jnbnj
OB6r/AMPRf2nMY/4WWMdMf2Bpf/yNXgPxS+KXif40+O9T8ZeMtT/tnxJqXlfar3yIoPM8uJIk+SJVQYSNBwozjJ5JNAH9E/7UnwM/4aT+BXib4c/23/wjv9tfZf8AiZ/ZPtXk+TdRT/6rem7PlbfvDG7POMH4BH/BDPv/AMLt/wDLU/8Au2vtT9vX4o+J/gv+yd458ZeDdT/sbxJpn2H7Je/Z4p/L8y/t4n+SVWQ5SRxypxnI5ANfkB/w9F/acHA+JnH/AGANL/8AkagD9qf2XPgZ/wAM2fArwz8Of7bHiL+xftX/ABM/sn2XzvOupZ/9Vvfbjzdv3jnbnjOB/Nbya+qf+Ho37Tv/AEUz/wAoGl//ACNXK/sFfC7wx8aP2sfA3g3xlpn9seG9T+3fa7L7RLB5nl2FxKnzxMrjDxoeGGcYPBIoA+1P+CGf/NbM/wDUE/8Ab+vVf2ov+CUn/DSfx18TfEf/AIWh/wAI5/bX2X/iW/8ACP8A2ryfJtYoP9b9qTdnyt33RjdjnGT9VfAz9lz4Y/s2nWz8OfDP/COnWvI+3/6fdXXneT5nl/6+V9uPNk+7jO7nOBj1agD8Vv2ov+CUo/Zr+BXib4j/APC0f+Ej/sX7N/xLP+Ef+y+d511FB/rftT7cebu+6c7ccZyPgHkH0r7+/Zb/AGo/id+2h8dvDPwb+Mnib/hMfhv4l+1f2rov2C1sftP2e1luof31rFFMm2a3if5HGduDlSQT/gq1+y78Mf2bP+FXf8K48M/8I7/bX9qfb/8AT7q687yfsnlf6+V9uPNk+7jO7nOBgAP2Xf8Agq3/AMM1/Arwz8OP+FXf8JH/AGL9q/4mf/CQfZfO866ln/1X2V9uPN2/eOdueM4Hqv8Aw/N/6on/AOXX/wDcVflXX7/f8Ouv2Y/+iZn/AMH+qf8AyTQB8q/8PzP+qJf+XX/9xUn/AA/N/wCqJ/8Al1//AHFX1X/w66/ZjHI+Ghz/ANh/VP8A5Jr8gP29fhd4Y+C/7WPjnwb4N0z+x/DemfYfsll9oln8vzLC3lf55WZzl5HPLHGcDgAUAfr/AP8ABUf/AJMT+Jv/AHDP/TpaV8qf8EMv+a2f9wT/ANv6+q/+Co//ACYn8Tf+4Z/6dLSvlT/ghl/zWz/uCf8At/QB8rf8FRuP26/iZ/3DP/TZaV8q5r6q/wCCo/8AyfZ8Tf8AuGf+mu0r5VoA/VP/AIIZf81s/wC4J/7f18rf8FR/+T7Pib/3DP8A012lfVP/AAQy/wCa2f8AcE/9v6+Vv+Co/wDyfZ8Tf+4Z/wCmu0oA/f6iiigAooooAKQ9KWkPSgD8Av8AgqP/AMn2fE3/ALhn/prtK/VT/h6L+zH/ANFMP/gg1T/5Gryn9qL/AIJS/wDDSnx18TfEf/haP/COf219l/4lv/CP/avJ8m1ig/1v2pN2fK3fdGN2OcZPlf8Aw4y/6rZ/5an/AN20AfVX/D0X9mP/AKKYf/BBqn/yNR/w9F/Zj/6KYf8AwQap/wDI1fKv/DjP/qtv/lqf/dtH/DjL/qtn/lqf/dtAHV/t6/t5/Ar40/sneOvBvgzxwdZ8Sal9h+y2X9kX8HmeXf28r/PLAqDCRueWGcYHJArlP+CGYx/wuz/uCf8At/R/w4y/6rZ/5an/AN219UfsM/sM/wDDF/8Awm3/ABW3/CY/8JL9i/5hP2H7N9n+0f8ATeXfu8/2xt754APyt/4Kj/8AJ9nxN/7hn/prtK9V/Zb/AGXPid+xf8dvDPxk+Mnhn/hDvhv4a+1f2rrX2+1vvs32i1ltYf3NrLLM+6a4iT5EON2ThQSPKv8AgqP/AMn2fE3/ALhn/prtK/aj9qP4Gf8ADSfwK8TfDn+2x4d/tr7L/wATP7J9q8nybqKf/Vb03Z8rb94Y3Z5xggB8DP2o/hj+0n/bY+HPib/hIv7F8j7f/oF1a+T53meV/r4k3Z8qT7ucbecZGfxX/wCCo/8AyfZ8Tf8AuGf+mu0r6q/5Qv8A/VYj8Sf+4H/Z39n/APgT5vmfb/8AY2+V/Fu+UP7DH/Dyc/8ADR3/AAm3/Cuv+E0/5lr+yv7U+x/ZP9A/4+fOh8zf9k8z/Vrt37ecbiAeVfst/sufE79i/wCO3hn4yfGTwz/wh3w38Nfav7V1r7fa332b7Ray2sP7m1llmfdNcRJ8iHG7JwoJHqn7c/8Axsn/AOEJ/wCGcv8Ai4v/AAhf27+3v+YX9j+1/Z/s3/H75Pmb/slx/q92NnzY3Llf+G5/+Hk//GOX/CE/8K6/4TT/AJmX+1f7U+x/ZP8ATv8Aj28mHzN/2Ty/9YuN+7nG0p/yhg/6rF/wsn/uB/2d/Z//AIE+b5n2/wD2NvlfxbvlAPVv2W/2o/hj+xf8CfDPwb+Mnib/AIQ74keGvtX9q6L/AGfdX32b7RdS3UP761ilhfdDcRP8jnG7BwwIHyr+y3+y58Tv2L/jt4Z+Mnxk8M/8Id8N/DX2r+1da+32t99m+0WstrD+5tZZZn3TXESfIhxuycKCR6r/AMMMf8PJ/wDjI3/hNv8AhXX/AAmn/Mtf2V/an2P7J/oH/Hz50Pmb/snmf6tdu/bzt3H7+/aj+Bn/AA0l8CvE3w5/tv8A4R3+2vsv/Ez+yfavJ8m6in/1W9N2fK2/eGN2ecYIAfA39qT4YftJDW/+Fc+Jv+Ei/sXyPt/+gXVr5PneZ5f+viTdnypPu5xt5xkZ5b4o/t6fAr4L+OtT8G+MvHB0bxJpvlfarL+yL+fy/MiSVPnigZDlJEPDHGcHBBFfFX/KGD/qsX/Cyf8AuB/2d/Z//gT5vmfb/wDY2+V/Fu+X4A/aj+Of/DSfx18TfEcaIfDv9tfZf+Jb9r+1eT5NrFB/rdibs+Vu+6Mbsc4yQD9qf+Co/wDyYn8Tf+4Z/wCnS0r5U/4IZf8ANbP+4J/7f0v/AA3P/wAPJ/8AjHH/AIQn/hXX/Caf8zL/AGr/AGp9j+yf6f8A8e3kw+Zv+yeX/rF2793O3afqj9hn9hr/AIYu/wCE2/4rb/hMf+El+xf8wn7D9n+z/aP+m8u/d5/tjb3zwAflb/wVH/5Ps+Jv/cM/9NdpXqv7Lf7LnxO/Yv8Ajt4Z+Mnxk8M/8Id8N/DX2r+1da+32t99m+0WstrD+5tZZZn3TXESfIhxuycKCR9U/tRf8Epf+GlPjr4m+I//AAtH/hHP7a+y/wDEt/4R/wC1eT5NrFB/rftSbs+Vu+6Mbsc4yfVf+Co3/JinxMx1/wCJZ/6c7SgD5V/bo/42TDwT/wAM5f8AFxP+EL+3f29/zC/sf2v7P9m/4/fJ8zf9kuP9Xu27PmxuXP5r/FL4W+J/gt471Pwb4y0z+xvEmm+V9qsvPin8vzIklT54mZDlJEPDHGcHkEV+lH/BDPn/AIXZn/qCf+39fKv/AAVG/wCT6/ib/wBwz/02WlAH6q/8FR/+TE/ib/3DP/TpaV8Af8Epf2ovhj+zX/wtH/hY/ib/AIR3+2v7L+wf6BdXXneT9r83/URPtx5sf3sZ3cZwcfqn+1H8Df8AhpP4FeJvhx/bf/CO/w
BtfZf+Jl9k+1eT5N1FP/qt6bs+Vt+8Mbs84wfgH/hxnx/yWz/y1P8A7toA+qv+Hov7Mf8A0Uw/+CDVP/kaj/h6L+zH/wBFMP8A4INU/wDkavlX/hxl/wBVs/8ALU/+7aP+HGX/AFWz/wAtT/7toA+qv+Hov7MZ4HxLOf8AsAap/wDI1fkB+3r8UfDHxo/ax8c+MvBup/2x4b1P7D9kvfs8sHmeXYW8T/JKquMPG45UZxkcEGvtX/hxn/1W3/y1P/u2k/4cZf8AVbP/AC1P/u2gD9VaKKKACiiigAoopD0oAWvn/wDb1+KPif4L/sneOfGXg3U/7G8SaZ9h+yXv2eKfy/Mv7eJ/klVkOUkccqcZyOQDX5A/8FR/+T7Pib/3DP8A012lfv8AUAfgB/w9F/acHA+JnH/YA0v/AORqP+Ho37Tv/RTP/KBpf/yNX7U/HP8Aak+GP7Nh0QfEfxN/wjv9tef9g/0C6uvO8ny/N/1ET7cebH97Gd3GcHHlf/D0X9mP/oph/wDBBqn/AMjUAdV+3r8UfE/wX/ZO8c+MvBup/wBjeJNM+w/ZL37PFP5fmX9vE/ySqyHKSOOVOM5HIBr5/wD+CUv7UXxO/aTHxRHxG8Tf8JENF/sv7B/oFra+T532vzP9REm7PlR/ezjbxjJz8rfst/sufE79i/47eGfjJ8ZPDP8Awh3w38Nfav7V1r7fa332b7Ray2sP7m1llmfdNcRJ8iHG7JwoJB/wVa/ai+GP7Sn/AAq7/hXHib/hIv7F/tT7f/oF1a+T532Tyv8AXxJuz5Un3c4284yMgHlX/BUf/k+z4m/9wz/012lJ/wAPRv2nf+imf+UDS/8A5Gr9Vf8Aglx/yYn8Mv8AuJ/+nS7rwD9vX9vP4FfGn9k7x14N8GeODrPiTUvsP2Wy/si/g8zy7+3lf55YFQYSNzywzjA5IFAHJ/sMf8bJ/wDhNv8Aho7/AIuL/wAIX9i/sH/mF/Y/tf2j7T/x4+R5m/7Lb/6zdt2fLjc2fK/2pP2o/id+xf8AHbxN8G/g34m/4Q74b+Gvsv8AZWi/YLW++zfaLWK6m/fXUUsz7priV/nc43YGFAA+Vfgb+y38Tv2khrZ+HPhn/hIhovkfb/8AT7W18nzvM8v/AF8qbs+VJ93ONvOMjPK/FL4W+J/gt471Pwb4y0z+xvEmm+V9qsvPin8vzIklT54mZDlJEPDHGcHkEUAfr9+1J+y58Mf2L/gT4m+Mnwb8M/8ACHfEjw19l/srWv7Qur77N9ouorWb9zdSywvuhuJU+dDjdkYYAj8q/jl+1L8T/wBpL+xP+FjeJv8AhIv7F8/7B/oFra+T53l+Z/qIk3Z8qP72cbeMZOfKq+//APglL+1F8Mf2bP8AhaP/AAsfxN/wjv8AbX9l/YP9AurrzvJ+1+b/AKiJ9uPNj+9jO7jODgA+f/hd+3p8dfgt4F0zwb4M8cDRvDem+b9lsv7IsJ/L8yV5X+eWBnOXkc8scZwMAAV+1P7evxR8T/Bf9k7xz4y8G6n/AGN4k0z7D9kvfs8U/l+Zf28T/JKrIcpI45U4zkcgGvzW/ak/Zc+J37aHx28TfGT4N+Gf+Ex+G/iX7L/ZWtfb7Wx+0/Z7WK1m/c3UsUybZreVPnQZ25GVIJ+//wDgqP8A8mJ/E3/uGf8Ap0tKAPxW+OX7UnxO/aS/sQfEbxN/wkQ0Xz/sH+gWtr5PneX5n+oiTdnyo/vZxt4xk5/Sr9gr9gz4FfGn9k7wL4y8Z+BzrPiTUvt32q9/te/g8zy7+4iT5Ip1QYSNBwozjJ5JNfP/APwSl/ai+GP7Nf8AwtH/AIWP4m/4R3+2v7L+wf6BdXXneT9r83/URPtx5sf3sZ3cZwcH7Un7LnxO/bQ+O3ib4yfBvwz/AMJj8N/Ev2X+yta+32tj9p+z2sVrN+5upYpk2zW8qfOgztyMqQSAfpV8Lv2C/gV8F/HWmeMvBvgc6N4k03zfst7/AGvfz+X5kTxP8ks7IcpI45U4zkYIBr5+/wCCrX7UXxO/Zq/4VcPhx4m/4Rwa1/an2/8A0C1uvO8n7J5X+vifbjzZPu4zu5zgY+Af+CXH/J9nwy/7if8A6a7uvv7/AIKtfsufE79pT/hV3/CuPDP/AAkX9i/2p9v/ANPtbXyfO+yeV/r5U3Z8qT7ucbecZGQD4A/4ejftO/8ARTP/ACgaX/8AI1cr8Uf29Pjr8afAup+DfGfjgaz4b1LyvtVl/ZFhB5nlypKnzxQK4w8aHhhnGDkEiuq/4dc/tO/9Ez/8r+l//JNfa37ev7efwK+NP7J3jrwb4M8cHWfEmpfYfstl/ZF/B5nl39vK/wA8sCoMJG55YZxgckCgDlP+CGZyfjaT3/sT/wBv6+Vf+Co//J9nxN/7hn/prtK9V/4JS/tRfDH9ms/FH/hY/ib/AIR3+2v7L+wf6BdXXneT9r8z/URPtx5sf3sZ3cZwcff/APw9F/Zj/wCimH/wQap/8jUAflV/w9G/ad/6KZ/5QNL/APkaj/h6L+04eD8TOP8AsAaX/wDI1erfst/sufE79i/47eGfjJ8ZPDP/AAh3w38Nfav7V1r7fa332b7Ray2sP7m1llmfdNcRJ8iHG7JwoJHqn7c//Gyf/hCf+Gcv+Li/8IX9u/t7/mF/Y/tf2f7N/wAfvk+Zv+yXH+r3Y2fNjcuQD7V/YK+KPif40fsneBvGXjLU/wC2fEmp/bvtd79nig8zy7+4iT5IlVBhI0HCjOMnkk0ft6/FHxP8F/2TvHPjLwbqf9jeJNM+w/ZL37PFP5fmX9vE/wAkqshykjjlTjORyAa8A/Zb/aj+GP7F/wACfDPwb+Mnib/hDviR4a+1f2rov9n3V99m+0XUt1D++tYpYX3Q3ET/ACOcbsHDAgfkD8Lfhb4n+NPjvTPBvg3TP7Z8Sal5v2Wy8+KDzPLieV/nlZUGEjc8sM4wOSBQB79/w9F/acHA+JnH/YA0v/5Go/4ejftO/wDRTP8AygaX/wDI1ff3/BKX9l74m/s1j4on4j+Gf+EdGtDSzYH7fa3XneT9r83/AFEr7cebH97Gd3GcHH0F8Uf29PgV8F/HWp+DfGXjg6N4k03yvtVl/ZF/P5fmRJKnzxQMhykiHhjjODggigD3+iiigAooooAKQ9KWkPIoA+AP2ov+CUv/AA0p8dfE3xH/AOFo/wDCOf219l/4lv8Awj/2ryfJtYoP9b9qTdnyt33RjdjnGT9/5r8gP29P28/jr8Ff2sfHXg3wZ45/sbw3pv2H7LZf2RYT+X5lhbyv88sDOcvI55Y4zgcACvAP+Ho37Tv/AEUz/wAoGl//ACNQB9V/8FzevwUx/wBRv/2wr8q+a9V+OX7UnxP/AGk/7E/4WN4m/wCEi/sXz/sH+gWtr5PneX5v+oiTdnyo/vZxt4xk5/Sr9gr9gz4FfGn9k7wL4y8Z+BzrPiTUvt32q9/te/g8zy7+4iT5Ip1QYSNBwozjJ5JNAH2n+1H8DP8AhpP4FeJvhz/bY8O/219l/wCJn9k+1eT5N1FP/qt6bs+Vt+8Mbs84wfgE/wDBD
PPX42f+Wp/9218qf8PRv2nf+imf+UDS/wD5Go/4ei/tOHg/Ezj/ALAGl/8AyNQB+1P7LfwM/wCGbPgT4Z+HH9t/8JF/Yv2r/iZfZPsvneddSz/6re+3Hm7fvHO3PGcD8Av2XPgZ/wANJ/HXwz8ODrZ8O/219q/4mX2T7V5Pk2ss/wDqt6bs+Vt+8Mbs84wf3T/YK+KPif40fsneBvGXjLU/7Z8San9u+13v2eKDzPLv7iJPkiVUGEjQcKM4yeSTS/C79gv4FfBfx1pnjLwb4HOjeJNN837Le/2vfz+X5kTxP8ks7IcpI45U4zkYIBoA5P8AYZ/YZ/4Yv/4TY/8ACbf8Jj/wkv2L/mE/Yfs32f7R/wBN5d+7z/bG3vnj8q/+Co3/ACfX8Tf+4Z/6bLSv3+xhcDoK/AL/AIKj/wDJ9nxN/wC4Z/6a7SgDyn9lz4Gf8NJ/HXwz8OP7b/4R3+2vtX/Ey+yfavJ8m1ln/wBVvTdnytv3hjdnnGD6v+3L+w1/wxd/whOPG3/CY/8ACSfbf+YV9h+zfZ/s/wD02l37vtHtjb3zx8//AAt+KXif4LeO9M8ZeDdT/sbxJpvm/Zb3yIp/L8yJ4n+SVWQ5SRxypxnI5ANdV8cv2o/id+0kNEHxG8Tf8JENF8/7B/oFra+T53l+Z/qIk3Z8qP72cbeMZOQD6q/Zd/4KtD9mz4FeGfhx/wAKu/4SP+xftX/Ez/4SD7L53nXUs/8Aqvsr7cebt+8c7c8ZwD9qL/gq1/w0p8CvE3w4Pwu/4Rz+2vsv/Ey/4SD7V5Pk3UU/+q+ypuz5W37wxuzzjB+AKKAFGQeK+/v2Xf8Agqz/AMM2fArwz8OP+FXf8JH/AGL9q/4mf/CQfZfO866ln/1X2V9uPN2/eOdueM4B/wAEpP2Xfhj+0p/wtH/hY/hn/hIv7F/sv7B/p91a+T532vzf9RKm7PlR/ezjbxjJz9//APDrr9mP/omZ/wDB/qn/AMk0AfKo/YY/4dsH/ho7/hNv+Fi/8IX/AMy1/ZX9l/bPtf8AoH/Hz503l7Ptfmf6tt2zbxncD/h+Zjr8E/8Ay6//ALiryr9lv9qP4nftofHbwz8G/jJ4m/4TH4b+JftX9q6L9gtbH7T9ntZbqH99axRTJtmt4n+Rxnbg5UkH7/H/AAS6/Zj/AOiZ/wDlf1T/AOSaAPlX/h+b/wBUT/8ALr/+4q8q/ai/4JS/8M2fArxN8Rv+Fo/8JH/Yv2X/AIln/CP/AGXzvOuooP8AW/an2483d905244zkff/APw66/Zj/wCiZn/wf6p/8k1+QHxR/b0+Ovxp8C6n4N8Z+OBrPhvUvK+1WX9kWEHmeXKkqfPFArjDxoeGGcYOQSKAPAec19//ALLv/BKX/hpT4FeGfiP/AMLR/wCEc/tr7V/xLP8AhH/tXk+TdSwf637Um7PlbvujG7HOMk/4JS/svfDL9pRviifiP4Z/4SI6L/ZZsD9vurXyfO+1+Z/qJU3Z8qP72cbeMZOf19+Fvws8MfBbwJpng3wbpn9j+G9N837LZfaJZ/L8yV5X+eVmc5eRzyxxnA4AFAHgH/BUbj9hT4mf9wz/ANOdpXyp/wAEM+f+F2Z/6gn/ALf19V/8FR/+TE/ib/3DP/TpaV+K3wM/ak+J/wCzZ/bf/CuPE3/CO/215H2//QLW687yfM8r/XxPtx5sn3cZ3c5wMAH6p/tRf8Epv+Gk/jr4m+I//C0f+Ec/tr7N/wASz/hH/tXk+TaxQf637Um7PlbvujG7HOMn4B/4Jcj/AIzr+Gef+on/AOmy7pP+Ho37Tv8A0Uz/AMoGl/8AyNX6AftSfsufDH9i/wCBPib4yfBvwz/wh3xI8NfZf7K1r+0Lq++zfaLqK1m/c3UssL7obiVPnQ43ZGGAIAPv3jFfAP7UX/BKb/hpP46+JviP/wALR/4Rz+2vs3/Es/4R/wC1eT5NrFB/rftSbs+Vu+6Mbsc4yT/glN+1F8Tv2lP+Fo/8LH8Tf8JF/Yv9l/YP9AtbXyfO+1+Z/qIk3Z8qP72cbeMZOfn/APb0/bz+OvwV/ax8deDfBnjn+xvDem/Yfstl/ZFhP5fmWFvK/wA8sDOcvI55Y4zgcACgD9gKKKKACiiigApCcAk9BS0h6UAeA/FH9vT4FfBfx1qfg3xl44OjeJNN8r7VZf2Rfz+X5kSSp88UDIcpIh4Y4zg4IIrlP+Co/wDyYn8Tf+4Z/wCnS0ryn9qL/glKf2k/jr4m+I//AAtH/hHP7a+zf8Sz/hH/ALV5Pk2sUH+t+1Juz5W77oxuxzjJ8r/4bn/4eT/8Y4/8IT/wrr/hNP8AmZf7V/tT7H9k/wBP/wCPbyYfM3/ZPL/1i7d+7nbtIB+Vfev3+/4Jcf8AJifwy/7if/p0u6/Kv9uX9hkfsX/8IT/xW3/CY/8ACS/bf+YV9h+zfZ/s/wD02l37vtHtjb3zx6r+y7/wVa/4Zr+BXhn4cf8ACrv+Ej/sX7V/xMv+Eg+y+d511LP/AKr7K+3Hm7fvHO3PGcAA/X74pfFPwx8FvAmp+MvGWp/2P4b03yvtV79nln8vzJUiT5IlZzl5EHCnGcngE14Cf+Cov7MYOD8Szn/sAap/8jV6n+1H8Df+Gk/gV4m+HH9t/wDCO/219l/4mX2T7V5Pk3UU/wDqt6bs+Vt+8Mbs84wfxX/bm/YZ/wCGL/8AhCf+K2/4TE+Jftv/ADCvsP2b7P8AZ/8ApvLv3ef7Y2988AH6qf8AD0X9mP8A6KYf/BBqn/yNR/wVH/5MT+Jv/cM/9OlpXwB+y7/wSl/4aT+BXhn4j/8AC0f+Ec/tr7V/xLP+Ef8AtXk+TdSwf637Um7PlbvujG7HOMn1X/huf/h5P/xjj/whP/Cuv+E0/wCZl/tX+1Psf2T/AE//AI9vJh8zf9k8v/WLt37ucbSAeVf8Epf2ovhj+zZ/wtH/AIWN4m/4R061/Zf2D/QLq687yftfmf6iJ9uPNj+9jO7jODj5/wD29fij4Y+NH7WPjnxl4N1P+2PDep/Yfsl79nlg8zy7C3if5JVVxh43HKjOMjgg11n7cv7DX/DF/wDwhOPG3/CY/wDCS/bf+YT9h+z/AGf7P/02l37vtHtjb3zx6p+y7/wSlH7SnwK8M/Ef/haP/COf219p/wCJZ/wj/wBq8nybqWD/AFv2pN2fK3fdGN2OcZIB5ro//BK39pbUdUtrW48B22lQTSBHvbvXbBoYAT99xFM7kD/ZVj6A1+of7C/7F9n+xtZ+LEXxg/i7UPEC2QvPK08wR2z2/n/KuHcnP2gj5sH5RwM4F39jH9sx/wBszwj4r1JPCR8GHSLuCyCrqn21pPNUneG8mPaR9DX0/FCkMSxxqERRgKBwBSAj+3R/3Zv+/L/4UjX8QHKzD/ti/wDhUzNtrwL4/fGKN721+HHh268zxDrcq2l1cQHP2GFiA7Ejo23J9gCeOM51KkacbyOzCYSrjavsqS832SW7fkc7+2V+xhof7Zf/AAiH9qeJtT8Of8I59s8r7HYed532jyM7t2MY8gYx13H0r079m74O6f8As4fB
fw98O7DUr3WbTRvtGy+ubQxySedcyznKgEDBlI+gr5i8G/tUXXgj4watLezyXfgvVdSkVo2O42wLYSZPbAG5e/J69fum1uY7q3jmikWaKRQySIQQwI4IIrKhiIYhPl6HfmmUYjKZQVdaSV0/09V1PL/2mvhBp/7RXwR8SfD2/wBXuPD9prH2YyailqZDCIbmKf7rbQcmIDkjrX4g/th/sN+MP2RdUsrm/vIPEng3U5fK07xDaR+Wsj7d3lyxZbynwGIG5gwUkMcMF/oWrx/9pP4E2f7Qnwb8XfDu6v10qHVlglhvmtvtH2ORZlcOse9M8x9Nw++exIrqPEPNf+CXH/Jifwy/7if/AKdLuvyA/YK+KPhj4L/tY+BvGXjLU/7H8N6Z9u+13v2eWfy/MsLiJPkiVnOXkQcKcZyeATX2oP26P+HbA/4Zx/4Qn/hYv/CGf8zL/a39l/bPtn+n/wDHt5E3l7Ptfl/6xt2zdxu2jyv9qL/glL/wzX8CvE3xH/4Wj/wkf9i/Zv8AiWf8I/8AZfO866ig/wBb9qfbjzd33TnbjjOQAff/APw9E/Zk6f8ACyznpj+wNU/+Rq/ID9vX4o+GPjR+1j458ZeDdT/tjw3qf2H7Je/Z5YPM8uwt4n+SVVcYeNxyozjI4INdX+w1+w1/w2h/wm2fG3/CHf8ACNfYv+YV9u+0/aPtH/TeLZt8j3zu7Y58q/aj+Bn/AAzZ8dfE3w4/tv8A4SP+xfsv/Ez+yfZfO861in/1W99uPN2/eOdueM4AB9Vfst/sufE79i/47eGfjJ8ZPDP/AAh3w38Nfav7V1r7fa332b7Ray2sP7m1llmfdNcRJ8iHG7JwoJH3+P8AgqL+zHwP+FlnP/YA1T/5Gr4A/ai/4KtD9pP4FeJvhx/wq7/hHP7a+y/8TP8A4SD7V5Pk3UU/+q+ypuz5W37wxuzzjB8r/Ya/Ya/4bQ/4TY/8Jt/wh3/CNfYv+YV9u+0/aPtH/TaLZt+z++d3bHIB+6Xwt+Kfhj40+BNM8ZeDdT/tjw3qXm/Zb37PLB5nlyvE/wAkqq4w8bjlRnGRwQa/mt+Fvwt8T/Gnx3png3wbpn9s+JNS837LZefFB5nlxPK/zysqDCRueWGcYHJAr9Kf+G6P+HbH/GOP/CE/8LF/4Qv/AJmX+1f7L+2fa/8AT/8Aj28mby9n2vy/9Y27Zu43bR6p+y7/AMEpf+Ga/jr4Z+I//C0f+Ej/ALF+0/8AEt/4R/7L53nWssH+t+1Ptx5u77pztxxnIAPKv2Fz/wAO2f8AhNj+0b/xbseNPsP9g/8AMU+2fZPtH2n/AI8fP8vZ9rt/9Zt3b/lztbH1X/w9F/Zj/wCimH/wQap/8jU39ub9hofto/8ACE/8Vt/wh3/CNfbf+YT9u+0faPs//TeLZt+z++d3bHP4sftR/Az/AIZs+Ovib4cDWz4i/sX7L/xM/sn2XzvOtYp/9Vvfbjzdv3jnbnjOAAf0p0UUUAFFFFABRRSE4BJ6CgBa8A+F37BfwK+C/jrTPGXg3wOdG8Sab5v2W9/te/n8vzInif5JZ2Q5SRxypxnIwQDR8Uf29PgV8F/HWp+DfGXjg6N4k03yvtVl/ZF/P5fmRJKnzxQMhykiHhjjODggivn/APak/aj+GP7aHwJ8TfBv4N+Jv+Ex+JHiX7L/AGVov9n3Vj9p+z3UV1N++uoooU2w28r/ADuM7cDLEAgH1T8cv2W/hj+0j/Yn/CxfDP8AwkX9i+f9gxf3Vr5Pm+X5n+olTdnyo/vZxt4xk5/Cz9vX4XeGPgv+1j458G+DdM/sfw3pn2H7JZfaJZ/L8ywt5X+eVmc5eRzyxxnA4AFfan7DH/Gtj/hNv+Gjf+Ldf8Jp9i/sH/mKfbPsn2j7T/x5ed5ez7Xb/wCs253/AC52tjyv9qT9lz4nftofHbxN8ZPg34Z/4TH4b+Jfsv8AZWtfb7Wx+0/Z7WK1m/c3UsUybZreVPnQZ25GVIJAPKf+Ho37Tv8A0Uz/AMoGl/8AyNXlfxy/al+J/wC0l/Yn/CxvE3/CRf2L5/2D/QLW18nzvL8z/URJuz5Uf3s428Yyc/VX7Lf7LnxO/Yv+O3hn4yfGTwz/AMId8N/DX2r+1da+32t99m+0WstrD+5tZZZn3TXESfIhxuycKCR+qfwN/ak+GH7SQ1v/AIVz4m/4SL+xfI+3/wCgXVr5PneZ5X+viTdnypPu5xt5xkZAPws+F37enx1+C3gXTPBvgzxwNG8N6b5v2Wy/siwn8vzJXlf55YGc5eRzyxxnAwABX6VftSfsufDH9i/4E+JvjJ8G/DP/AAh3xI8NfZf7K1r+0Lq++zfaLqK1m/c3UssL7obiVPnQ43ZGGAI+fv29P2DPjr8av2sfHXjLwZ4G/tnw3qX2H7Le/wBr2EHmeXYW8T/JLOrjDxuOVGcZHBBr9gKAPyq/YY/42T/8Jt/w0b/xcX/hC/sX9g/8wv7H9r+0faf+PLyfM3/ZLf8A1m7Gz5cbmz5X+1J+1H8Tv2L/AI7eJvg38G/E3/CHfDfw19l/srRfsFrffZvtFrFdTfvrqKWZ901xK/zucbsDCgAfqn8c/wBqP4Y/s2nRB8RvE3/COnWvP+wf6BdXXneT5fmf6iJ9uPNj+9jO7jODj8LP29fij4Y+NH7WPjnxl4N1P+2PDep/Yfsl79nlg8zy7C3if5JVVxh43HKjOMjgg0AfeX/BE3/kmPxN/wCw3Yf+gGv0ylnW3iaSRlRFBYsxwABXwv8A8EvP2b/iL+zj4E8daf8AETw7/wAI9d6lqtnPaR/bba68xFUqxzBI4GCRwcGvdf2zfGd14R+Cl7FZM0d1q9xHpqyI20qrgs/5qjL/AMCrKrNU4Ob6HZgsLLG4mnho6OTS+88h+NP7W2teMvED+EvhksxRnNu2p2qlprlu4hH8K9fn6nqMAZPiV3eR/Du31Kzju4tT8WXyNDe6hDL5sdnG+fMhjbnfI3R36AZUZyTS6tdp8KfDUPh7S8xeJNQtkl1i/X5ZYEcbltUPVPlILkcndt6AivOvMAr5CviJSleTvL8j+h8qynD0aShRjal/5NNrrJ9r7L9NA1MiW22nrkYr7b+G3x31D4f/ALNPgjVG8P3fiKVp30vbAxXy0jdlQk7W/hVVAxz618TwWU+p3MFtbxPNPK6pHEgyWYnAAHvX6h/BPwK/w3+FegaBdbTdW1vvuMHIErku4B74ZiPwrpyyM5TlKLsrHh8b18PTw9CFWPM+a9trqzT16dDuLC7a9s4JzE0JljVzHIMMuRnB968a/bF8e678L/2cfiP4q8M339ma9pelRz2d35McvlP52M7JFZTwT1Brs/hL8VNM+K2g3F/Yfu5rW5ktbmAnJR1PB/3WGCD711d3/rLn/rlH/wChNX1MZKSTR+G1qU6FSVOorNdD+ZH4o/FHxP8AGjx1qXjHxjqX9seJNS8r7Ve/Z4oPM8uJIk+SJVQYSNBwozjJ5JNeq/FH9vT46/GnwLqfg3xn44Gs+G9S8r7VZf2RYQeZ5cqSp88UCuMPGh4YZxg5BIr+idP
uL9K/AP8A4Jcf8n2fDL/uJ/8Apru6oyPKvgb+1L8T/wBm3+2/+Fc+Jv8AhHf7a8j7f/oFrded5PmeX/r4n2482T7uM7uc4GP1U/Zb/Zc+GP7aHwJ8M/GT4yeGf+Ex+JHiX7V/autf2hdWP2n7PdS2sP7m1lihTbDbxJ8iDO3JyxJP1T8c/wBqP4Y/s2f2IPiN4m/4R3+2vP8AsH+gXV153k+X5v8AqIn2482P72M7uM4OPys/ak/Zc+J37aHx28TfGT4N+Gf+Ex+G/iX7L/ZWtfb7Wx+0/Z7WK1m/c3UsUybZreVPnQZ25GVIJAPgCvVfgb+1H8Tv2bRrY+HPib/hHRrXkfb/APQLW687yfM8v/XxPtx5sn3cZ3c5wMdV+wV8UfDHwX/ax8DeMvGWp/2P4b0z7d9rvfs8s/l+ZYXESfJErOcvIg4U4zk8AmvtX9ucf8PJj4JH7OX/ABcQ+C/t39vf8wv7H9r+z/Zv+P3yfM3/AGS4/wBXu27PmxuXIB+a3xS+KXif40+O9T8ZeMtT/tnxJqXlfar3yIoPM8uJIk+SJVQYSNBwozjJ5JNfv7+3r8UfE/wX/ZO8c+MvBup/2N4k0z7D9kvfs8U/l+Zf28T/ACSqyHKSOOVOM5HIBrwD9lv9qP4Y/sX/AAJ8M/Bv4yeJv+EO+JHhr7V/aui/2fdX32b7RdS3UP761ilhfdDcRP8AI5xuwcMCB+f/APw65/ad/wCiZ/8Alf0v/wCSaAPv7/glL+1D8Tf2lB8UR8R/E3/CRDRf7LFgPsFra+T532vzf9REm7PlR/ezjbxjJz9BfFH9gv4FfGjx1qfjLxl4HOs+JNS8r7Ve/wBr38HmeXEkSfJFOqDCRoOFGcZOSSa+Kf2F/wDjWx/wm3/DR3/Fuv8AhNPsP9g/8xT7Z9k+0faf+PHz/L2farf/AFm3dv8AlztbH1X/AMPRf2Y/+imH/wAEGqf/ACNQB9VUUUUAFFFFABSHpS0UAfn/APtRf8EpT+0n8dfE3xH/AOFo/wDCOf219m/4ln/CP/avJ8m1ig/1v2pN2fK3fdGN2OcZPlf/AAwx/wAO2P8AjI3/AITb/hYv/CF/8y1/ZX9l/bPtf+g/8fPnTeXs+1+Z/q2zs28Z3D9VK/Fb9lv9qP4nftofHbwz8G/jJ4m/4TH4b+JftX9q6L9gtbH7T9ntZbqH99axRTJtmt4n+Rxnbg5UkEA8r/bl/bl/4bP/AOEJ/wCKJ/4Q7/hGvtv/ADFft32j7R9n/wCmEWzb9n987u2OfVP2Xf8Agqz/AMM2fArwz8OP+FXf8JH/AGL9q/4mf/CQfZfO866ln/1X2V9uPN2/eOdueM4H3+P+CXX7Mf8A0TP/AMr+qf8AyTR/w66/Zj/6Jmf/AAf6p/8AJNAHyp/w3P8A8PJ/+Mcf+EJ/4V1/wmn/ADMv9q/2p9j+yf6f/wAe3kw+Zv8Asnl/6xdu/dzt2n6q/YZ/Ya/4YwPjb/itv+Ex/wCEl+xf8wn7D9m+z/aP+m8u/d9o9sbe+ePwt+FvxS8T/Bbx3pnjLwbqf9jeJNN837Le+RFP5fmRPE/ySqyHKSOOVOM5HIBr9ff+CUn7UXxO/aT/AOFoD4jeJv8AhIhov9l/YP8AQLW18nzvtfmf6iJN2fKj+9nG3jGTkA+/8V+Vn/D87/qif/l1/wD3FX6qV/KvQB9Vftzftz/8No/8IT/xRP8Awhx8N/bf+Yr9u+0/aPs//TGLZt+z++d3bHPyryaAcGv1/wD2Cv2DPgV8af2TvAvjLxn4HOs+JNS+3far3+17+DzPLv7iJPkinVBhI0HCjOMnkk0Afo7dDr/10i/9DFebftD/AA3f4l+DtOs4xlrLVbW8ZezRhtkn5I7H/gNek3f/ALUi/wDQxU0yCWJ0PIYEEVE4qcXF9Tpw1eeGrRrQ3i7n5DeJ/Ec3ijxHqmr3P+vvrmS5cDoCzFsD2GcVRtiJZlX1rU+JnhSfwB4/17w/Mrp9gu5Ioy45aPOUb8VKn8a5yG6aKRXUjIOea+BqJqck9z+ssK4Tw9OVP4Wlb0sfY37EPwv0vWb/AFLxffqlxc6dMLazgYZETlQWlI9cEAfj3xj3v9on4vWXwt8CXnlzqdcvomgsbcN825hgyEei5z7nA71+f/gT4ta14Elnm8P6zNo8twoWZVwVfHTIYEEjJweozWL4v8cX/ibUJr3UdRn1TUJRh7m4cuQPQE9h2HQV6tPHRo4f2dOPvHwGL4Wr5jm7xmLqXpK1l106drdz6M/YJ8TTw/EHxHogbNrd2H2sj0eORVB/ESn9K+2br79z/wBco/8A0Jq+L/8Agnx4Qmn1vxL4pdWW3hgXToiRw7Mwd8H2CJ/31XuP7YnjzXfhh+zh8R/FXhm+/szXtL0pJ7S78mOXyn83Gdkisp4J6g17OXJrDxufm3F8qbziqqfS1/WyPbYzlB9K+Af2Xf8AglL/AMM1/HXwz8R/+Fo/8JF/Yv2r/iW/8I/9l87zrWWD/W/an2483d905244zke//sEfFDxP8Z/2TvA3jLxjqf8AbHiPUvt32q9+zxQeZ5d9cRJ8kSqgwkaDhRnGTySaX9vX4o+J/gv+yd458ZeDdT/sbxJpn2H7Je/Z4p/L8y/t4n+SVWQ5SRxypxnI5ANemfGHJ/tzfsN/8Nof8ISR42/4Q7/hGvtv/MJ+3faPtH2f/pvFs2+R753dsc/K/wDw3P8A8O2P+Mcv+EJ/4WL/AMIX/wAzL/av9l/bPtf+n/8AHt5E3l7Ptfl/6xt2zdxu2j1P/glL+1D8Tf2lB8UR8R/E3/CRDRf7LFgPsFra+T532vzf9REm7PlR/ezjbxjJz9BfFH9gv4FfGjx1qfjLxl4HOs+JNS8r7Ve/2vfweZ5cSRJ8kU6oMJGg4UZxk5JJoA/nWr6q/YZ/bmH7F48bZ8E/8Jj/AMJL9i/5iv2H7N9n+0f9MZd+77R7Y2988fKtFAHqv7Ufx0/4aT+Ovib4j/2J/wAI7/bX2X/iWfa/tXk+TaxQf63Ym7PlbvujG7HOMn+lLj2r+Vivqn/h6N+07/0Uz/ygaX/8jUAfVf8AwXM6/BPH/Ub/APbCvK/2Xf8AglL/AMNKfArwz8R/+Fo/8I5/bX2r/iWf8I/9q8nybqWD/W/ak3Z8rd90Y3Y5xk+qfsLn/h5OfGx/aN/4uIfBf2H+wf8AmF/Y/tf2j7T/AMePkeZv+yW/+s3bdny43Nn9Kfhb8LPDHwW8CaZ4N8G6Z/Y/hvTfN+y2X2iWfy/MleV/nlZnOXkc8scZwOABQB1dFFFABRRRQAUUUh6UALX8637BXxR8MfBf9rHwN4y8Zan/AGP4b0z7d9rvfs8s/l+ZYXESfJErOcvIg4U4zk8Amv0p/ai/4Ktf8M1/HXxN8OP+FXf8JH/Yv2X/AImX/CQfZfO861in/wBV9lfbjzdv3jnbnjOB+K/IoA/pS+Bn7Ufwx/aSOtj4c+Jv+EiOi+R9v/0C6tfJ87zPL/18Sbs+VJ93ONvOMjPq1fgF+wz+3N/wxd/wm3/FE/8ACYnxJ9i/5i32H7N9n+
0f9MZd+77R7Y2988fVP/D87/qif/l1/wD3FQB9Vf8AD0X9mP8A6KYf/BBqn/yNXyp+3Qf+Hk3/AAhJ/Zy/4uIPBf27+3v+YX9j+1/Z/s3/AB/eR5m/7Jcf6vdt2fNjcufgH9lz4Gf8NJ/HXwz8OP7bPh3+2vtX/Ez+yfavJ8m1ln/1W9N2fK2/eGN2ecYP7T/sM/sMj9i8eNj/AMJt/wAJiPEv2L/mFfYfs32f7R/03l37vtHtjb3zwAfhd8Uvhb4n+C3jvU/BvjLTP7G8Sab5X2qy8+Kfy/MiSVPniZkOUkQ8McZweQRXv/8AwS4/5Ps+GX/cT/8ATXd0f8FRh/xnX8TMf9Qz/wBNlpX39+y7/wAEpf8Ahmv46+GfiP8A8LR/4SP+xftX/Et/4R/7L53nWssH+t+1Ptx5u77pztxxnIAPK/8AguYM/wDCk/8AuN/+2FdX+wV+3n8Cvgt+yd4F8G+M/HB0bxJpv277VZf2Rfz+X5l/cSp88UDIcpIh4Y4zg8givff25v2Gv+G0T4J/4rb/AIQ7/hGvtv8AzCft32j7R9n/AOm8Wzb5Hvnd2xz+LH7UfwM/4Zs+Ovib4cDWz4i/sX7L/wATL7J9l87zrWKf/Vb32483b945254zgAH6x/8ABML9nX4h/s1eAfHll8SPD3/COXGoanZ3Vspvra6EkaKQzZgkcDBI64r7tPI4ry79pX4z6f8As8fCHXPH+r6VNrejaY1rHeWFuVEkkU1zFbsVDfKSol3bTgNjGRnI8v8ADX/BQz9ny+02KeD4tWEUEg3pBqlpcRzQj+4copOMYycnjqc5IBo/tX/syP8AFi1TxF4dWNPFNpHsaFjtW8iHIUnoHHOCeucHsR+fmtaHqHhvU59O1Wyn0++gO2S3uYyjofcGv0Ib/goB+z/3+Lfh78Yp/wDCvQvF1j4D8eQLD4gg0jV1H3WubFmdf91s5H4GvIxWXxrvng7M/Qsh4vrZVTWGrx56a211Xp5H5U4Oa7/4Q/BHxN8ZNajtdGtGj09XC3OpyqRBAO+T/E2Oijk+w5r638bj9mP4J3Wmz+K5vDOhSXpkNoNRt55Fl8vbvwpZhxvTqO9ath+3f+zppdqltZ/FPwxaW6DCxQW0qIo9gBgVyUsptK9SWnkfRY/j9TpOOCpNSfWVtPkv8z2r4Z/DrS/hb4NsPDukIRa2q/NI335ZDy7t7k5Pt0HArzv9sHwNrfxT/Zw+JXhXwvZDVNe1HTEtbW0E0cXmS+YG275GVV+XnkjrXPXH/BQL9n8QsT8XtCQY+9FBMzD6Daf5Gpf2ef2t/B37T/i/xpo3gFLy70LwubJptcuozENRkuPP5SNgHVV8j7zhSScBQFBb6CKUUktj8fqVJ1pupUd5PVs8N/Za/aj+GP7F/wACPDPwb+Mnib/hD/iR4b+1f2rov9n3V99n+0XUt1D++tYpYX3Q3ET/ACOcbsHBBA+gPhd+3p8CvjR460zwb4N8cHWfEmpeb9lsv7Iv4PM8uJ5X+eWBUGEjc8sM4wMkgV+P/wDwVF4/bq+JgH/UM/8ATZaV9V/8MMf8O2P+Mjv+E2/4WL/whf8AzLX9lf2X9s+1/wCgf8fPnTeXs+1+Z/q23bNvG7cKMz7++OX7Uvww/Zt/sT/hY3ib/hHf7a8/7B/oF1ded5Pl+Z/qIn2482P72M7uM4OPK/8Ah6L+zH/0Uw/+CDVP/kavlQf8bof+qO/8K2/7jn9o/wBof+A3leX9g/293m/w7fm+Af2o/gb/AMM1/HXxN8OP7b/4SL+xfsv/ABMvsn2XzvOtYp/9Vvfbjzdv3jnbnjOAAfpV+3r+3n8CvjT+yd468G+DPHB1nxJqX2H7LZf2RfweZ5d/byv88sCoMJG55YZxgckCvzV+Bv7LfxO/aS/ts/Dnwz/wkQ0XyPt/+n2tr5PneZ5f+vlTdnypPu5xt5xkZP2XPgZ/w0n8dfDPw4Otnw7/AG19q/4mf2T7V5Pk2ss/+q3puz5W37wxuzzjB+/sj/gi/wD9Vi/4WT/3A/7O/s//AMCfN8z7f/sbfK/i3fKAfav7BXwu8T/Bf9k7wN4N8ZaZ/Y3iTTPt32uy+0RT+X5l/cSp88TMhykiHhjjODyCK5X/AIKj/wDJifxN/wC4Z/6dLSvVP2W/jn/w0n8CfDPxH/sT/hHf7a+1f8S37X9q8nybqWD/AFuxN2fK3fdGN2OcZJ+1H8Df+Gk/gV4m+HH9t/8ACO/219l/4mX2T7V5Pk3UU/8Aqt6bs+Vt+8Mbs84wQD+a2ivqr9ub9hn/AIYvHgn/AIrb/hMf+El+2/8AMJ+w/Zvs/wBn/wCm0u/d9o9sbe+ePlWgD+qiiiigAooooAKQ9KWkPSgD8Av+Co//ACfZ8Tf+4Z/6a7SvtX9vX9gz4FfBb9k7x14y8GeBzo3iTTfsP2W9/te/n8vzL+3if5JZ2Q5SRxypxnI5ANfFX/BUf/k+z4m/9wz/ANNdpX6/ft6/C7xP8aP2TvHPg3wbpn9s+JNT+w/ZLL7RFB5nl39vK/zysqDCRueWGcYHJAoA/Nb/AIJS/svfDH9pM/FE/Efwz/wkR0X+y/sH+n3Vr5Pnfa/M/wBRKm7PlR/ezjbxjJz8/wD7evwu8MfBf9rHxz4N8G6Z/Y/hvTPsP2Sy+0Sz+X5lhbyv88rM5y8jnljjOBwAK5X45fsufE79m0aIfiN4Z/4R0a15/wBg/wBPtbrzvJ8vzP8AUSvtx5sf3sZ3cZwcftT/AMEuP+TE/hl/3E//AE6XdAHlX7Un7Lnwx/Yv+BPib4yfBvwz/wAId8SPDX2X+yta/tC6vvs32i6itZv3N1LLC+6G4lT50ON2RhgCPz//AOHo37Tn/RTP/KDpn/yNXK/sFfFHwx8F/wBrHwN4y8Zan/Y/hvTPt32u9+zyz+X5lhcRJ8kSs5y8iDhTjOTwCa+1f25x/wAPJj4JH7OX/FxD4L+3f29/zC/sf2v7P9m/4/fJ8zf9kuP9Xu27PmxuXIB6r+y3+y58Mf20PgT4Z+Mnxk8M/wDCY/EjxL9q/tXWv7QurH7T9nupbWH9zayxQptht4k+RBnbk5Ykn8//APh6N+07/wBFM/8AKBpf/wAjV+gH7Lf7Ufwx/Yv+BPhn4N/GTxN/wh3xI8Nfav7V0X+z7q++zfaLqW6h/fWsUsL7obiJ/kc43YOGBAP2pP2o/hj+2h8CfE3wb+Dfib/hMfiR4l+y/wBlaL/Z91Y/afs91FdTfvrqKKFNsNvK/wA7jO3AyxAIAn/BKX9qL4nftK/8LRHxH8Tf8JGNF/sv7B/oFra+T532vzf9REm7PlR/ezjbxjJz9A/FH9gv4FfGjx1qfjLxl4HOs+JNS8r7Ve/2vfweZ5cSRJ8kU6oMJGg4UZxk5JJr4q/YY/41sHxt/wANHf8AFuv+E0+w/wBg/wDMU+2fZPtH2n/jx87y9n2u3/1m3dv+XO1sfVX/AA9F/Zj/AOimH/wQap/8jUAe/fFL4WeGPjT4F1Lwb4y0z+2PDepeV9qsvPlg8zy5UlT54mVxh40PDDOMHgkV8/t/w
S3/AGZDkj4bMPpr2p//ACTX5BfsFfFHwx8F/wBrHwN4y8Zan/Y/hvTPt32u9+zyz+X5lhcRJ8kSs5y8iDhTjOTwCa+1f25x/wAPJj4JH7OX/FxD4L+3f29/zC/sf2v7P9m/4/fJ8zf9kuP9Xu27PmxuXIB8Vft6fC3wx8Fv2sPHPgzwbpv9keG9N+w/ZbIzyz+X5lhbyv8APIzOcvI55JxnA4AFfS37Bf7Z/wAY/jV+1h4G8GeMvFyax4b1L7d9qsv7IsYPM8uwuJU+eKBXGHjQ8EZxg8EivpT9lv8Aaj+GP7F/wJ8M/Bv4yeJv+EO+JHhr7V/aui/2fdX32b7RdS3UP761ilhfdDcRP8jnG7BwwIH2p8Uvin4Y+C3gTU/GXjLU/wCx/Dem+V9qvfs8s/l+ZKkSfJErOcvIg4U4zk8AmgD8yv8Agt9pdtpg+C/2eMR+Z/bW7HfH2D/E1+W9fv8An/gqL+zGDg/Es5/7AGqf/I1J/wAPRf2Y/wDoph/8EGqf/I1AH5AfsFfC3wx8af2sPA3gzxjpv9r+G9T+3farMTyweZ5djcSp88TK4w8aHgjOMHgkV9p/t0KP+CbP/CEf8M5/8W8/4TQX39vZ/wCJp9s+yfZ/s3/H753l7PtU/wDq9u7f82dq4+V/+CXH/J9nwy/7if8A6a7uv2o+OX7Unwx/Zs/sQfEfxN/wjv8AbXn/AGD/AEC6uvO8ny/N/wBRE+3Hmx/exndxnBwAfzr/ABS+KXif40+O9T8ZeMtT/tnxJqXlfar3yIoPM8uJIk+SJVQYSNBwozjJ5JNfup/wVH/5MT+Jv/cM/wDTpaUf8PRf2Y/+imH/AMEGqf8AyNXv/wAUvin4Y+C3gTU/GXjLU/7H8N6b5X2q9+zyz+X5kqRJ8kSs5y8iDhTjOTwCaAPzV/4IZf8ANbP+4J/7f18rf8FR/wDk+z4m/wDcM/8ATXaV+qp/4Ki/sxg4PxLOf+wBqn/yNSf8PRf2Y/8Aoph/8EGqf/I1AHlX7Un7Lnwx/Yv+BPib4yfBvwz/AMId8SPDX2X+yta/tC6vvs32i6itZv3N1LLC+6G4lT50ON2RhgCPyr+Of7UnxP8A2k/7EHxH8Tf8JF/Yvn/YP9AtbXyfO8vzf9REm7PlR/ezjbxjJz9p/sF/sGfHX4K/tY+BfGXjPwN/Y3hvTft32q9/tewn8vzLC4iT5Ip2c5eRBwpxnJ4BNfQH/BVn9l34nftKf8Ku/wCFceGf+Ei/sX+1Pt/+n2tr5PnfZPL/ANfKm7PlSfdzjbzjIyAerf8ABLj/AJMT+GX/AHE//Tpd18U/sF/t5/HX41ftY+BfBvjPxz/bPhvUvt32qy/siwg8zy7C4lT54oFcYeNDwwzjB4JFfQP7Lf7Ufwx/Yv8AgT4Z+Dfxk8Tf8Id8SPDX2r+1dF/s+6vvs32i6luof31rFLC+6G4if5HON2DhgQPVf+Hov7Mf/RTD/wCCDVP/AJGoA9U+OX7Lnwx/aS/sQ/Ebwz/wkR0Xz/sH+n3Vr5PneX5n+olTdnyo/vZxt4xk5/Cz9vX4XeGPgv8AtY+OfBvg3TP7H8N6Z9h+yWX2iWfy/MsLeV/nlZnOXkc8scZwOABX0B/wVa/ai+GP7Sh+F3/CuPE3/CRf2L/an2//AEC6tfJ877J5f+viTdnypPu5xt5xkZ+gP2Cv28/gV8Fv2TvAvg3xn44OjeJNN+3farL+yL+fy/Mv7iVPnigZDlJEPDHGcHkEUAfpTRRRQAUUUUAFIelLSHpQB+AX/BUf/k+z4m/9wz/012lfv6a/AL/gqP8A8n2fE3/uGf8AprtK/X79vX4o+J/gv+yd458ZeDdT/sbxJpn2H7Je/Z4p/L8y/t4n+SVWQ5SRxypxnI5ANAHxV/wXM/5onj/qN/8AthX1X/wS4/5MU+Gf/cT/APTnd18qfsL/APGyY+Nf+Gjf+Lif8IX9h/sHH/Er+x/a/tH2n/jx8jzN/wBkt/8AWbtuz5cbmz+lXwt+Fnhj4LeBNM8G+DdM/sfw3pvm/ZbL7RLP5fmSvK/zysznLyOeWOM4HAAoA/nX/Zc+Bn/DSfx18M/Dj+2/+Ec/tr7V/wATP7J9q8nybWWf/Vb03Z8rb94Y3Z5xg/tP+wz+wyP2Lx42/wCK2/4TEeJfsX/MK+w/Zvs/2j/pvLv3faPbG3vnjyz9qT9lz4Y/sX/AnxN8ZPg34Z/4Q74keGvsv9la1/aF1ffZvtF1FazfubqWWF90NxKnzocbsjDAEfn+f+Cov7ThGP8AhZnH/YA0z/5GoAX/AIKjH/jOv4mY/wCoZ/6bLSvqr/hhj/h2x/xkd/wm3/Cxf+EL/wCZa/sr+y/tn2v/AED/AI+fOm8vZ9r8z/Vtu2beM7h+a3xS+KXif40+O9T8ZeMtT/tnxJqXlfar3yIoPM8uJIk+SJVQYSNBwozjJ5JNfun/AMFR/wDkxP4m/wDcM/8ATpaUAflX+3L+3MP20P8AhCf+KJ/4Q7/hGvtv/MV+3faftH2f/pjFs2/Z/fO7tjn1X9l3/glL/wANJ/Arwz8R/wDhaP8Awjn9tfav+JZ/wj/2ryfJupYP9b9qTdnyt33RjdjnGT8AA4OR1r3/AOF37enx1+C3gXTPBvgzxwNG8N6b5v2Wy/siwn8vzJXlf55YGc5eRzyxxnAwABQB4BX1V+wz+3L/AMMYDxt/xRP/AAmP/CS/Yv8AmK/Yfs32f7R/0xl37vtHtjb3zx8q19//APBKX9l34Y/tJ/8AC0f+Fj+Gf+Ei/sX+y/sH+n3Vr5Pnfa/N/wBRKm7PlR/ezjbxjJyAeq/8MMf8PJ/+Mjf+E2/4V1/wmn/Mtf2V/an2P7J/oP8Ax8+dD5m/7J5n+rXG/bzjcfK/2ov+CrX/AA0n8CvE3w4Pwu/4Rz+2vsv/ABM/+Eg+1eT5N1FP/qvsqbs+Vt+8Mbs84wU/ak/aj+J37F/x28TfBv4N+Jv+EO+G/hr7L/ZWi/YLW++zfaLWK6m/fXUUsz7priV/nc43YGFAA+gP29f2DPgV8Fv2TvHXjLwZ4HOjeJNN+w/Zb3+17+fy/Mv7eJ/klnZDlJHHKnGcjkA0AfFX7DX7DP8Aw2h/wm3/ABW3/CHf8I19i/5hX277T9o+0f8ATaLZt8j3zu7Y5+qv+HGX/VbP/LU/+7a+APgb+1J8Tv2bf7bHw58Tf8I6Na8j7f8A6Ba3XneT5nl/6+J9uPNk+7jO7nOBj90/2Cvij4n+NH7J3gbxl4y1P+2fEmp/bvtd79nig8zy7+4iT5IlVBhI0HCjOMnkk0AfP/7Lv/BKX/hmv46+GfiP/wALR/4SP+xftX/Et/4R/wCy+d51rLB/rftT7cebu+6c7ccZyPKv+C5h/wCSJ/8Acb/9sK+Vf+Ho37Tv/RTP/KBpf/yNXlfxz/ak+J/7Sf8AYn/Cx/E3/CRf2L5/2D/QLW18nzvL83/URJuz5Uf3s428YycgH1X+y7/wSl/4aU+BXhn4jj4o/wDCOf219q/4lv8Awj/2ryfJupYP9b9qTdnyt33RjdjnGT9/f8FRv+TFPiZ/
3DP/AE52lH/BLj/kxP4Zf9xP/wBOl3X5AfFH9vT46/GnwLqfg3xn44Gs+G9S8r7VZf2RYQeZ5cqSp88UCuMPGh4YZxg5BIoA8AyaM0E5JJ6migD+qfikIGK/AL/h6N+07/0Uz/ygaX/8jUf8PRv2nf8Aopn/AJQNL/8AkagD7/8A2ov+CU3/AA0n8dfE3xH/AOFo/wDCOf219m/4ln/CP/avJ8m1ig/1v2pN2fK3fdGN2OcZPlf/AA4y/wCq2f8Alqf/AHbXyp/w9G/ad/6KZ/5QNL/+Rq/X/wDb1+KPif4L/sneOfGXg3U/7G8SaZ9h+yXv2eKfy/Mv7eJ/klVkOUkccqcZyOQDQB+QP7c37DX/AAxd/wAIT/xW3/CY/wDCSfbf+YV9h+zfZ/s//TeXfu8/2xt7549W/Zd/4JS/8NKfArwz8Rx8Uf8AhHP7a+1f8S3/AIR/7V5Pk3UsH+t+1Juz5W77oxuxzjJ+VPjl+1H8Tv2khog+I3ib/hIhovn/AGD/AEC1tfJ87y/M/wBREm7PlR/ezjbxjJz+1P8AwS4/5MT+GX/cT/8ATpd0AfVVFFFABRRRQAUhOASegpaQ9KAPAfij+3p8Cvgv461Pwb4y8cHRvEmm+V9qsv7Iv5/L8yJJU+eKBkOUkQ8McZwcEEVyn/D0X9mP/oph/wDBBqn/AMjV+Vf/AAVG4/br+Jn/AHDP/TZaV9Vf8OMv+q2f+Wp/920AfVX/AA9F/Zj/AOimH/wQap/8jUf8PRf2Y/8Aoph/8EGqf/I1fKv/AA4z/wCq2/8Alqf/AHbR/wAOMv8Aqtn/AJan/wB20AfVX/D0X9mP/oph/wDBBqn/AMjV6p8Df2pPhh+0n/bf/CufE3/CRf2L5H2//QLq18nzvM8r/XxJuz5Un3c4284yM/lb+1F/wSl/4Zr+BXib4jn4o/8ACR/2L9l/4lv/AAj/ANl87zrqKD/W/an2483d905244zkeqf8EMuf+F2en/Ek/wDb+gDk/wBvT9gz46/Gr9rHx14y8GeBv7Z8N6l9h+y3v9r2EHmeXYW8T/JLOrjDxuOVGcZHBBr7W/4ei/sx/wDRTD/4INU/+Rq+qeK/Kz/hxl/1Wz/y1P8A7toA+/vgZ+1J8Mf2kzrY+HHib/hIv7F8j7f/AKBdWvk+d5nlf6+JN2fKk+7nG3nGRn1avlT9hn9hn/hi7/hNv+K2/wCEx/4ST7F/zCfsP2b7P9o/6by7932j2xt754+qs0Afmv8At6/t5/Ar40/sneOvBvgzxwdZ8Sal9h+y2X9kX8HmeXf28r/PLAqDCRueWGcYHJArlP8AghmMf8Ls/wC4J/7f0f8ADjL/AKrZ/wCWp/8AdtfVH7DP7DX/AAxd/wAJt/xW3/CY/wDCS/Yv+YT9h+z/AGf7R/03l37vP9sbe+eAD8rf+Co//J9nxN/7hn/prtK+1f29f28/gV8af2TvHXg3wZ44Os+JNS+w/ZbL+yL+DzPLv7eV/nlgVBhI3PLDOMDkgV1P7UX/AASl/wCGlPjr4m+I/wDwtH/hHf7a+y/8S3/hH/tXk+TaxQf637Um7PlbvujG7HOMn8V80ABGDRX1V+wz+wz/AMNo/wDCbf8AFbf8Id/wjf2L/mFfbvtP2j7R/wBNotm3yPfO7tjn6p/4cZf9Vs/8tT/7toA/Kyv1T/4IZHA+NhPT/iSf+39fAH7LnwM/4aT+Ovhn4cf23/wjv9tfav8AiZfZPtXk+Tayz/6rem7PlbfvDG7POMH9qP2Gf2GR+xd/wm3/ABW3/CYjxL9i/wCYV9h+zfZ/tH/TeXfu8/2xt754AOt+KP7enwK+C/jrU/BvjLxwdG8Sab5X2qy/si/n8vzIklT54oGQ5SRDwxxnBwQRX5q/st/sufE79i/47eGfjJ8ZPDP/AAh3w38Nfav7V1r7fa332b7Ray2sP7m1llmfdNcRJ8iHG7JwoJHlX/BUY/8AGdfxMx/1DP8A02Wlfqn/AMFRv+TFPiZjr/xLP/TnaUAL/wAPRf2Yzx/wss5/7AGqf/I1fkB+3r8UfDHxo/ax8c+MvBup/wBseG9T+w/ZL37PLB5nl2FvE/ySqrjDxuOVGcZHBBrrP2Gv2Gv+G0D42z42/wCEOPhr7F/zCvt32n7R9o/6bxbNv2f3zu7Y58p/aj+Bv/DNnx18TfDj+2/+Ei/sX7L/AMTL7J9l87zrWKf/AFW99uPN2/eOdueM4AB/RR8Uvin4Y+C3gTU/GXjLU/7H8N6b5X2q9+zyz+X5kqRJ8kSs5y8iDhTjOTwCa5T4G/tR/DH9pI62Phz4m/4SI6L5P2/NhdWvk+d5nl/6+JN2fKk+7nG3nGRk/aj+Bv8Aw0n8CvE3w4/tv/hHf7a+y/8AEy+yfavJ8m6in/1W9N2fK2/eGN2ecYPlX7DP7DP/AAxf/wAJt/xW3/CY/wDCS/Yv+YV9h+zfZ/tH/TeXfu+0e2NvfPAB+Vn/AAVH/wCT7Pib/wBwz/012lfr/wDC79vT4FfGjx1png3wb44Os+JNS837LZf2RfweZ5cTyv8APLAqDCRueWGcYGSQK/H/AP4Kjf8AJ9fxN/7hn/pstK+//wBl7/glL/wzX8dfDPxH/wCFo/8ACR/2L9p/4ln/AAj/ANl87zrWWD/W/an2483d905244zkAHlX/Bcz5v8AhSWO/wDbf/thXWfsFft5/Ar4LfsneBfBvjPxwdG8Sab9u+1WX9kX8/l+Zf3EqfPFAyHKSIeGOM4PIIrk/wDguZ/zRPH/AFG//bCvys5oA/qoooooAKKKKACkPSlpD0oA/AL/AIKj/wDJ9nxN/wC4Z/6a7Sv1+/b1+KPif4L/ALJ3jnxl4N1P+xvEmmfYfsl79nin8vzL+3if5JVZDlJHHKnGcjkA1+QP/BUf/k+z4m/9wz/012lfqp/wVH/5MT+Jv/cM/wDTpaUAflV/w9F/acHA+JnH/YA0v/5Go/4ejftO/wDRTP8AygaX/wDI1fK1FAH7/f8ABUf/AJMT+Jv/AHDP/TpaV8qf8EMuf+F2f9wT/wBv6+q/+Co//JifxN/7hn/p0tK+VP8Aghl/zWz/ALgn/t/QByn7en7efx1+Cv7WPjrwb4M8c/2N4b037D9lsv7IsJ/L8ywt5X+eWBnOXkc8scZwOABX7AV+AP8AwVH/AOT7Pib/ANwz/wBNdpX2r+3r+3n8CvjT+yd468G+DPHB1nxJqX2H7LZf2RfweZ5d/byv88sCoMJG55YZxgckCgD9KDyK/ID9vT9vP46/BX9rHx14N8GeOf7G8N6b9h+y2X9kWE/l+ZYW8r/PLAznLyOeWOM4HAArrP8AghkMH42f9wT/ANv6/VSgD8f/ANgv9vP46/Gr9rHwL4N8Z+Of7Z8N6l9u+1WX9kWEHmeXYXEqfPFArjDxoeGGcYPBIr3/AP4KtftRfE79mwfC4fDjxN/wjo1r+1Pt/wDoFrded5P2Ty/9fE+3HmyfdxndznAx6v8A8PRf2Y/+imH/AMEGqf8AyNR/w9F/Zj/6KYf/AAQap/8AI1AHVfsFfFHxP8aP2Tv
A3jLxlqf9s+JNT+3fa737PFB5nl39xEnyRKqDCRoOFGcZPJJr+dev3+/4ei/sx/8ARTD/AOCDVP8A5Grqv29fhd4n+NH7J3jnwb4N0z+2fEmp/Yfsll9oig8zy7+3lf55WVBhI3PLDOMDkgUAfhZ8DP2pPif+zYNb/wCFceJv+Ed/tryPt/8AoFrded5PmeV/r4n2482T7uM7uc4GPVP+Ho37Tv8A0Uz/AMoGl/8AyNX39/wSl/Ze+Jv7NY+KJ+I/hn/hHRrX9lmwP2+1uvO8n7X5v+olfbjzY/vYzu4zg4+gvij+3p8Cvgv461Pwb4y8cHRvEmm+V9qsv7Iv5/L8yJJU+eKBkOUkQ8McZwcEEUAfgD8Lfil4n+C3jvTPGXg3U/7G8Sab5v2W98iKfy/MieJ/klVkOUkccqcZyOQDXv3/AA9F/acxj/hZYx0x/YGmf/I1fqr/AMPRf2Y/+imH/wAEGqf/ACNXqnwN/ak+GH7Sf9t/8K58Tf8ACRf2L5H2/wD0C6tfJ87zPK/18Sbs+VJ93ONvOMjIB/Ox8Uvil4n+NPjvU/GXjLU/7Z8Sal5X2q98iKDzPLiSJPkiVUGEjQcKM4yeSTX2p+y3+1H8Tv20Pjt4Z+Dfxk8Tf8Jj8N/Ev2r+1dF+wWtj9p+z2st1D++tYopk2zW8T/I4ztwcqSD1P7en7Bnx1+NX7WPjrxl4M8Df2z4b1L7D9lvf7XsIPM8uwt4n+SWdXGHjccqM4yOCDX6q/FL4p+GPgt4E1Pxl4y1P+x/Dem+V9qvfs8s/l+ZKkSfJErOcvIg4U4zk8AmgD81f26P+NbI8E/8ADOX/ABbv/hNPt39vf8xT7Z9k+z/Zv+P3zvL2fa7j/V7d2/5s7Vx+a/xS+KXif40+O9T8ZeMtT/tnxJqXlfar3yIoPM8uJIk+SJVQYSNBwozjJ5JNfumf+Cov7MfI/wCFlnP/AGANU/8AkavgD9qT9lz4nftofHbxN8ZPg34Z/wCEx+G/iX7L/ZWtfb7Wx+0/Z7WK1m/c3UsUybZreVPnQZ25GVIJAPKf+Ho37Tv/AEUz/wAoGl//ACNQf+Cov7ThGD8TMj/sAaX/API1fr/+3r8LvE/xo/ZO8c+DfBumf2z4k1P7D9ksvtEUHmeXf28r/PKyoMJG55YZxgckCvir9hj/AI1sjxt/w0b/AMW7/wCE0+w/2D/zFPtn2T7R9p/48fO8vZ9rt/8AWbd2/wCXO1sAH5r/ABS+KXif40+O9T8ZeMtT/tnxJqXlfar3yIoPM8uJIk+SJVQYSNBwozjJ5JNfv7+3r8UfE/wX/ZO8c+MvBup/2N4k0z7D9kvfs8U/l+Zf28T/ACSqyHKSOOVOM5HIBr81v2pP2XPid+2h8dvE3xk+Dfhn/hMfhv4l+y/2VrX2+1sftP2e1itZv3N1LFMm2a3lT50GduRlSCfoD9vX9vP4FfGn9k7x14N8GeODrPiTUvsP2Wy/si/g8zy7+3lf55YFQYSNzywzjA5IFAHJ/sLj/h5N/wAJsP2jf+Lif8IX9h/sH/mF/Y/tf2j7T/x4+R5m/wCyW/8ArN23Z8uNzZ+q/wDh11+zH/0TM/8Ag/1T/wCSa+VP+CGfy/8AC7c9v7E/9v6+1vij+3p8Cvgv461Pwb4y8cHRvEmm+V9qsv7Iv5/L8yJJU+eKBkOUkQ8McZwcEEUAe/0UUUAFFFFABSHpS0h6UAfgF/wVH/5Ps+Jv/cM/9NdpX7UftR/A3/hpP4FeJvhx/bf/AAjv9tfZf+Jl9k+1eT5N1FP/AKrem7PlbfvDG7POMH8V/wDgqP8A8n2fE3/uGf8AprtKT/h6N+07/wBFM/8AKBpf/wAjUAfVX/DjL/qtn/lqf/dtH/DjL/qtn/lqf/dtfKv/AA9G/ad/6KZ/5QNL/wDkaj/h6N+07/0Uz/ygaX/8jUAfqr/wVG5/YU+Jn/cM/wDTnaV8qf8ABDL/AJrZ/wBwT/2/r4r+KP7enx1+NPgXU/BvjPxwNZ8N6l5X2qy/siwg8zy5UlT54oFcYeNDwwzjByCRX2r/AMEMjk/Gwn/qCf8At/QB6p+1F/wSl/4aU+Ovib4j/wDC0f8AhHP7a+y/8S3/AIR/7V5Pk2sUH+t+1Juz5W77oxuxzjJ/Ffmv6qK+Vf8Ah11+zH/0TM/+D/VP/kmgD8q/2GP25v8Ahi7/AITb/iif+ExPiX7F/wAxX7D9m+z/AGj/AKYS793n+2NvfPH1V/w/N/6on/5df/3FX1V/w66/Zj/6Jmf/AAf6p/8AJNH/AA66/Zj/AOiZn/wf6p/8k0AfKv8Aw4y/6rZ/5an/AN20f8OMv+q2f+Wp/wDdtfqpSHpQB/Nb+1H8Df8Ahmv46+Jvhx/bf/CRf2L9l/4mX2T7L53nWsU/+q3vtx5u37xztzxnA/VP9l3/AIKtf8NKfHXwz8OP+FXf8I7/AG19q/4mX/CQfavJ8m1ln/1X2VN2fK2/eGN2ecYPwD/wVH/5Ps+Jv/cM/wDTXaUf8EuP+T7Phl/3E/8A013dAH6pftzfty/8MX/8IT/xRP8AwmP/AAkv23/mLfYfs/2f7P8A9MJd+77R7Y2988fK/wDwwx/w8n/4yO/4Tb/hXX/Caf8AMtf2V/an2P7J/oH/AB8+dD5m/wCyeZ/q1279vO3cfv745fsufDH9pI6IfiN4Z/4SI6L532DF/dWvk+d5fmf6iVN2fKj+9nG3jGTnq/hb8LPDHwW8CaZ4N8G6Z/Y/hvTfN+y2X2iWfy/MleV/nlZnOXkc8scZwOABQB/Ov+y58DP+Gk/jr4Z+HH9tnw7/AG19q/4mf2T7V5Pk2ss/+q3puz5W37wxuzzjB/af9hn9hn/hi/8A4Tb/AIrb/hMf+El+xf8AMJ+w/Zvs/wBo/wCm0u/d9o9sbe+ePLP2pP2XPhj+xf8AAnxN8ZPg34Z/4Q74keGvsv8AZWtf2hdX32b7RdRWs37m6llhfdDcSp86HG7IwwBCf8Epf2o/id+0p/wtH/hY/ib/AISL+xf7L+wf6Ba2vk+d9r83/URJuz5Uf3s428YycgB+1F/wVa/4Zr+Ovib4cf8ACrv+Ej/sX7L/AMTL/hIPsvnedaxT/wCq+yvtx5u37xztzxnA9W/4Kjf8mKfEz/uGf+nO0r8q/wDgqP8A8n2fE3/uGf8AprtK5T4o/t6fHX40+BdT8G+M/HA1nw3qXlfarL+yLCDzPLlSVPnigVxh40PDDOMHIJFAHgIJzX39+y7/AMFWv+GbPgV4Z+HP/Crv+Ej/ALF+1f8AEz/4SD7L53nXUs/+q+yvtx5u37xztzxnA+AcndnvX6/fsFfsGfAr40/sneBfGXjPwOdZ8Sal9u+1Xv8Aa9/B5nl39xEnyRTqgwkaDhRnGTySaAPtP9qP45/8M2/ArxN8Rv7E/wCEi/sX7L/xLPtf2XzvOuooP9bsfbjzd33TnbjjOR+K/wC3N+3N/wANof8ACE/8UT/whx8Nfbf+Yr9u+0faPs//AEwi2bfs/vnd2xzynxR/b0+Ovxp8C6n4N8Z+OBrPhvUvK+1WX9kWEH
meXKkqfPFArjDxoeGGcYOQSK8AJycnrQB+/wB/wS55/YU+Gf8A3E//AE53dfit+y58Df8AhpP46+Gfhx/bf/CO/wBtfav+Jl9k+1eT5NrLP/qt6bs+Vt+8Mbs84wf2p/4Jcf8AJifwy/7if/p0u6/Cz4W/FLxP8FvHemeMvBup/wBjeJNN837Le+RFP5fmRPE/ySqyHKSOOVOM5HIBoA/dL9hn9hn/AIYv/wCE2/4rb/hMf+El+xf8wr7D9n+z/aP+m8u/d5/tjb3zx+Vn/BUY/wDGdfxMx/1DP/TZaUn/AA9F/acxj/hZYx0x/YGmf/I1eA/FL4peJ/jT471Pxl4y1P8AtnxJqXlfar3yIoPM8uJIk+SJVQYSNBwozjJ5JNAH9PtFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAf/2Q==" + +class UserRegisterEndPoint(SystemAdminEndPoint): + authentication_classes = () + permission_classes = () + + def __init__(self, **kwargs): + super().__init__(**kwargs) + try: + self.email = Email() + self.email.login_server(server=settings.EMAIL_SERVER, + username=settings.EMAIL_USER, + port=settings.EMAIL_PORT, + pwd=settings.EMAIL_PASSWORD, + ssl=settings.ENABLE_SSL) + except Exception as e: + self.email = None + pass + + def get(self, request): + users = self.read_user_data() + self.register(users) + return R.success(msg=_('Account has been created successfully')) + + def post(self, request, token): + """ + { + "form": "xJfeTv", + "entry": { + "field1": 123, + "field2": "Hello, World", + "field3": "It's very good." + } + } + :param request: + :param token: + :return: + """ + + webhook_token = settings.config.get('wenjuan_webhook', 'token') + if token == webhook_token: + entry = request.data['entry'] + username = entry['field_1'] + phone = entry['field_2'] + email_addr = entry['field_3'] + + _user = User.objects.filter(username=username).first() + if _user: + logger.info(_('User {} already exists').format(username)) + else: + self.register_with_raw(username=username, phone=phone, email_addr=email_addr, email=self.email) + logger.info(_('User {} has been created successfully').format(username)) + else: + logger.warn(_('Failed to create user, error message: token is incorrect')) + return R.success(msg=_('Account registration successful')) + + def read_user_data(self): + header = True + users = list() + try: + + with open('/tmp/user_register.csv', newline='', encoding='utf-8') as f: + reader = csv.reader(f) + for row in reader: + if header: + header = False + continue + users.append((row[11].strip(), row[12].strip(), row[13].strip())) + except BaseException: + print(_('User account file read error')) + return users + + @staticmethod + def register_with_raw(username, email_addr, phone, email): + random_str = ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(3)) + password = f'{username}@{random_str}' + new_user = User.objects.create_user(username=username, password=password, email=email_addr, phone=phone) + department = Department.objects.filter(id=1).first() + department.users.add(new_user) + group, success = Group.objects.get_or_create(name='user') + group.user_set.add(new_user) + res= email.sendmail( + from_addr=settings.EMAIL_FROM_ADDR, + to_addrs=[email_addr, settings.ADMIN_EMAIL], + _subject='【洞态IAST】账号创建成功', + _content=f'''

洞态IAST账号创建成功

登陆地址:https://iast.io/login

账号:{username}

密码:{password}


登陆之后,请马上修改默认密码,然后重新登陆使用。


官方网站:https://dongtai.io



官方公众号:洞态公众号二维码

Welcome to DongTai IAST, + your account has been successfully created.

Login URL: https://iast.io/login

Account: {username}

Password: {password}


Notice: You MUST change your password the first time you log in. The password can be changed at "Settings/Account". After that, you can log in again.
DongTai IAST Official Website: https://dongtai.io



DongTai IAST WeChat Official Account: DongTai IAST's QR Code
''', + content_type='html', + ) + print(res) + + def register(self, users): + if users: + email = Email() + email.login_server(server=settings.EMAIL_SERVER, + username=settings.EMAIL_USER, + pwd=settings.EMAIL_PASSWORD, + ssl=settings.ENABLE_SSL) + for user in users: + _user = User.objects.filter(username=user[0]).first() + if _user: + print(f'用户{user[0]}已存在') + else: + self.register_with_raw(username=user[0], email_addr=user[1], phone=user[2], email=self.email) + self.email.logout_server() diff --git a/dongtai_web/views/user_token.py b/dongtai_web/views/user_token.py new file mode 100644 index 000000000..f1207c2cc --- /dev/null +++ b/dongtai_web/views/user_token.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# datetime:2020/5/25 15:03 +# software: PyCharm +# project: webapi +import logging + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from rest_framework.authtoken.models import Token +from django.utils.translation import gettext_lazy as _ + +logger = logging.getLogger("django") + + +class UserToken(UserEndPoint): + name = "iast-v1-user-token" + description = _("Get OpenAPI token") + + def get(self, request): + token, success = Token.objects.get_or_create(user=request.user) + + return R.success(data={'token': token.key}) diff --git a/dongtai_web/views/utils/__init__.py b/dongtai_web/views/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/views/utils/commonstats.py b/dongtai_web/views/utils/commonstats.py new file mode 100644 index 000000000..34c3c9bde --- /dev/null +++ b/dongtai_web/views/utils/commonstats.py @@ -0,0 +1,138 @@ +from typing import Union +from collections.abc import Iterable +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.hook_type import HookType +from django.db.models import (Q, Count, Value) +from dongtai_common.models.vul_level import IastVulLevel +import time +from django.db.models.query import QuerySet + + +def weeks_ago(week: int = 1): + + weekend = 7 * week + current_timestamp = int(time.time()) + weekend_ago_time = time.localtime(current_timestamp - 86400 * weekend) + weekend_ago_time_str = str(weekend_ago_time.tm_year) + "-" + str(weekend_ago_time.tm_mon) + "-" + str( + weekend_ago_time.tm_mday) + " 00:00:00" + beginArray = time.strptime(weekend_ago_time_str, "%Y-%m-%d %H:%M:%S") + + beginT = int(time.mktime(beginArray)) + return current_timestamp, beginT, weekend + +def get_summary_by_agent_ids(agent_ids: Iterable): + data = {} + data['type_summary'] = [] + data['level_count'] = [] + queryset = IastVulnerabilityModel.objects.filter( + agent_id__in=agent_ids, is_del=0).values("hook_type_id", 'strategy_id', + "level_id", "latest_time") + q = ~Q(hook_type_id=0) + queryset = queryset.filter(q) + typeArr = {} + typeLevel = {} + levelCount = {} + strategy_ids = queryset.values_list('strategy_id', + flat=True).distinct() + strategys = { + strategy['id']: strategy + for strategy in IastStrategyModel.objects.filter( + pk__in=strategy_ids).values('id', 'vul_name').all() + } + hook_type_ids = queryset.values_list('hook_type_id', + flat=True).distinct() + hooktypes = { + hooktype['id']: hooktype + for hooktype in HookType.objects.filter( + pk__in=hook_type_ids).values('id', 'name').all() + } + if queryset: + for one in queryset: + hook_type = hooktypes.get(one['hook_type_id'], None) + hook_type_name = hook_type['name'] if hook_type else None 
+ strategy = strategys.get(one['strategy_id'], None) + strategy_name = strategy['vul_name'] if strategy else None + type_ = list( + filter(lambda x: x is not None, + [strategy_name, hook_type_name])) + one['type'] = type_[0] if type_ else '' + typeArr[one['type']] = typeArr.get(one['type'], 0) + 1 + typeLevel[one['type']] = one['level_id'] + levelCount[one['level_id']] = levelCount.get( + one['level_id'], 0) + 1 + typeArrKeys = typeArr.keys() + for item_type in typeArrKeys: + data['type_summary'].append({ + 'type_name': item_type, + 'type_count': typeArr[item_type], + 'type_level': typeLevel[item_type] + }) + + current_timestamp, a_week_ago_timestamp, days = weeks_ago( + week=1) + daylist = [] + while days >= 0: + wtimestamp = current_timestamp - 86400 * days + wDay = time.localtime(wtimestamp) + wkey = str(wDay.tm_mon) + "-" + str(wDay.tm_mday) + daylist.append([wtimestamp, wkey]) + days = days - 1 + timestamp_gt = current_timestamp + queryset_list = [] + queryset_ = IastVulnerabilityModel.objects.filter(agent_id__in=agent_ids, + is_del=0) + for timestamp, _ in daylist: + queryset_list.append( + geneatre_vul_timerange_count_queryset(queryset_, timestamp_gt, + timestamp, wkey)) + timestamp_gt = timestamp + if len(queryset_list) > 1: + start_query_set = queryset_list[0] + final_query_set = start_query_set.union(*queryset_list[1:], all=True) + day_num_dict = {} + for i in final_query_set: + if i['day_label'] in day_num_dict.keys(): + day_num_dict[i['day_label']].append(i) + else: + day_num_dict[i['day_label']] = [i] + day_num_data = [] + for _, day_label in daylist: + obj = {'day_label': day_label, 'day_num': 0} + for i in range(1, 5 + 1): + obj['day_num_level_' + str(i)] = 0 + if day_label in day_num_dict.keys(): + count = 0 + for i in day_num_dict[day_label]: + obj['day_num_level_' + str(i['level_id'])] = i['count'] + count += i['count'] + obj['day_num'] = count + day_num_data.append(obj) + data['day_num'] = day_num_data + levelInfo = IastVulLevel.objects.all() + levelIdArr = {} + levelNum = [] + if levelInfo: + for level_item in levelInfo: + levelIdArr[level_item.id] = level_item.name_value + levelNum.append({ + "level_id": level_item.id, + "level_name": level_item.name_value, + "num": levelCount.get(level_item.id, 0) + }) + data['level_count'] = levelNum + return data + + +def geneatre_vul_timerange_count_queryset( + vul_queryset: QuerySet, + time_gt: int, + time_lt: int, + day_label: str, +): + vul_stat = vul_queryset.filter( + latest_time__gt=time_gt, + latest_time__lt=time_lt).values("level_id").annotate( + count=Count('level_id'), + day_label=Value(day_label)).order_by('level_id').all() + return vul_stat diff --git a/dongtai_web/views/utils/commonview.py b/dongtai_web/views/utils/commonview.py new file mode 100644 index 000000000..2a00cfde5 --- /dev/null +++ b/dongtai_web/views/utils/commonview.py @@ -0,0 +1,69 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : commonview +# @created : 星期五 12月 03, 2021 11:01:55 CST +# +# @description : +###################################################################### + +from dongtai_common.models.sensitive_info import IastPatternType,IastSensitiveInfoRule +from rest_framework import serializers +from dongtai_common.endpoint import UserEndPoint, R +from rest_framework.serializers import ValidationError + + +class BatchStatusUpdateSerializer(serializers.Serializer): + ids = serializers.ListField(child=serializers.IntegerField()) + status = serializers.ChoiceField((-1, 0, 
1)) + + +class BatchStatusUpdateSerializerView(UserEndPoint): + serializer = BatchStatusUpdateSerializer + status_field = '' + def post(self, request): + data = self.get_params(request.data) + self.update_model(data) + return R.success(msg='update success') + + def get_params(self, data): + ser = self.serializer(data=data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return {'ids': [], 'status': 0} + return ser.validated_data + + def update_model(self, request, validated_data): + self.model.objects.filter(pk__in=validated_data['ids'], + user__in=[request.user]).update(**{ + self.status_field: + validated_data['status'] + }) + + +class AllStatusUpdateSerializer(serializers.Serializer): + status = serializers.ChoiceField((-1, 0, 1)) + + +class AllStatusUpdateSerializerView(UserEndPoint): + serializer = AllStatusUpdateSerializer + status_field = 'status' + + def post(self, request): + data = self.get_params(request.data) + self.update_model(data) + return R.success(msg='update success') + + def get_params(self, data): + ser = self.serializer(data=data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return {'status': 0} + return ser.validated_data + + def update_model(self, request, validated_data): + self.model.objects.filter(user__in=[request.user]).update( + **{self.status_field: validated_data['status']}) diff --git a/dongtai_web/views/version_update.py b/dongtai_web/views/version_update.py new file mode 100644 index 000000000..20dcc0696 --- /dev/null +++ b/dongtai_web/views/version_update.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi +import base64 +import logging + +from dongtai_common.endpoint import R, TalentAdminEndPoint +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.profile import IastProfile +from django.utils.translation import gettext_lazy as _ + +logger = logging.getLogger('dongtai-webapi') + + +class MethodPoolVersionUpdate(TalentAdminEndPoint): + def get(self, request): + profile_model = IastProfile.objects.filter(key='enable_update').first() + if profile_model is None or profile_model.value != 'TRUE': + return R.failure(msg=_('Updated is currently not allowed')) + method_pools = MethodPool.objects.all() + length = 5 + index = 0 + while True: + start = index * length + end = (index + 1) * length + print(start) + print(end) + sub_method_pools = method_pools.values('id', 'http_method', 'req_header', 'uri', 'req_params', + 'http_protocol', 'res_header')[start:end] + for method_pool in sub_method_pools: + id = method_pool['id'] + http_method = method_pool['http_method'] + req_header = method_pool['req_header'] + uri = method_pool['uri'] + req_params = method_pool['req_params'] + http_protocol = method_pool['http_protocol'] + res_header = method_pool['res_header'] + MethodPool.objects.filter(id=id).update( + req_header_fs=build_request_header(http_method, req_header, uri, req_params, http_protocol), + res_header=base64_decode(res_header) + ) + if len(sub_method_pools) == length: + index = index + 1 + else: + break + profile_model.value = 'FALSE' + profile_model.save(update_fields=['value']) + return R.success(msg=_('Update completed')) + + +def base64_decode(raw): + try: + return base64.b64decode(raw).decode('utf-8').strip() + except Exception as decode_error: + logger.error(f'base64 decode error, raw: {raw}\nreason:{decode_error}') + return "" + + +def build_request_header(req_method, raw_req_header, uri, 
query_params, + http_protocol): + decode_req_header = base64_decode(raw_req_header) + return f"{req_method} {uri + ('?' + query_params if query_params else '')} {http_protocol}\n{decode_req_header}" diff --git a/dongtai_web/views/vul_count_for_plugin.py b/dongtai_web/views/vul_count_for_plugin.py new file mode 100644 index 000000000..1f138363f --- /dev/null +++ b/dongtai_web/views/vul_count_for_plugin.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.vulnerablity import IastVulnerabilityModel + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import MixinAuthEndPoint +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + +_ResponseSerializer = get_response_serializer( + data_serializer=serializers.IntegerField(), ) + + +class VulCountForPluginEndPoint(MixinAuthEndPoint): + @extend_schema_with_envcheck( + [ + { + 'name': "name", + 'type': str, + }, + ], + tags=[_('Vulnerability')], + summary=_("Vulnerability Count (with agent name)"), + description=_( + "Get the number of vulnerabilities corresponding to the Agent."), + response_schema=_ResponseSerializer, + ) + def get(self, request): + agent_name = request.query_params.get('name') + if not agent_name: + return R.failure(msg=_("Please input agent name.")) + + agent = IastAgent.objects.filter(token=agent_name, + id__in=self.get_auth_agents_with_user( + request.user)).first() + if not agent: + return R.failure(msg=_("agent_name not found")) + + return R.success( + data=IastVulnerabilityModel.objects.values('id').filter( + agent=agent).count()) diff --git a/dongtai_web/views/vul_delete.py b/dongtai_web/views/vul_delete.py new file mode 100644 index 000000000..14f014546 --- /dev/null +++ b/dongtai_web/views/vul_delete.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from rest_framework.request import Request + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from django.utils.translation import gettext_lazy as _ +import logging +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +logger = logging.getLogger('dongtai-webapi') + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Deleted Successfully')), ''), + ((202, _('Deletion failed')), ''), +)) + + +class VulDelete(UserEndPoint): + name = 'api-v1-vul-delete-' + description = _('Delete vulnerability') + + @extend_schema_with_envcheck( + summary=_('Vulnerability Delete'), + tags=[_('Vulnerability')], + description=_( + "Delete the corresponding vulnerability by specifying the id"), + ) + def post(self, request, id): + """ + :param request: + :return: + """ + try: + IastVulnerabilityModel.objects.get( + id=id, + agent_id__in=self.get_auth_agents_with_user(request.user) + ).delete() + return R.success(msg=_('Deleted Successfully')) + except IastVulnerabilityModel.DoesNotExist as e: + return R.failure(msg=_('Failed to delete, error message: Vulnerability does not exist')) + except Exception as e: + logger.error(f'user_id:{request.user.id} msg:{e}') + return R.failure(msg=_('Deletion failed')) diff --git a/dongtai_web/views/vul_details.py 
b/dongtai_web/views/vul_details.py new file mode 100644 index 000000000..f81cd6f19 --- /dev/null +++ b/dongtai_web/views/vul_details.py @@ -0,0 +1,472 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import base64 +import json +import logging + +from dongtai_common.models.project import IastProject +from dongtai_common.models.project_version import IastProjectVersion +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.hook_type import HookType + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_web.serializers.vul import VulSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + +logger = logging.getLogger('dongtai-webapi') + + +class _VulDetailResponseDataServerSerializer(serializers.Serializer): + name = serializers.CharField() + hostname = serializers.CharField() + ip = serializers.CharField() + port = serializers.CharField() + container = serializers.CharField() + server_type = serializers.CharField() + container_path = serializers.CharField() + runtime = serializers.CharField() + environment = serializers.CharField() + command = serializers.CharField() + +class _VulDetailResponseDataStrategySerializer(serializers.Serializer): + desc = serializers.CharField() + sample_code = serializers.CharField() + repair_suggestion = serializers.CharField() + +class _VulDetailResponseDataVulSerializer(serializers.Serializer): + url = serializers.CharField() + uri = serializers.CharField() + agent_name = serializers.CharField() + http_method = serializers.CharField() + type = serializers.CharField() + taint_position = serializers.CharField() + first_time = serializers.IntegerField() + latest_time = serializers.IntegerField() + project_name = serializers.CharField(help_text=_('The name of project')) + project_version = serializers.CharField( + help_text=_("The version name of the project")) + language = serializers.CharField( + default=None, + help_text=_("programming language")) + level = serializers.CharField(help_text=_("The name of vulnerablity level")) + level_type = serializers.IntegerField(help_text=_("The id of vulnerablity level")) + counts = serializers.IntegerField() + request_header = serializers.CharField() + response = serializers.CharField() + graph = serializers.CharField() + context_path = serializers.CharField() + client_ip = serializers.CharField() + status = serializers.CharField() + taint_value = serializers.CharField() + param_name = serializers.CharField() + method_pool_id = serializers.IntegerField() + project_id = serializers.IntegerField(help_text=_("The id of the project")) + + +class _VulDetailResponseDataSerializer(serializers.Serializer): + vul = _VulDetailResponseDataVulSerializer() + server = _VulDetailResponseDataServerSerializer() + strategy = _VulDetailResponseDataStrategySerializer() + + +_ResponseSerializer = get_response_serializer(_VulDetailResponseDataSerializer()) + + +class VulDetail(UserEndPoint): + + def __init__(self, server=None, vul_id=None): + super().__init__() + self.server = server + self.vul_id = vul_id + + def get_server(self): + server = self.server + if server: + if not server.ip: + server.ip = "Unknown" + return { + 
'name': 'server.name', + 'hostname': server.hostname, + 'ip': server.ip, + 'port': server.port, + 'container': server.container if server.container else 'JavaApplication', + 'server_type': VulSerializer.split_container_name(server.container), + 'container_path': server.container_path, + 'runtime': server.runtime, + 'environment': server.env, + 'command': server.command + } + else: + return { + 'name': "", + 'hostname': "", + 'ip': "Unknown", + 'port': "", + 'container': "JavaApplication", + 'server_type': "", + 'container_path': "", + 'runtime': "", + 'environment': "", + 'command': "" + } + + def parse_graphy(self, graphy): + """ + + :param graphy: [{"classname": "org.apache.struts2.dispatcher.StrutsRequestWrapper", "methodname": "getParameter", "in": ", "out": "desc", "stack": "javax.servlet.ServletRequestWrapper.getParameter(ServletRequestWrapper.java)"}, {"classname": "java.lang.StringBuilder", "methodname": "append", "in": "desc", "out": "select host,user from user where user=+desc order by host ", "stack": "java.lang.StringBuilder.append(StringBuilder.java)"}, {"classname": "java.lang.StringBuilder", "methodname": "toString", "in": "select host,user from user where user=+desc order by host ", "out": "select host,user from user where user=+desc order by host ", "stack": "java.lang.StringBuilder.toString(StringBuilder.java)"}, {"classname": "com.mysql.jdbc.JDBC4Connection", "methodname": "prepareStatement", "in": "select host,user from user where user=+desc order by host ", "out": "NULL", "stack": "com.mysql.jdbc.ConnectionImpl.prepareStatement(ConnectionImpl.java)"}] + :return: + """ + import json + results = [] + try: + if graphy is None: + return results + method_note_pool = json.loads(graphy)[0] + method_counts = len(method_note_pool) + for i in range(method_counts): + method = method_note_pool[i] + if not isinstance(method, dict): + # 有错误数据情况,跳过 fix me + continue + class_name = method['originClassName'] if 'originClassName' in method else method['className'] + method_name = method['methodName'] + source = ', '.join([str(_hash) for _hash in method['sourceHash']]) + target = ', '.join([str(_hash) for _hash in method['targetHash']]) + _item = f"{method['callerClass']}.{method['callerMethod']}()" + filename = method['callerClass'] + line_number = method['callerLineNumber'] + if i == 0: + data_type = _('Source method') + elif i == method_counts - 1: + data_type = _('Hazardous method') + else: + data_type = _('Propagation method') + # data_type 有 lazy 方法,需要转str,否则无法json.dumps + final_res = method.copy() + final_res.update({ + 'type': str(data_type), + 'file': filename, + 'caller': _item, + 'line_number': line_number, + 'class': class_name, + 'method': method_name, + 'source': source, + 'source_value': method.get('sourceValues', None), + 'target': target, + 'target_value': method.get('targetValues', None), + 'node': f'{class_name}.{method_name}()', + 'tag': method.get('tag', None), + 'code': htmlescape(method.get('code', None)), + }) + results.append(final_res) + except Exception as e: + logger.error(_('Analysis of errovence analysis of stain call diagram: {}').format(__name__,e),exc_info=True) + return results + + @staticmethod + def parse_request(method, uri, query_param, protocol, header, data): + _data = f"{method} {uri}?{query_param} {protocol}\n" if query_param else f"{method} {uri} {protocol}\n" + try: + _data = _data + (base64.b64decode(header.encode("utf-8")).decode("utf-8") if header else '') + except Exception as e: + logger.error(_('Error analysis of Header, error message: 
{}').format(e)) + if data: + _data = _data + "\n" + data + return _data + + @staticmethod + def parse_response(header, body): + return '{header}\n\n{body}'.format(header=header, body=body) + + def get_vul(self, auth_agents): + vul = IastVulnerabilityModel.objects.filter(id=self.vul_id).first() + hook_type = HookType.objects.filter(pk=vul.hook_type_id).first() if vul is not None else None + hook_type_name = hook_type.name if hook_type else None + strategy = IastStrategyModel.objects.filter(pk=vul.strategy_id).first() + strategy_name = strategy.vul_name if strategy else None + type_ = list( + filter(lambda x: x is not None, [strategy_name, hook_type_name])) + vul.type = type_[0] if type_ else '' + status = IastVulnerabilityStatus.objects.filter(pk=vul.status_id).first() + vul.status_ = status.name if status else '' + agent = vul.agent + project_id = agent.bind_project_id + if project_id is None or project_id == 0: + project = None + else: + project = IastProject.objects.values("name").filter(id=project_id).first() + + project_version_id = agent.project_version_id + if project_version_id: + project_version = IastProjectVersion.objects.values('version_name').filter(id=project_version_id).first() + if project_version: + project_version_name = project_version['version_name'] + else: + project_version_name = '' + else: + project_version_name = '' + try: + self.server = agent.server + except Exception as e: + logger.error(_('[{}] Vulnerability information parsing error, error message: {}').format(__name__,e)) + self.server = {} + self.vul_name = vul.type + + return { + 'url': + vul.url, + 'uri': + vul.uri, + 'agent_name': + agent.token, + 'http_method': + vul.http_method, + 'type': + vul.type, + 'taint_position': + vul.taint_position, + 'first_time': + vul.first_time, + 'latest_time': + vul.latest_time, + 'project_name': + project['name'] + if project else _('The application has not been binded'), + 'project_version': + project_version_name, + 'language': + agent.language, + 'level': + vul.level.name_value, + 'level_type': + vul.level.id, + 'counts': + vul.counts, + 'req_header': + htmlescape(self.parse_request(vul.http_method, vul.uri, vul.req_params, + vul.http_protocol, vul.req_header, + vul.req_data)) if is_need_http_detail(strategy_name) else '', + 'response': + htmlescape(self.parse_response(vul.res_header, vul.res_body)) if is_need_http_detail(strategy_name) else '', + 'graph': + self.parse_graphy(vul.full_stack), + 'context_path': + vul.context_path, + 'client_ip': + vul.client_ip, + 'status': + vul.status_, + 'taint_value': + vul.taint_value, + 'param_name': + parse_param_name(vul.param_name) if vul.param_name else {}, + 'method_pool_id': + vul.method_pool_id, + 'project_id': + project_id, + 'is_need_http_detail': is_need_http_detail(strategy_name), + } + + def get_strategy(self): + + strategy = IastStrategyModel.objects.filter( + vul_name=self.vul_name).first() + if strategy: + return { + 'desc': strategy.vul_desc, + 'sample_code': '', + 'repair_suggestion': strategy.vul_fix + } + else: + return {'desc': "", 'sample_code': '', 'repair_suggestion': ''} + + @extend_schema_with_envcheck(response_bodys=[{ + 'name': + _('Get data sample'), + 'description': + _("The aggregation results are programming language, risk level, vulnerability type, project" + ), + 'value': { + "status": 201, + "msg": "success", + "data": { + "vul": { + "url": "http://localhost:81/captcha/captchaImage", + "uri": "/captcha/captchaImage", + "agent_name": + "Mac OS X-localhost-v1.0.0-d24bf703ca62499ebdd12770708296f5", + 
"http_method": "GET", + "type": "Weak Random Number Generation", + "taint_position": None, + "first_time": 1631089870, + "latest_time": 1631089961, + "project_name": "demo-4.6.1", + "project_version": "V1.0", + "language": "JAVA", + "level": "LOW", + "level_type": 3, + "counts": 6, + "req_header": + "GET /captcha/captchaImage?type=math HTTP/1.1\nhost:localhost:81\nconnection:keep-alive\nsec-ch-ua:\"Google Chrome\";v=\"93\", \" Not;A Brand\";v=\"99\", \"Chromium\";v=\"93\"\nsec-ch-ua-mobile:?0\nuser-agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36\nsec-ch-ua-platform:\"macOS\"\naccept:image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8\nsec-fetch-site:same-origin\nsec-fetch-mode:no-cors\nsec-fetch-dest:image\nreferer:http://localhost:81/login\naccept-encoding:gzip, deflate, br\naccept-language:zh-CN,zh;q=0.9\ncookie:JSESSIONID=4bada2e5-d848-4218-8e24-3b28f765b986\n", + "response": "None\n\nNone", + "graph": None, + "context_path": "127.0.0.1", + "client_ip": "127.0.0.1", + "status": "Confirmed", + "taint_value": None, + "param_name": {}, + "method_pool_id": None, + "project_id": 69 + }, + "server": { + "name": "server.name", + "hostname": "localhost", + "ip": "localhost", + "port": 81, + "container": "Apache Tomcat/9.0.41", + "server_type": "apache tomcat", + "container_path": + "/Users/erzhuangniu/workspace/vul/demo-4.6.1", + "runtime": "OpenJDK Runtime Environment", + "environment": + "java.runtime.name=OpenJDK Runtime Environment, spring.output.ansi.enabled=always, project.name=demo-4.6.1, sun.boot.library.path=/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_292/Contents/Home/jre/lib, java.vm.version=25.292-b10, gop", + "command": "com.ruoyi.demoApplication" + }, + "strategy": { + "desc": + "Verifies that weak sources of entropy are not used.", + "sample_code": "", + "repair_suggestion": None + } + } + } + }], + summary=_('Vulnerability details'), + description= + _('Use the corresponding id of the vulnerability to query the details of the vulnerability' + ), + tags=[_('Vulnerability')], + response_schema=_ResponseSerializer) + def get(self, request, id): + """ + :param request: + :return: + """ + self.vul_id = id + self.auth_agents = self.get_auth_agents_with_user(request.user) + try: + return R.success( + data={ + 'vul': self.get_vul(self.auth_agents), + 'server': self.get_server(), + 'strategy': self.get_strategy() + } + ) + except Exception as e: + logger.error(_('[{}] Vulnerability information parsing error, error message: {}').format(__name__,e)) + return R.failure(msg=_('Vulnerability data query error')) + + +class VulDetailV2(VulDetail): + + def get_graph_and_headers(self, data): + res = {} + res['headers'] = { + 0: { + "agent_name": data['vul']['agent_name'], + "req_header": data['vul']['req_header'], + "response": data['vul']["response"] + } + } + res["graphs"] = [{ + 'graph': data['vul']['graph'], + "meta": { + "client_ip": data['vul']['client_ip'], + "server_ip": data['server']['ip'], + "middreware": data['server']['container'], + "language": data['vul']['language'], + "project_name": data["vul"]['project_name'], + "project_version": data["vul"]["project_version"], + "agent_name": data['vul']["agent_name"], + "taint_value": data['vul']["taint_value"], + "param_name": data['vul']["param_name"], + "url": data['vul']['url'], + } + }] + return res + + def get(self, request, id): + self.vul_id = id + self.auth_agents = self.get_auth_agents_with_user(request.user) + try: + 
data = { + 'vul': self.get_vul(self.auth_agents), + 'server': self.get_server(), + 'strategy': self.get_strategy() + } + data.update(self.get_graph_and_headers(data)) + return R.success(data=data) + except Exception as e: + logger.error( + _('[{}] Vulnerability information parsing error, error message: {}' + ).format(__name__, e),exc_info=True) + return R.failure(msg=_('Vulnerability data query error')) + + + + +def htmlescape(string): + return string.replace( + '<em>', "6350be97a65823fc42ddd9dc78e17ddf13ff693b").replace( + '</em>', "4d415116bf74985fbdb232cd954cd40392fbcd69").replace( + '<', + '&lt;').replace("4d415116bf74985fbdb232cd954cd40392fbcd69", + "</em>").replace( + "6350be97a65823fc42ddd9dc78e17ddf13ff693b", + "<em>") + +def is_need_http_detail(name): + return name not in ['硬编码'] + +def parse_param_name(param_name): + try: + res = json.loads(param_name) + return res + except BaseException: + return {} + + +if __name__ == '__main__': + vul = VulDetail() + graphy = '[{"classname": "org.apache.struts2.dispatcher.StrutsRequestWrapper", "methodname": "getParameter", "in": ", "out": "desc", "stack": "javax.servlet.ServletRequestWrapper.getParameter(ServletRequestWrapper.java)"}, {"classname": "java.lang.StringBuilder", "methodname": "append", "in": "desc", "out": "select host,user from user where user=+desc order by host ", "stack": "java.lang.StringBuilder.append(StringBuilder.java)"}, {"classname": "java.lang.StringBuilder", "methodname": "toString", "in": "select host,user from user where user=+desc order by host ", "out": "select host,user from user where user=+desc order by host ", "stack": "java.lang.StringBuilder.toString(StringBuilder.java)"}, {"classname": "com.mysql.jdbc.JDBC4Connection", "methodname": "prepareStatement", "in": "select host,user from user where user=+desc order by host ", "out": "NULL", "stack": "com.mysql.jdbc.ConnectionImpl.prepareStatement(ConnectionImpl.java)"}]' + vul.parse_graphy(graphy) diff --git a/dongtai_web/views/vul_levels.py b/dongtai_web/views/vul_levels.py new file mode 100644 index 000000000..c8fb78b1e --- /dev/null +++ b/dongtai_web/views/vul_levels.py @@ -0,0 +1,37 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : vul_levels +# @created : 星期五 11月 19, 2021 14:35:44 CST +# +# @description : +###################################################################### + + +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers +from dongtai_common.models.vul_level import IastVulLevel +from django.utils.translation import gettext_lazy as _ + +class IastVulLevelSerializers(serializers.ModelSerializer): + + class Meta: + fields = ['id', 'name_value'] + model = IastVulLevel + + +_ResponseSerializer =
get_response_serializer( + data_serializer=IastVulLevelSerializers(many=True), ) + +class VulLevelList(AnonymousAndUserEndPoint): + @extend_schema_with_envcheck( + tags=[_('Vul level list')], + summary=_('Vul level List'), + description=_("Get a list of vul level."), + response_schema=_ResponseSerializer, + ) + def get(self, request): + queryset = IastVulLevel.objects.all() + return R.success( + data=IastVulLevelSerializers(queryset, many=True).data) + diff --git a/dongtai_web/views/vul_list_for_plugin.py b/dongtai_web/views/vul_list_for_plugin.py new file mode 100644 index 000000000..de71a9c4a --- /dev/null +++ b/dongtai_web/views/vul_list_for_plugin.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.vulnerablity import IastVulnerabilityModel + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import MixinAuthEndPoint +from dongtai_web.serializers.vul import VulForPluginSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.text import format_lazy +from dongtai_web.utils import get_model_order_options + +_ResponseSerializer = get_response_serializer( + VulForPluginSerializer(many=True)) + + +class VulListEndPoint(MixinAuthEndPoint): + @extend_schema_with_envcheck( + [ + { + 'name': "page", + 'type': int, + 'default': 1, + 'required': False, + 'description': _('Page index'), + }, + { + 'name': "pageSize", + 'type': int, + 'default': 20, + 'required': False, + 'description': _('Number per page'), + }, + { + 'name': "name", + 'type': str, + 'description': _('Name of agent'), + }, + { + 'name': "url", + 'type': str, + 'description': _('The URL corresponding to the vulnerability'), + }, + { + 'name': + "order", + 'type': + str, + 'description': + format_lazy( + "{} : {}", _('Sorted index'), ",".join([ + 'id', 'hook_type_id', 'url', 'http_method', + 'top_stack', 'bottom_stack' + ])), + }, + ], + tags=[_('Vulnerability')], + summary=_("Vulnerability List (with agent name)"), + description= + _("Use the agent name to get the corresponding list of vulnerabilities" + ), + response_schema=_ResponseSerializer + ) + def get(self, request): + agent_name = request.query_params.get('name', None) + if not agent_name: + return R.failure(msg=_("Please input agent name.")) + + agent = IastAgent.objects.filter(token=agent_name, + id__in=self.get_auth_agents_with_user( + request.user)).first() + if not agent: + return R.failure(msg=_("agent_name not found")) + + queryset = IastVulnerabilityModel.objects.values( + 'id', 'hook_type_id', 'url', 'http_method', 'top_stack', + 'bottom_stack', 'level_id').filter(agent=agent) + + if queryset: + url = request.query_params.get('url', None) + if url and url != '': + queryset = queryset.filter(url__icontains=url) + + order = request.query_params.get('order', '-latest_time') + if order and order in get_model_order_options(IastVulnerabilityModel): + queryset = queryset.order_by(order) + + page = request.query_params.get('page', 1) + page_size = request.query_params.get("pageSize", 20) + page_summary, page_data = self.get_paginator(queryset, page, page_size) + + return R.success(page=page_summary, data=VulForPluginSerializer(page_data, many=True).data) + else: + return R.success(page=[], data=[]) diff --git a/dongtai_web/views/vul_recheck.py b/dongtai_web/views/vul_recheck.py new file mode 
100644 index 000000000..68079b73c --- /dev/null +++ b/dongtai_web/views/vul_recheck.py @@ -0,0 +1,251 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import logging + +import time +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project import IastProject +from dongtai_common.models.replay_queue import IastReplayQueue +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.utils.validate import Validate +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + +from dongtai_common.endpoint import R +from dongtai_common.utils import const +from dongtai_common.endpoint import UserEndPoint +from django.utils.translation import gettext_lazy as _ +from django.db.models import F +from django.db.models import Q +import threading + +from dongtai_web.vul_log.vul_log import log_recheck_vul + +logger = logging.getLogger('dongtai-webapi') + + +class VulReCheckDataSerializer(serializers.Serializer): + no_agent = serializers.BooleanField( + help_text=_('Whether the project does not exist agent')) + pending = serializers.IntegerField( + help_text=_('Waiting queue length for replay')) + recheck = serializers.IntegerField( + help_text=_('Success queue length for replay')) + checking = serializers.IntegerField( + help_text=_('Checking queue length for replay')) + + +_ResponseGetSerializer = get_response_serializer( + VulReCheckDataSerializer(), + status_msg_keypair=( + ((201, _('Handle success')), ''), + ((202, _('Item ID should not be empty')), ''), + ((202, _('Incorrect format parameter')), ''), + ((202, _('Batch playback error')), ''), + ((202, _('Current application has not been associated with probes and cannot be reproduced.')), ''), + ((202, _('No permission to access')), ''), + )) +_ResponsePostSerializer = get_response_serializer( + VulReCheckDataSerializer(), + status_msg_keypair=( + ((201, _('Handle success')), ''), + ((202, _('IDS should not be empty')), ''), + ((202, _('IDS must be: Vulnerability ID, Vulnerability ID Format')), ''), + ((202, _('Vulnerability replay error')), ''), + )) + + +class VulReCheck(UserEndPoint): + @staticmethod + def recheck(vul_queryset): + timestamp = int(time.time()) + waiting_count = 0 + success_count = 0 + re_success_count = 0 + opt_vul_queryset = vul_queryset.only('agent__id', 'id') + vul_ids = [i.id for i in opt_vul_queryset] + vul_id_agentmap = {i.id:i.agent_id for i in opt_vul_queryset} + history_replay_vul_ids = IastReplayQueue.objects.filter( + relation_id__in=vul_ids, + replay_type=const.VUL_REPLAY).order_by('relation_id').values_list( + 'relation_id', flat=True).distinct() + waiting_count = IastReplayQueue.objects.filter( + Q(relation_id__in=vul_ids) + & Q(replay_type=const.VUL_REPLAY) + & Q(state__in=(const.PENDING, const.WAITING))).count() + re_success_count = IastReplayQueue.objects.filter( + Q(relation_id__in=[i.id for i in opt_vul_queryset]) + & Q(replay_type=const.VUL_REPLAY) + & ~Q(state__in=(const.PENDING, const.WAITING))).update( + state=const.WAITING, + count=F('count') + 1, + update_time=timestamp) + vuls_not_exist = set(vul_ids) - set(history_replay_vul_ids) + success_count = len(vuls_not_exist) + IastReplayQueue.objects.bulk_create( + [ + IastReplayQueue(agent_id=vul_id_agentmap[vul_id], + relation_id=vul_id, + state=const.WAITING, + count=1, + create_time=timestamp, + update_time=timestamp, + replay_type=const.VUL_REPLAY) + for vul_id in 
vuls_not_exist + ], + ignore_conflicts=True) + vul_queryset.update(status_id=1, latest_time=timestamp) + return waiting_count, success_count, re_success_count + + @staticmethod + def vul_check_for_queryset(vul_queryset): + active_agent_ids = IastAgent.objects.filter( + id__in=vul_queryset.values('agent_id'), + online=const.RUNNING, + is_core_running=const.CORE_IS_RUNNING).values( + "id").distinct().all() + no_agent = vul_queryset.filter(~Q( + agent_id__in=active_agent_ids)).count() + waiting_count, success_count, re_success_count = VulReCheck.recheck( + vul_queryset) + return no_agent, waiting_count, success_count, re_success_count + + @extend_schema_with_envcheck( + [{ + 'name': + 'type', + 'type': + str, + 'description': + _('''available options are ("all","project"). + Corresponding to all or specific project respectively.''') + }, { + 'name': + "projectId", + 'type': + int, + 'description': + _("""The corresponding id of the Project. + Only If the type is project, the projectId here will be used.""") + }], + tags=[_('Vulnerability')], + summary=_("Vulnerability verification"), + description=_("""Verify the user's corresponding vulnerabilities. + Need to specify the type"""), + response_schema=_ResponsePostSerializer + ) + def post(self, request): + """ + :param request: + :return: + """ + try: + vul_ids = request.data.get('ids') + if vul_ids is None or vul_ids == '': + return R.failure(_("IDS should not be empty")) + + vul_ids = vul_ids.split(',') + if Validate.is_number(vul_ids) is False: + return R.failure(_('IDS must be: Vulnerability ID, Vulnerability ID Format')) + + auth_agents = self.get_auth_agents_with_user(user=request.user) + vul_queryset = IastVulnerabilityModel.objects.filter( + id__in=vul_ids, agent__in=auth_agents) + no_agent, waiting_count, success_count, re_success_count = self.vul_check_for_queryset( + vul_queryset) + # 加重放日志 vul_ids + log_recheck_vul(request.user.id,request.user.username,vul_ids,"待验证") + + # def log_recheck_vul(user_id: int, user_name: str, vul_id: list, vul_status: str): + return R.success(data={ + "no_agent": no_agent, + "pending": waiting_count, + "recheck": re_success_count, + "checking": success_count + },msg=_('Handle success')) + + except Exception as e: + logger.error(f' msg:{e}') + return R.failure(msg=_('Vulnerability replay error')) + + def vul_check_for_project(self, project_id, auth_users): + try: + project_exist = IastProject.objects.values("id").filter( + id=project_id, user__in=auth_users).exists() + if project_exist: + agent_queryset = IastAgent.objects.values("id").filter( + bind_project_id=project_id) + if agent_queryset: + agent_ids = agent_queryset.values_list('id',flat=True) + vul_queryset = IastVulnerabilityModel.objects.filter( + agent_id__in=agent_ids) + waiting_count, success_count, re_success_count = self.recheck( + vul_queryset) + return True, waiting_count, re_success_count, success_count, None + else: + return False, 0, 0, 0, _( + 'Current application has not been associated with probes and cannot be reproduced.' + ) + else: + return False, 0, 0, 0, _('No permission to access') + except Exception as e: + logger.error(f' msg:{e}',exc_info=True) + return False, 0, 0, 0, _('Batch playback error') + + @extend_schema_with_envcheck( + [{ + 'name': + 'type', + 'type': + str, + 'description': + _('''available options are ("all","project"). + Corresponding to all or specific project respectively.''') + }, { + 'name': + "projectId", + 'type': + int, + 'description': + _("""The corresponding id of the Project. 
+ Only If the type is project, the projectId here will be used.""") + }], + tags=[_('Vulnerability')], + summary=_("Vulnerability verification"), + description=_("""Verify the user's corresponding vulnerabilities. + Need to specify the type"""), + response_schema=_ResponsePostSerializer + ) + def get(self, request): + + try: + check_type = request.query_params.get('type') + project_id = request.query_params.get('projectId') + if check_type == 'project' and not project_id: + return R.failure(msg=_("Item ID should not be empty")) + if check_type == 'all': + vul_queryset = IastVulnerabilityModel.objects.filter( + agent__in=self.get_auth_agents_with_user(request.user)) + no_agent, pending, recheck, checking = self.vul_check_for_queryset( + vul_queryset) + + elif check_type == 'project': + auth_users = self.get_auth_users(request.user) + status, pending, recheck, checking, msg = self.vul_check_for_project( + project_id, auth_users=auth_users) + no_agent = None + return R.success(data={ + "no_agent": no_agent if no_agent else 0, + "pending": pending, + "recheck": recheck, + "checking": checking + }, + msg=_("Handle success")) + return R.failure(msg=_("Incorrect format parameter")) + + except Exception as e: + logger.error(f'user_id:{request.user.id} msg:{e}', exc_info=True) + return R.failure(msg=_('Batch playback error')) diff --git a/dongtai_web/views/vul_recheck_v2.py b/dongtai_web/views/vul_recheck_v2.py new file mode 100644 index 000000000..4b790af45 --- /dev/null +++ b/dongtai_web/views/vul_recheck_v2.py @@ -0,0 +1,262 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import logging + +import time +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.project import IastProject +from dongtai_common.models.replay_queue import IastReplayQueue +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.utils.validate import Validate +from dongtai_web.aggregation.aggregation_common import turnIntListOfStr +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from rest_framework import serializers + +from dongtai_common.endpoint import R +from dongtai_common.utils import const +from dongtai_common.endpoint import UserEndPoint +from django.utils.translation import gettext_lazy as _ +from django.db.models import F +from django.db.models import Q +import threading +from dongtai_common.models.vul_recheck_payload import IastVulRecheckPayload +logger = logging.getLogger('dongtai-webapi') + + +_ResponseGetSerializer = get_response_serializer( + status_msg_keypair=( + ((201, _('Handle success')), ''), + ((202, _('Item ID should not be empty')), ''), + ((202, _('Incorrect format parameter')), ''), + ((202, _('Batch playback error')), ''), + ((202, _('Current application has not been associated with probes and cannot be reproduced.')), ''), + ((202, _('No permission to access')), ''), + )) +_ResponsePostSerializer = get_response_serializer( + status_msg_keypair=( + ((201, _('Handle success')), ''), + ((202, _('IDS should not be empty')), ''), + ((202, _('IDS must be: Vulnerability ID, Vulnerability ID Format')), ''), + ((202, _('Vulnerability replay error')), ''), + )) + + +class VulReCheckv2(UserEndPoint): + @staticmethod + def recheck(vul_queryset): + timestamp = int(time.time()) + waiting_count = 0 + success_count = 0 + re_success_count = 0 + opt_vul_queryset = vul_queryset.only('agent__id', 'id') + vul_ids = [i.id for i in opt_vul_queryset] + 
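# Map each vulnerability id to its agent id so replay-queue entries can be created against the right agent: one entry per configured recheck payload, or a single default entry when none exist +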
vul_id_agentmap = {i.id: i.agent_id for i in opt_vul_queryset} + history_replay_vul_ids = IastReplayQueue.objects.filter( + relation_id__in=vul_ids, + replay_type=const.VUL_REPLAY).order_by('relation_id').values_list( + 'relation_id', flat=True).distinct() + + waiting_count = IastReplayQueue.objects.filter( + Q(relation_id__in=vul_ids) + & Q(replay_type=const.VUL_REPLAY) + & Q(state__in=(const.PENDING, const.WAITING))).count() + re_success_count = IastReplayQueue.objects.filter( + Q(relation_id__in=[i.id for i in opt_vul_queryset]) + & Q(replay_type=const.VUL_REPLAY) + & ~Q(state__in=(const.PENDING, const.WAITING))).update( + state=const.WAITING, + count=F('count') + 1, + update_time=timestamp) + vuls_not_exist = set(vul_ids) # - set(history_replay_vul_ids) + success_count = len(vuls_not_exist) + vul_payload_dict = {} + for vul_id in vuls_not_exist: + vul_payload_dict[vul_id] = IastVulRecheckPayload.objects.filter( + strategy__iastvulnerabilitymodel__id=vul_id).values_list( + 'pk', flat=True).all() + replay_queue = [] + for key, value in vul_payload_dict.items(): + item = [ + IastReplayQueue(agent_id=vul_id_agentmap[vul_id], + relation_id=vul_id, + state=const.WAITING, + count=1, + create_time=timestamp, + update_time=timestamp, + replay_type=const.VUL_REPLAY, + payload_id=payload_id) for payload_id in value + ] + if not item: + item = [ + IastReplayQueue(agent_id=vul_id_agentmap[vul_id], + relation_id=vul_id, + state=const.WAITING, + count=1, + create_time=timestamp, + update_time=timestamp, + replay_type=const.VUL_REPLAY) + ] + replay_queue += item + IastReplayQueue.objects.bulk_create(replay_queue, + ignore_conflicts=True) + vul_queryset.update(status_id=1, latest_time=timestamp) + + return waiting_count, success_count, re_success_count + + @staticmethod + def vul_check_for_queryset(vul_queryset): + active_agent_ids = IastAgent.objects.filter( + id__in=vul_queryset.values('agent_id'), + online=const.RUNNING, + is_core_running=const.CORE_IS_RUNNING).values( + "id").distinct().all() + no_agent = vul_queryset.filter(~Q( + agent_id__in=active_agent_ids)).count() + waiting_count, success_count, re_success_count = VulReCheckv2.recheck( + vul_queryset) + return no_agent, waiting_count, success_count, re_success_count + + @extend_schema_with_envcheck( + [{ + 'name': + 'type', + 'type': + str, + 'description': + _('''available options are ("all","project"). + Corresponding to all or specific project respectively.''') + }, { + 'name': + "projectId", + 'type': + int, + 'description': + _("""The corresponding id of the Project. + Only If the type is project, the projectId here will be used.""") + }], + tags=[_('Vulnerability')], + summary=_("Vulnerability verification"), + description=_("""Verify the user's corresponding vulnerabilities. 
+ Need to specify the type"""), + response_schema=_ResponsePostSerializer + ) + def post(self, request): + """ + :param request: + :return: + """ + try: + + vul_ids = request.data.get("ids", "") + + user = request.user + # 超级管理员 + if user.is_system_admin(): + queryset = IastVulnerabilityModel.objects.filter(is_del=0) + # 租户管理员 or 部门管理员 + elif user.is_talent_admin() or user.is_department_admin: + users = self.get_auth_users(user) + user_ids = list(users.values_list('id', flat=True)) + queryset = IastVulnerabilityModel.objects.filter(is_del=0, agent__user_id__in=user_ids) + else: + # 普通用户 + queryset = IastVulnerabilityModel.objects.filter(is_del=0, agent__user_id=user.id) + ids_list = turnIntListOfStr(vul_ids) + + vul_queryset = queryset.filter(id__in=ids_list) + + no_agent, waiting_count, success_count, re_success_count = self.vul_check_for_queryset( + vul_queryset) + + return R.success(data={ + "no_agent": no_agent, + "pending": waiting_count, + "recheck": re_success_count, + "checking": success_count + },msg=_('Handle success')) + + except Exception as e: + logger.error(f' msg:{e}') + return R.failure(msg=_('Vulnerability replay error')) + + def vul_check_for_project(self, project_id, auth_users): + try: + project_exist = IastProject.objects.values("id").filter( + id=project_id, user__in=auth_users).exists() + if project_exist: + agent_queryset = IastAgent.objects.values("id").filter( + bind_project_id=project_id) + if agent_queryset: + agent_ids = agent_queryset.values_list('id',flat=True) + vul_queryset = IastVulnerabilityModel.objects.filter( + agent_id__in=agent_ids) + waiting_count, success_count, re_success_count = self.recheck( + vul_queryset) + return True, waiting_count, re_success_count, success_count, None + else: + return False, 0, 0, 0, _( + 'Current application has not been associated with probes and cannot be reproduced.' + ) + else: + return False, 0, 0, 0, _('No permission to access') + except Exception as e: + logger.error(f' msg:{e}',exc_info=True) + return False, 0, 0, 0, _('Batch playback error') + + @extend_schema_with_envcheck( + [{ + 'name': + 'type', + 'type': + str, + 'description': + _('''available options are ("all","project"). + Corresponding to all or specific project respectively.''') + }, { + 'name': + "projectId", + 'type': + int, + 'description': + _("""The corresponding id of the Project. + Only If the type is project, the projectId here will be used.""") + }], + tags=[_('Vulnerability')], + summary=_("Vulnerability verification"), + description=_("""Verify the user's corresponding vulnerabilities. 
+ Need to specify the type"""), + response_schema=_ResponsePostSerializer + ) + def get(self, request): + + try: + check_type = request.query_params.get('type') + project_id = request.query_params.get('projectId') + if check_type == 'project' and not project_id: + return R.failure(msg=_("Item ID should not be empty")) + if check_type == 'all': + vul_queryset = IastVulnerabilityModel.objects.filter( + agent__in=self.get_auth_agents_with_user(request.user)) + + def vul_check_thread(): + self.vul_check_for_queryset(vul_queryset) + t1 = threading.Thread(target=vul_check_thread, daemon=True) + t1.start() + return R.success(msg=_('Verification in progress')) + elif check_type == 'project': + auth_users = self.get_auth_users(request.user) + + def vul_check_thread(): + self.vul_check_for_project(project_id, + auth_users=auth_users) + t1 = threading.Thread(target=vul_check_thread, daemon=True) + t1.start() + return R.success(msg=_("Verification in progress")) + return R.failure(msg=_("Incorrect format parameter")) + + except Exception as e: + logger.error(f'user_id:{request.user.id} msg:{e}', exc_info=True) + return R.failure(msg=_('Batch playback error')) diff --git a/dongtai_web/views/vul_request_replay.py b/dongtai_web/views/vul_request_replay.py new file mode 100644 index 000000000..cb8e9dcb0 --- /dev/null +++ b/dongtai_web/views/vul_request_replay.py @@ -0,0 +1,320 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +import logging +from http.server import BaseHTTPRequestHandler +from io import BytesIO +import base64 +import time +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.replay_method_pool import IastAgentMethodPoolReplay +from dongtai_common.models.replay_queue import IastReplayQueue +from dongtai_common.models.user import User +from dongtai_common.utils import const +from dongtai_common.models.agent import IastAgent +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from django.utils.translation import gettext_lazy as _ +from collections import namedtuple +from dongtai_web.utils import extend_schema_with_envcheck + +logger = logging.getLogger('dongtai-webapi') + + +class HttpRequest(BaseHTTPRequestHandler): + def __init__(self, raw_request): + self.body = None + self.uri = None + self.params = None + self.rfile = BytesIO(raw_request.encode()) + self.raw_requestline = self.rfile.readline() + self.error_code = self.error_message = None + self.parse_request() + self.parse_path() + self.parse_body() + + def parse_body(self): + if self.body is None: + self.body = self.rfile.read().decode('utf-8') + return self.body + + def parse_path(self): + items = self.path.split('?') + self.uri = items[0] + self.params = '?'.join(items[1:]) + + +class RequestReplayEndPoint(UserEndPoint): + + @staticmethod + def check_replay_request(raw_request): + """ + :param replay_request: + :return: + """ + try: + replay_request = HttpRequest(raw_request=raw_request) + requests = { + 'method': + replay_request.command, + 'uri': + replay_request.uri, + 'params': + replay_request.params, + 'scheme': + replay_request.request_version, + 'header': + base64.b64encode(replay_request.headers.as_string().strip(). 
+ encode()).decode(), + 'body': + replay_request.body, + } + + return False, requests + except Exception as e: + logger.error(_('HTTP request parsing error, error message: {}').format(e)) + return True, None + + @staticmethod + def check_method_pool(method_pool_id, user): + """ + :param method_pool_id: + :param user: + :return: + status: True;False + model: methodpool;None + """ + if method_pool_id is None or method_pool_id == '': + return True, None + + auth_agents = RequestReplayEndPoint.get_auth_agents_with_user(user) + if method_pool_id == -1: + method_pool_model = namedtuple('MethodPool', ['id', 'agent']) + agent = namedtuple('MethodPool', ['id', 'is_running']) + agent.id = 0 + agent.is_running = 0 + method_pool_model.agent = agent + method_pool_model.id = -1 + else: + method_pool_model = MethodPool.objects.filter( + id=method_pool_id, agent__in=auth_agents).first() + if method_pool_model: + return False, method_pool_model + else: + return True, None + + @staticmethod + def check_agent_active(agent): + """ + :param agent: + :return: True ;False + """ + if not agent: + return True + return (agent.online == 0) + + @staticmethod + def send_request_to_replay_queue(relation_id, agent_id, replay_request, + replay_type): + """ + :param replay_request: + :param method_pool_model: + :return: 0、1 + """ + timestamp = int(time.time()) + replay_queue = IastReplayQueue.objects.filter( + replay_type=replay_type, + relation_id=relation_id, + agent_id=agent_id).first() + if replay_queue: + if replay_queue.state not in [ + const.WAITING, + ]: + replay_queue.state = const.WAITING + replay_queue.uri = replay_request['uri'] + replay_queue.method = replay_request['method'] + replay_queue.scheme = replay_request['scheme'] + replay_queue.header = replay_request['header'] + replay_queue.params = replay_request['params'] + replay_queue.body = replay_request['body'] + replay_queue.update_time = timestamp + replay_queue.agent_id = agent_id + replay_queue.save(update_fields=[ + 'uri', 'method', 'scheme', 'header', 'params', 'body', + 'update_time', 'agent_id', 'state', 'agent_id' + ]) + IastAgentMethodPoolReplay.objects.filter( + replay_id=replay_queue.id, + replay_type=replay_type).delete() + else: + replay_queue = IastReplayQueue.objects.create( + relation_id=relation_id, + replay_type=replay_type, + state=const.WAITING, + uri=replay_request['uri'], + method=replay_request['method'], + scheme=replay_request['scheme'], + header=replay_request['header'], + params=replay_request['params'], + body=replay_request['body'], + create_time=timestamp, + count=0, + update_time=timestamp, + agent_id=agent_id, + ) + return replay_queue.id + + @extend_schema_with_envcheck( + [], { + 'methodPoolId': 'int', + 'replayRequest': 'str', + 'agent_id': 'int', + 'replay_type': 'int' + }) + def post(self, request): + """ + :param request:{ + 'id':vul_id, + 'request': 'header' + } + :return: + """ + try: + + method_pool_id = request.data.get('methodPoolId') + replay_request = request.data.get('replayRequest') + agent_id = request.data.get('agent_id', None) + replay_type = request.data.get('replay_type', None) + if replay_type is not None and int(replay_type) not in [ + const.API_REPLAY, const.REQUEST_REPLAY + ]: + return R.failure(msg="replay_type error") + replay_type = const.REQUEST_REPLAY if replay_type is None else int( + replay_type) + + check_failure, method_pool_model = self.check_method_pool( + method_pool_id, request.user) + if check_failure: + return R.failure(msg=_( + 'Stain pool data does not exist or no permission to 
access')) + if agent_id: + agent = IastAgent.objects.filter(pk=agent_id).first() + check_failure = self.check_agent_active(agent) + if check_failure and agent is not None: + agent = IastAgent.objects.filter( + bind_project_id=agent.bind_project_id, + online=1).first() + check_failure = self.check_agent_active(agent) + else: + check_failure = self.check_agent_active( + method_pool_model.agent) + if check_failure: + return R.failure(msg=_( + 'The probe has been destroyed or suspended, please check the probe status' + )) + + check_failure, checked_request = self.check_replay_request( + raw_request=replay_request) + if check_failure: + return R.failure(msg=_('Replay request is illegal')) + if agent_id: + replay_id = self.send_request_to_replay_queue( + relation_id=method_pool_model.id, + agent_id=agent.id, + replay_request=checked_request, + replay_type=replay_type) + else: + replay_id = self.send_request_to_replay_queue( + relation_id=method_pool_model.id, + agent_id=method_pool_model.agent.id, + replay_request=checked_request, + replay_type=replay_type) + return R.success(msg=_('Relay request success'), + data={'replayId': replay_id}) + + except Exception as e: + print(e) + logger.error(f'user_id:{request.user.id} msg:{e}') + return R.failure(msg=_('Vulnerability replay error')) + + @staticmethod + def check_replay_data_permission(replay_id, auth_agents): + return IastReplayQueue.objects.values('id').filter( + id=replay_id, agent__in=auth_agents).exists() + + @staticmethod + def parse_response(header, body): + try: + _data = base64.b64decode(header.encode("utf-8")).decode("utf-8") + except Exception as e: + _data = '' + logger.error( + _('Response header analysis error, error message: {}'.format( + e))) + return '{header}\n\n{body}'.format(header=_data, body=body) + + @extend_schema_with_envcheck([ + { + 'name': "replayid", + 'type': int, + 'required': True, + }, + { + 'name': "replay_type", + 'type': int, + 'required': False, + 'description': "available options are (2,3)", + }, + ]) + def get(self, request): + replay_id = request.query_params.get('replayId') + # auth_agents = self.get_auth_agents_with_user(request.user) + replay_type = request.query_params.get('replay_type', None) + if replay_type is not None and int(replay_type) not in [ + const.API_REPLAY, const.REQUEST_REPLAY + ]: + return R.failure(msg="replay_type error") + replay_type = const.REQUEST_REPLAY if replay_type is None else int(replay_type) + replay_data = IastReplayQueue.objects.filter(id=replay_id).first() + if not replay_data: + return R.failure( + status=203, + msg=_( + 'Replay request does not exist or no permission to access') + ) + + if request.user.is_superuser == 1 or replay_data.agent.user_id == request.user.id: + pass + elif request.user.is_superuser == 2 and replay_data.agent.user_id != request.user.id: + # 部门鉴权 + talent = request.user.get_talent() + departments = talent.departments.all() + if replay_data.agent.user.get_department() not in departments: + replay_data = {} + elif replay_data.agent.user_id != request.user.id: + replay_data = {} + + if not replay_data: + return R.failure( + status=203, + msg=_( + 'Replay request does not exist or no permission to access') + ) + if replay_data.state != const.SOLVED: + return R.failure(msg=_('Replay request processing')) + replay_data = IastAgentMethodPoolReplay.objects.filter( + replay_id=replay_id, + replay_type=replay_type).values('res_header', 'res_body', + 'method_pool', 'id').first() + if replay_data: + return R.success( + data={ + 'response': + 
self.parse_response(replay_data['res_header'], + replay_data['res_body']), + 'method_pool_replay_id':replay_data['id'], + }) + else: + return R.failure(status=203, msg=_('Replay failed')) diff --git a/dongtai_web/views/vul_sidebar_index.py b/dongtai_web/views/vul_sidebar_index.py new file mode 100644 index 000000000..c132249d2 --- /dev/null +++ b/dongtai_web/views/vul_sidebar_index.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: idea +# project: lingzhi-webapi + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.hook_type import HookType +from dongtai_web.utils import extend_schema_with_envcheck, get_model_order_options +from django.utils.translation import gettext_lazy as _ +from django.utils.text import format_lazy +from dongtai_common.models.strategy import IastStrategyModel + +VALUES = [ + 'level', + 'latest_time', + 'language', + 'hook_type_id', + 'uri', +] + +class VulSideBarList(UserEndPoint): + @extend_schema_with_envcheck( + [{ + 'name': "page", + 'type': int, + 'default': 1, + 'required': False, + 'description': _('Page index'), + }, { + 'name': "pageSize", + 'type': int, + 'default': 20, + 'required': False, + 'description': _('Number per page'), + }, { + 'name': "language", + 'type': str, + 'description': _("programming language") + }, { + 'name': "type", + 'type': str, + 'deprecated': True, + 'description': _('Type of vulnerability'), + }, { + 'name': "hook_type_id", + 'type': str, + 'description': _('ID of the vulnerability type'), + }, { + 'name': "level", + 'type': str, + 'description': format_lazy("{} : {}", _('Level of vulnerability'), "1,2,3,4") + }, { + 'name': "url", + 'type': str, + 'description': _('The URL corresponding to the vulnerability'), + }, { + 'name': "order", + 'type': str, + 'description': format_lazy("{} : {}", _('Sorted index'), ",".join(VALUES)) + }], + tags=[_('Vulnerability')], + summary=_("Vulnerability List"), + description=_( + "Get the list of vulnerabilities corresponding to the user.")) + def get(self, request): + """ + :param request: + :return: + """ + queryset = IastVulnerabilityModel.objects.values( + 'level', + 'latest_time', + 'hook_type_id', + 'strategy_id', + 'uri', + ).filter(agent__in=self.get_auth_agents_with_user(request.user)) + + language = request.query_params.get('language', None) + if language: + queryset = queryset.filter(language=language) + + level = request.query_params.get('level', None) + if level: + queryset = queryset.filter(level=level) + + type = request.query_params.get('type', None) + hook_type_id = request.query_params.get('hook_type_id', None) + if hook_type_id: + queryset = queryset.filter(hook_type_id=hook_type_id) + elif type: + hook_type = HookType.objects.filter(name=type).first() + hook_type_id = hook_type.id if hook_type else None + if hook_type_id: + queryset = queryset.filter(hook_type_id=hook_type_id) + + url = request.query_params.get('url', None) + if url: + queryset = queryset.filter(url=url) + + order = request.query_params.get('order') + if order and order in get_model_order_options(IastVulnerabilityModel): + queryset = queryset.order_by(order) + else: + queryset = queryset.order_by('-latest_time') + + page = request.query_params.get('page', 1) + page_size = request.query_params.get('pageSize', 10) + page_summary, queryset = self.get_paginator(queryset, + page=page, + page_size=page_size)
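+ # Each paginated row is enriched below: the display "type" prefers the strategy's vul_name and falls back to the hook type's name
+ # e.g. a returned row might look like (illustrative values only): {"level": 1, "latest_time": 1631089961, "hook_type_id": 3, "strategy_id": 7, "uri": "/login", "type": "SQL Injection"}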
+ for obj in queryset: + hook_type = HookType.objects.filter(pk=obj['hook_type_id']).first() + hook_type_name = hook_type.name if hook_type else None + strategy = IastStrategyModel.objects.filter(pk=obj['strategy_id']).first() + strategy_name = strategy.vul_name if strategy else None + type_ = list( + filter(lambda x: x is not None, [strategy_name, hook_type_name])) + obj['type'] = type_[0] if type_ else '' + return R.success(page=page_summary, + total=page_summary['alltotal'], + data=[obj for obj in queryset]) diff --git a/dongtai_web/views/vul_status.py b/dongtai_web/views/vul_status.py new file mode 100644 index 000000000..47a31db62 --- /dev/null +++ b/dongtai_web/views/vul_status.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# project: dongtai-webapi + +# status +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +import logging +from dongtai_web.vul_log.vul_log import log_change_status + +logger = logging.getLogger('dongtai-webapi') + +_ResponseSerializer = get_response_serializer(status_msg_keypair=( + ((201, _('Vulnerability status is modified to {}')), ''), + ((202, _('Incorrect parameter')), ''), +)) + +class VulStatus(UserEndPoint): + name = "api-v1-vuln-status" + description = _("Modify the vulnerability status") + + @extend_schema_with_envcheck( + [], + [ + { + 'name': _("Update with status_id"), + "description": + _("Update vulnerability status with status id."), + 'value': { + 'id': 1, + 'status_id': 1 + }, + }, + { + 'name': _("Update with status name(Not recommended)"), + "description": + _("Update vulnerability status with status name."), + 'value': { + 'id': 1, + 'status': "str" + }, + }, + ], + [{ + 'name': + _('Get data sample'), + 'description': + _("The aggregation results are programming language, risk level, vulnerability type, project" + ), + 'value': { + "status": 201, + "msg": "Vulnerability status is modified to Confirmed" + } + }], + tags=[_('Vulnerability')], + summary=_("Vulnerability Status Modify"), + description=_("""Modify the vulnerability status of the specified id. + The status is specified by the following two parameters. + Status corresponds to the status noun and status_id corresponds to the status id. 
+ Both can be obtained from the vulnerability status list API, and status_id is preferred.""" + ), + response_schema=_ResponseSerializer, + ) + def post(self, request): + vul_id = request.data.get('vul_id') + vul_ids = request.data.get('vul_ids') + status_id = request.data.get('status_id') + user = request.user + user_id = user.id + # 超级管理员 + if not (isinstance(vul_id, int) or isinstance(vul_ids, list)): + return R.failure() + if not vul_ids: + vul_ids = [vul_id] + if user.is_system_admin(): + queryset = IastVulnerabilityModel.objects.filter(is_del=0) + # 租户管理员 or 部门管理员 + elif user.is_talent_admin() or user.is_department_admin: + users = self.get_auth_users(user) + user_ids = list(users.values_list('id', flat=True)) + queryset = IastVulnerabilityModel.objects.filter( + is_del=0, agent__user_id__in=user_ids) + else: + # 普通用户 + queryset = IastVulnerabilityModel.objects.filter( + is_del=0, agent__user_id=user_id) + vul_status = IastVulnerabilityStatus.objects.filter( + pk=status_id).first() + if vul_status: + queryset.filter(id__in=vul_ids).update(status_id=status_id) + ids = list( + queryset.filter(id__in=vul_ids).values_list('id', flat=True)) + log_change_status(user_id, request.user.username, ids, + vul_status.name) + return R.success() diff --git a/dongtai_web/views/vul_summary.py b/dongtai_web/views/vul_summary.py new file mode 100644 index 000000000..a6718b7ed --- /dev/null +++ b/dongtai_web/views/vul_summary.py @@ -0,0 +1,274 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from django.db.models import Count +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.strategy import IastStrategyModel + +from dongtai_web.base.agent import get_project_vul_count,get_hook_type_name, get_agent_languages +from dongtai_web.base.project_version import get_project_version, get_project_version_by_id +from django.utils.translation import gettext_lazy as _ +from dongtai_common.models.hook_type import HookType +from django.db.models import Q +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.text import format_lazy + +from dongtai_web.serializers.vul import VulSummaryResponseDataSerializer + +import copy,time + + +_ResponseSerializer = get_response_serializer(VulSummaryResponseDataSerializer()) + + +class VulSummary(UserEndPoint): + name = "rest-api-vulnerability-summary" + description = _("Applied vulnerability overview") + + + @extend_schema_with_envcheck( + [ + { + 'name': "language", + 'type': str, + 'description': _("programming language") + }, + { + 'name': "type", + 'type': str, + 'description': _('Type of vulnerability'), + }, + { + 'name': "project_name", + 'type': str, + 'deprecated': True, + 'description': _('Name of Project'), + }, + { + 'name': + "level", + 'type': + int, + 'description': + format_lazy("{} : {}", _('Level of vulnerability'), "1,2,3,4") + }, + { + 'name': "project_id", + 'type': int, + 'description': _('Id of Project'), + }, + { + 'name': + "version_id", + 'type': + int, + 'description': + _("The default is the current version id of the project.") + }, + { + 'name': "status", + 'type': str, + 'deprecated': True, + 'description': _('Name of status'), + }, + { + 'name': "status_id", + 'type': int, + 'description': _('Id of status'), + }, + { + 'name': "url", + 'type': 
str, + 'description': _('The URL corresponding to the vulnerability'), + }, + { + 'name': + "order", + 'type': + str, + 'description': + format_lazy( + "{} : {}", _('Sorted index'), ",".join( + ['type', 'type', 'first_time', 'latest_time', 'url'])) + }, + ], + [], + [{ + 'name': + _('Get data sample'), + 'description': + _("The aggregation results are programming language, risk level, vulnerability type, project" + ), + 'value': { + "status": 201, + "msg": "success", + "data": { + "language": [{ + "language": "JAVA", + "count": 136 + }, { + "language": "PYTHON", + "count": 0 + }], + "level": [{ + "level": "HIGH", + "count": 116, + "level_id": 1 + }, { + "level": "INFO", + "count": 0, + "level_id": 4 + }], + "type": [{ + "type": "Path Traversal", + "count": 79 + }, { + "type": "Arbitrary Server Side Forwards", + "count": 1 + }], + "projects": [{ + "project_name": "demo1", + "count": 23, + "id": 58 + }, { + "project_name": "demo4", + "count": 2, + "id": 69 + }, { + "project_name": "demo5", + "count": 1, + "id": 71 + }] + }, + "level_data": [] + } + }], + tags=[_('Vulnerability')], + summary=_('Vulnerability Summary'), + description= + _('Use the following conditions to view the statistics of the number of vulnerabilities in the project.' + ), + response_schema=_ResponseSerializer + ) + def get(self, request): + """ + :param request: + :return: + """ + + end = { + "status": 201, + "msg": "success", + "level_data": [], + "data": {} + } + + auth_users = self.get_auth_users(request.user) + auth_agents = self.get_auth_agents(auth_users) + queryset = IastVulnerabilityModel.objects.filter() + + language = request.query_params.get('language') + if language: + auth_agents = auth_agents.filter(language=language) + + project_id = request.query_params.get('project_id') + if project_id and project_id != '': + + version_id = request.GET.get('version_id', None) + if not version_id: + current_project_version = get_project_version( + project_id, auth_users) + else: + current_project_version = get_project_version_by_id(version_id) + auth_agents = auth_agents.filter( + bind_project_id=project_id, + project_version_id=current_project_version.get("version_id", 0) + ) + + queryset = queryset.filter(agent__in=auth_agents) + + status = request.query_params.get('status') + if status: + queryset = queryset.filter(status__name=status) + status_id = request.query_params.get('status_id') + if status_id: + queryset = queryset.filter(status_id=status_id) + + level = request.query_params.get('level') + if level: + try: + level = int(level) + except BaseException: + return R.failure(_("Parameter error")) + queryset = queryset.filter(level=level) + + vul_type = request.query_params.get('type') + if vul_type: + hook_types = HookType.objects.filter(name=vul_type).all() + strategys = IastStrategyModel.objects.filter(vul_name=vul_type).all() + q = Q(hook_type__in=hook_types) | Q(strategy__in=strategys) + queryset = queryset.filter(q) + + url = request.query_params.get('url') + if url and url != '': + queryset = queryset.filter(url__icontains=url) + + q = Q() + queryset = queryset.filter(q) + + agent_count = queryset.values('agent_id').annotate(count=Count('agent_id')) + end['data']['language'] = get_agent_languages(agent_count) + end['data']['projects'] = get_project_vul_count( + users=auth_users, + queryset=agent_count, + auth_agents=auth_agents, + project_id=project_id) + + # 汇总 level + vul_level = IastVulLevel.objects.all() + vul_level_metadata = {} + levelIdArr = {} + DEFAULT_LEVEL = {} + if vul_level: + for level_item in 
vul_level: + DEFAULT_LEVEL[level_item.name_value] = 0 + vul_level_metadata[level_item.name_value] = level_item.id + levelIdArr[level_item.id] = level_item.name_value + level_summary = queryset.values('level').order_by('level').annotate(total=Count('level')) + for temp in level_summary: + DEFAULT_LEVEL[levelIdArr[temp['level']]] = temp['total'] + end['data']['level'] = [{ + 'level': _key, 'count': _value, 'level_id': vul_level_metadata[_key] + } for _key, _value in DEFAULT_LEVEL.items()] + + # 汇总 type + type_summary = queryset.values( + 'hook_type_id', 'strategy_id', 'hook_type__name', + 'strategy__vul_name').order_by('hook_type_id').annotate( + total=Count('hook_type_id')) + type_summary = list(type_summary) + + vul_type_list = [{ + "type": get_hook_type_name(_).lower().strip(), + "count": _['total'] + } for _ in type_summary] + + tempdic = {} + for vul_type in vul_type_list: + if tempdic.get(vul_type['type'], None): + tempdic[vul_type['type']]['count'] += vul_type['count'] + else: + tempdic[vul_type['type']] = vul_type + vul_type_list = tempdic.values() + end['data']['type'] = sorted(vul_type_list, key=lambda x: x['count'], reverse=True) + return R.success(data=end['data'], level_data=end['level_data']) + + + + + diff --git a/dongtai_web/views/vul_summary_project.py b/dongtai_web/views/vul_summary_project.py new file mode 100644 index 000000000..80554c277 --- /dev/null +++ b/dongtai_web/views/vul_summary_project.py @@ -0,0 +1,209 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from django.db.models import Count +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.strategy import IastStrategyModel + +from dongtai_web.base.agent import get_project_vul_count,get_hook_type_name,get_agent_languages +from dongtai_web.base.project_version import get_project_version, get_project_version_by_id +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.serializers.vul import VulSummaryResponseDataSerializer +from django.utils.text import format_lazy + +from dongtai_common.models.hook_type import HookType +from django.db.models import Q +_ResponseSerializer = get_response_serializer(VulSummaryResponseDataSerializer()) + + + +class VulSummaryProject(UserEndPoint): + name = "rest-api-vulnerability-summary-project" + description = _("Applied vulnerability overview") + + @extend_schema_with_envcheck( + [ + { + 'name': "language", + 'type': str, + 'description': _("programming language") + }, + { + 'name': "type", + 'type': str, + 'description': _('Type of vulnerability'), + }, + { + 'name': "project_name", + 'type': str, + 'deprecated': True, + 'description': _('Name of Project'), + }, + { + 'name': + "level", + 'type': + int, + 'description': + format_lazy("{} : {}", _('Level of vulnerability'), "1,2,3,4") + }, + { + 'name': "project_id", + 'type': int, + 'description': _('Id of Project'), + }, + { + 'name': + "version_id", + 'type': + int, + 'description': + _("The default is the current version id of the project.") + }, + { + 'name': "status", + 'type': str, + 'deprecated': True, + 'description': _('Name of status'), + }, + { + 'name': "status_id", + 'type': int, + 'description': _('Id of status'), + }, + { + 'name': "url", + 'type': str, + 'description': _('The URL corresponding to the vulnerability'), 
+ }, + { + 'name': + "order", + 'type': + str, + 'description': + format_lazy( + "{} : {}", _('Sorted index'), ",".join( + ['type', 'type', 'first_time', 'latest_time', 'url'])) + }, + ], + [], + [{ + 'name': + _('Get data sample'), + 'description': + _("The aggregation results are programming language, risk level, vulnerability type, project" + ), + 'value': { + "status": 201, + "msg": "success", + "data": { + "language": [{ + "language": "JAVA", + "count": 136 + }, { + "language": "PYTHON", + "count": 0 + }], + "projects": [{ + "project_name": "demo1", + "count": 23, + "id": 58 + },{ + "project_name": "demo5", + "count": 1, + "id": 71 + }] + }, + "level_data": [] + } + }], + tags=[_('Vulnerability')], + summary=_('Vulnerability Summary'), + description= + _('Use the following conditions to view the statistics of the number of vulnerabilities in the project.' + ), + response_schema=_ResponseSerializer + ) + def get(self, request): + """ + :param request: + :return: + """ + + end = { + "status": 201, + "msg": "success", + "level_data": [], + "data": {} + } + + auth_users = self.get_auth_users(request.user) + auth_agents = self.get_auth_agents(auth_users) + queryset = IastVulnerabilityModel.objects.filter() + + language = request.query_params.get('language') + if language: + auth_agents = auth_agents.filter(language=language) + + project_id = request.query_params.get('project_id') + if project_id and project_id != '': + + version_id = request.GET.get('version_id', None) + if not version_id: + current_project_version = get_project_version( + project_id, auth_users) + else: + current_project_version = get_project_version_by_id(version_id) + auth_agents = auth_agents.filter( + bind_project_id=project_id, + project_version_id=current_project_version.get("version_id", 0) + ) + + queryset = queryset.filter(agent__in=auth_agents) + + status = request.query_params.get('status') + if status: + queryset = queryset.filter(status__name=status) + status_id = request.query_params.get('status_id') + if status_id: + queryset = queryset.filter(status_id=status_id) + + level = request.query_params.get('level') + if level: + try: + level = int(level) + except BaseException: + return R.failure(_("Parameter error")) + queryset = queryset.filter(level=level) + + vul_type = request.query_params.get('type') + if vul_type: + hook_types = HookType.objects.filter(name=vul_type).all() + strategys = IastStrategyModel.objects.filter(vul_name=vul_type).all() + q = Q(hook_type__in=hook_types) | Q(strategy__in=strategys) + queryset = queryset.filter(q) + + url = request.query_params.get('url') + if url and url != '': + queryset = queryset.filter(url__icontains=url) + + q = Q() + queryset = queryset.filter(q) + + agent_count = queryset.values('agent_id').annotate(count=Count('agent_id')) + end['data']['language'] = get_agent_languages(agent_count) + end['data']['projects'] = get_project_vul_count( + users=auth_users, + queryset=agent_count, + auth_agents=auth_agents, + project_id=project_id) + + return R.success(data=end['data'], level_data=end['level_data']) + + diff --git a/dongtai_web/views/vul_summary_type.py b/dongtai_web/views/vul_summary_type.py new file mode 100644 index 000000000..e70025886 --- /dev/null +++ b/dongtai_web/views/vul_summary_type.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi +from django.db.models import Count +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from 
dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.strategy import IastStrategyModel + +from dongtai_web.base.agent import get_project_vul_count,get_hook_type_name +from dongtai_web.base.project_version import get_project_version, get_project_version_by_id +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.serializers.vul import VulSummaryResponseDataSerializer +from django.utils.text import format_lazy +from dongtai_common.models.hook_type import HookType +from django.db.models import Q +_ResponseSerializer = get_response_serializer(VulSummaryResponseDataSerializer()) + + +class VulSummaryType(UserEndPoint): + name = "rest-api-vulnerability-summary-type" + description = _("Applied vulnerability overview") + + @extend_schema_with_envcheck( + [ + { + 'name': "language", + 'type': str, + 'description': _("programming language") + }, + { + 'name': "type", + 'type': str, + 'description': _('Type of vulnerability'), + }, + { + 'name': "project_name", + 'type': str, + 'deprecated': True, + 'description': _('Name of Project'), + }, + { + 'name': + "level", + 'type': + int, + 'description': + format_lazy("{} : {}", _('Level of vulnerability'), "1,2,3,4") + }, + { + 'name': "project_id", + 'type': int, + 'description': _('Id of Project'), + }, + { + 'name': + "version_id", + 'type': + int, + 'description': + _("The default is the current version id of the project.") + }, + { + 'name': "status", + 'type': str, + 'deprecated': True, + 'description': _('Name of status'), + }, + { + 'name': "status_id", + 'type': int, + 'description': _('Id of status'), + }, + { + 'name': "url", + 'type': str, + 'description': _('The URL corresponding to the vulnerability'), + }, + { + 'name': + "order", + 'type': + str, + 'description': + format_lazy( + "{} : {}", _('Sorted index'), ",".join( + ['type', 'type', 'first_time', 'latest_time', 'url'])) + }, + ], + [], + [{ + 'name': + _('Get data sample'), + 'description': + _("The aggregation results are programming language, risk level, vulnerability type, project" + ), + 'value': { + "status": 201, + "msg": "success", + "data": { + "level": [{ + "level": "HIGH", + "count": 116, + "level_id": 1 + }, { + "level": "INFO", + "count": 0, + "level_id": 4 + }], + "type": [{ + "type": "Path Traversal", + "count": 79 + }, { + "type": "Insecure Hash Algorithms", + "count": 1 + }, { + "type": "Arbitrary Server Side Forwards", + "count": 1 + }] + }, + "level_data": [] + } + }], + tags=[_('Vulnerability')], + summary=_('Vulnerability Summary'), + description= + _('Use the following conditions to view the statistics of the number of vulnerabilities in the project.' 
+ ), + response_schema=_ResponseSerializer + ) + def get(self, request): + """ + :param request: + :return: + """ + + end = { + "status": 201, + "msg": "success", + "level_data": [], + "data": {} + } + + auth_users = self.get_auth_users(request.user) + auth_agents = self.get_auth_agents(auth_users) + queryset = IastVulnerabilityModel.objects.filter() + + language = request.query_params.get('language') + if language: + auth_agents = auth_agents.filter(language=language) + + project_id = request.query_params.get('project_id') + if project_id and project_id != '': + + version_id = request.GET.get('version_id', None) + if not version_id: + current_project_version = get_project_version( + project_id, auth_users) + else: + current_project_version = get_project_version_by_id(version_id) + auth_agents = auth_agents.filter( + bind_project_id=project_id, + project_version_id=current_project_version.get("version_id", 0) + ) + + queryset = queryset.filter(agent__in=auth_agents) + + status = request.query_params.get('status') + if status: + queryset = queryset.filter(status__name=status) + status_id = request.query_params.get('status_id') + if status_id: + queryset = queryset.filter(status_id=status_id) + + level = request.query_params.get('level') + if level: + try: + level = int(level) + except BaseException: + return R.failure(_("Parameter error")) + queryset = queryset.filter(level=level) + + vul_type = request.query_params.get('type') + if vul_type: + hook_types = HookType.objects.filter(name=vul_type).all() + strategys = IastStrategyModel.objects.filter(vul_name=vul_type).all() + q = Q(hook_type__in=hook_types) | Q(strategy__in=strategys) + queryset = queryset.filter(q) + + url = request.query_params.get('url') + if url and url != '': + queryset = queryset.filter(url__icontains=url) + + q = Q() + queryset = queryset.filter(q) + + # 汇总 level + vul_level = IastVulLevel.objects.all() + vul_level_metadata = {} + levelIdArr = {} + DEFAULT_LEVEL = {} + if vul_level: + for level_item in vul_level: + DEFAULT_LEVEL[level_item.name_value] = 0 + vul_level_metadata[level_item.name_value] = level_item.id + levelIdArr[level_item.id] = level_item.name_value + level_summary = queryset.values('level').order_by('level').annotate(total=Count('level')) + for temp in level_summary: + DEFAULT_LEVEL[levelIdArr[temp['level']]] = temp['total'] + end['data']['level'] = [{ + 'level': _key, 'count': _value, 'level_id': vul_level_metadata[_key] + } for _key, _value in DEFAULT_LEVEL.items()] + + # 汇总 type + type_summary = queryset.values( + 'hook_type_id', 'strategy_id', 'hook_type__name', + 'strategy__vul_name').order_by('hook_type_id').annotate( + total=Count('hook_type_id')) + type_summary = list(type_summary) + + vul_type_list = [{ + "type": get_hook_type_name(_).lower().strip(), + "count": _['total'] + } for _ in type_summary] + + tempdic = {} + for vul_type in vul_type_list: + if tempdic.get(vul_type['type'], None): + tempdic[vul_type['type']]['count'] += vul_type['count'] + else: + tempdic[vul_type['type']] = vul_type + vul_type_list = tempdic.values() + end['data']['type'] = sorted(vul_type_list, key=lambda x: x['count'], reverse=True) + + return R.success(data=end['data'], level_data=end['level_data']) + + diff --git a/dongtai_web/views/vulnerability_status.py b/dongtai_web/views/vulnerability_status.py new file mode 100644 index 000000000..6eb62b40a --- /dev/null +++ b/dongtai_web/views/vulnerability_status.py @@ -0,0 +1,88 @@ +###################################################################### +# @author : 
bidaya0 (bidaya0@$HOSTNAME) +# @file : vulnerability_status +# @created : Friday Aug 27, 2021 15:34:34 CST +# +# @description : +###################################################################### + + + +from dongtai_common.models.vulnerablity import IastVulnerabilityStatus +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from django.forms.models import model_to_dict +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers + + +class VulnerabilityStatusSerializer(serializers.Serializer): + id = serializers.IntegerField() + name = serializers.CharField() + name_en = serializers.CharField() + name_zh = serializers.CharField() + + class Meta: + model = IastVulnerabilityStatus + fields = ['id', 'name', 'name_en', 'name_zh'] + + +_ResponseSerializer = get_response_serializer( + VulnerabilityStatusSerializer(many=True)) + + +class VulnerabilityStatusView(UserEndPoint): + @extend_schema_with_envcheck( + [], + [], + [{ + 'name': + _('Get data sample'), + 'description': + _("The aggregation results are programming language, risk level, vulnerability type, project" + ), + 'value': { + "status": + 201, + "msg": + "success", + "data": [{ + "id": 1, + "name": "Pending", + "name_en": "Pending", + "name_zh": "\u5f85\u9a8c\u8bc1" + }, { + "id": 2, + "name": "Verifying", + "name_en": "Verifying", + "name_zh": "\u9a8c\u8bc1\u4e2d" + }, { + "id": 3, + "name": "Confirmed", + "name_en": "Confirmed", + "name_zh": "\u5df2\u786e\u8ba4" + }, { + "id": 4, + "name": "Ignore", + "name_en": "Ignore", + "name_zh": "\u5df2\u5ffd\u7565" + }, { + "id": 5, + "name": "Solved", + "name_en": "Solved", + "name_zh": "\u5df2\u5904\u7406" + }] + } + }], + tags=[_('Vulnerability')], + summary=_("Vulnerability Status List"), + description= + _("""Vulnerability status list, which contains the optional status of vulnerabilities. 
+ When calling the vulnerability status modification API, please obtain the vulnerability status data from this API first.""" + ), + response_schema=_ResponseSerializer + ) + def get(self, request): + status = IastVulnerabilityStatus.objects.all() + return R.success(data=[model_to_dict(_) for _ in status]) diff --git a/dongtai_web/views/vuls.py b/dongtai_web/views/vuls.py new file mode 100644 index 000000000..a4e59a587 --- /dev/null +++ b/dongtai_web/views/vuls.py @@ -0,0 +1,315 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author:owefsad +# software: PyCharm +# project: lingzhi-webapi + +from dongtai_common.endpoint import R +from dongtai_common.endpoint import UserEndPoint +from dongtai_common.models.vul_level import IastVulLevel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.strategy import IastStrategyModel + +from dongtai_web.base.agent import get_agents_with_project, get_user_project_name, \ + get_user_agent_pro, get_all_server +from dongtai_web.base.project_version import get_project_version, get_project_version_by_id +from dongtai_web.serializers.vul import VulSerializer +from django.utils.translation import gettext_lazy as _ +from dongtai_common.models.hook_type import HookType +from django.db.models import Q +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer + +from django.utils.text import format_lazy + +from dongtai_web.utils import get_model_order_options +from rest_framework import serializers + +class _VulsEndPointResponseSerializer(VulSerializer): + index = serializers.IntegerField() + project_name = serializers.CharField( + help_text=_('name of project'), + default=_("The application has not been binded")) + project_id = serializers.IntegerField(help_text=_('Id of Project'), + default=0) + server_name = serializers.CharField(default="JavaApplication") + server_type = serializers.CharField() + level_type = serializers.IntegerField() + level = serializers.CharField() + + class Meta: + model = VulSerializer.Meta.model + fields = VulSerializer.Meta.fields + [ + 'index', 'project_name', 'project_id', 'server_name', + 'server_type', 'level_type', 'level' + ] + + +_ResponseSerializer = get_response_serializer( + _VulsEndPointResponseSerializer(many=True)) + + +class VulsEndPoint(UserEndPoint): + + @extend_schema_with_envcheck( + [ + { + 'name': "page", + 'type': int, + 'default': 1, + 'required': False, + 'description': _('Page index'), + }, + { + 'name': "pageSize", + 'type': int, + 'default': 20, + 'required': False, + 'description': _('Number per page'), + }, + { + 'name': "language", + 'type': str, + 'description': _("programming language") + }, + { + 'name': "type", + 'type': str, + 'description': _('Type of vulnerability'), + }, + { + 'name': "project_name", + 'type': str, + 'deprecated': True, + 'description': _('name of project'), + }, + { + 'name': + "level", + 'type': + int, + 'deprecated': + True, + 'description': + format_lazy("{} : {}", _('Level of vulnerability'), "1,2,3,4") + }, + { + 'name': + "level", + 'type': + int, + 'description': + format_lazy("{} : {}", _('The id Level of vulnerability'), + "1,2,3,4") + }, + { + 'name': "project_id", + 'type': int, + 'description': _('Id of Project'), + }, + { + 'name': + "version_id", + 'type': + int, + 'description': + _("The default is the current version id of the project.") + }, + { + 'name': "status", + 'type': str, + 'deprecated': True, + 'description': _('Name of status'), + }, + { + 'name': "status_id", + 'type': int, + 
'description': _('Id of status'), + }, + { + 'name': "url", + 'type': str, + 'description': _('The URL corresponding to the vulnerability'), + }, + { + 'name': + "order", + 'type': + str, + 'description': + format_lazy( + "{} : {}", _('Sorted index'), ",".join( + ['type', 'level', 'first_time', 'latest_time', 'url'])) + }, + ], + [], + [{ + 'name': + _('Get data sample'), + 'description': + _("The aggregation results are programming language, risk level, vulnerability type, project" + ), + 'value': { + "status": + 201, + "msg": + "success", + "data": [{ + "id": 12024, + "type": "Weak Random Number Generation", + "hook_type_id": 45, + "url": "http://localhost:81/captcha/captchaImage", + "uri": "/captcha/captchaImage", + "agent_id": 820, + "level_id": 3, + "http_method": "GET", + "top_stack": None, + "bottom_stack": None, + "taint_position": None, + "latest_time": 1631092302, + "first_time": 1631092263, + "language": "JAVA", + "status": "Confirmed", + "index": 0, + "project_name": "demo", + "project_id": 71, + "server_name": "Apache Tomcat/9.0.41", + "server_type": "apache tomcat", + "level_type": 3, + "level": "LOW" + }], + "page": { + "alltotal": 1, + "num_pages": 1, + "page_size": 20 + } + } + }], + tags=[_('Vulnerability')], + summary=_("Vulnerability List (with project)"), + response_schema=_ResponseSerializer, + description=_( + "Get the list of vulnerabilities corresponding to the project"), + ) + def get(self, request): + """ + :param request: + :return: + """ + end = { + "status": 201, + "msg": "success", + "data": [] + } + auth_users = self.get_auth_users(request.user) + auth_agents = self.get_auth_agents(auth_users) + if auth_agents is None: + return R.success(page={}, data=[], msg=_('No data')) + + language = request.query_params.get('language') + if language: + auth_agents = auth_agents.filter(language=language) + + queryset = IastVulnerabilityModel.objects.values( + 'id', 'hook_type_id', 'url', 'uri', 'agent_id', 'level_id', + 'http_method', 'top_stack', 'bottom_stack', 'taint_position', + 'latest_time', 'first_time','strategy_id', + 'status_id').filter(agent__in=auth_agents) + + level = request.query_params.get('level') + if level: + try: + level = int(level) + except BaseException: + return R.failure(_("Parameter error")) + queryset = queryset.filter(level=level) + + type_ = request.query_params.get('type') + type_id = request.query_params.get('hook_type_id') + if type_id: + hook_type = HookType.objects.filter(pk=type_id).first() + hook_type_id = hook_type.id if hook_type else 0 + queryset = queryset.filter(hook_type_id=hook_type_id) + elif type_: + hook_types = HookType.objects.filter(name=type_).all() + strategys = IastStrategyModel.objects.filter(vul_name=type_).all() + q = Q(hook_type__in=hook_types,strategy_id=0) | Q(strategy__in=strategys) + queryset = queryset.filter(q) + + project_name = request.query_params.get('project_name') + if project_name: + agent_ids = get_agents_with_project(project_name, auth_users) + queryset = queryset.filter(agent_id__in=agent_ids) + + project_id = request.query_params.get('project_id') + if project_id: + + version_id = request.GET.get('version_id', None) + if not version_id: + current_project_version = get_project_version( + project_id, auth_users) + else: + current_project_version = get_project_version_by_id(version_id) + agents = auth_agents.filter( + bind_project_id=project_id, + project_version_id=current_project_version.get( + "version_id", 0)) + queryset = queryset.filter(agent_id__in=agents) + + agent_id = 
request.query_params.get('agent_id') + if agent_id: + queryset = queryset.filter(agent_id=agent_id) + + url = request.query_params.get('url', None) + if url and url != '': + queryset = queryset.filter(url__icontains=url) + status = request.query_params.get('status') + if status: + queryset = queryset.filter(status__name=status) + + status_id = request.query_params.get('status_id') + if status_id: + queryset = queryset.filter(status_id=status_id) + order = request.query_params.get('order') + if order and order in get_model_order_options( + IastVulnerabilityModel) + ['type', '-type']: + if order == 'type': + order = 'hook_type_id' + if order == '-type': + order = '-hook_type_id' + queryset = queryset.order_by(order) + else: + queryset = queryset.order_by('-latest_time') + + projects_info = get_user_project_name(auth_users) + agentArr = get_user_agent_pro(auth_users, projects_info.keys()) + agentPro = agentArr['pidArr'] + agentServer = agentArr['serverArr'] + server_ids = agentArr['server_ids'] + allServer = get_all_server(server_ids) + allType = IastVulLevel.objects.all().order_by("id") + allTypeArr = {} + if allType: + for item in allType: + allTypeArr[item.id] = item.name_value + page = request.query_params.get('page', 1) + page_size = request.query_params.get("pageSize", 20) + page_summary, page_data = self.get_paginator(queryset, page, page_size) + datas = VulSerializer(page_data, many=True).data + pro_length = len(datas) + if pro_length > 0: + for index in range(pro_length): + item = datas[index] + item['index'] = index + item['project_name'] = projects_info.get( + agentPro.get(item['agent_id'], 0), + _("The application has not been binded")) + item['project_id'] = agentPro.get(item['agent_id'], 0) + item['server_name'] = allServer.get( + agentServer.get(item['agent_id'], 0), "JavaApplication") + item['server_type'] = VulSerializer.split_container_name( + item['server_name']) + item['level_type'] = item['level_id'] + item['level'] = allTypeArr.get(item['level_id'], "") + end['data'].append(item) + end['page'] = page_summary + return R.success(page=page_summary, data=end['data']) diff --git a/dongtai_web/vul_log/__init__.py b/dongtai_web/vul_log/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dongtai_web/vul_log/test.py b/dongtai_web/vul_log/test.py new file mode 100644 index 000000000..edd28d46e --- /dev/null +++ b/dongtai_web/vul_log/test.py @@ -0,0 +1,25 @@ +from django.test import TestCase +from dongtai_web.vul_log.vul_log import (log_change_status, log_recheck_vul, + log_push_to_integration, log_vul_found) + + +class CreateLogTestCase(TestCase): + + def test_query_agent(self): + log_change_status(user_id=1, + user_name="hello", + vul_id=[1], + vul_status="已确认") + log_recheck_vul(user_id=1, + user_name="hello", + vul_id=[1], + vul_status="已确认") + log_push_to_integration(user_id=1, + user_name="hello", + vul_id=1, + integration_name="JIRA",source_vul_type=1) + log_vul_found(user_id=1, + project_name="helloproject", + project_id=1, + vul_id=1, + vul_name="已确认") diff --git a/dongtai_web/vul_log/vul_log.py b/dongtai_web/vul_log/vul_log.py new file mode 100644 index 000000000..227391fdd --- /dev/null +++ b/dongtai_web/vul_log/vul_log.py @@ -0,0 +1,81 @@ +from dongtai_common.models.iast_vul_log import (IastVulLog, MessageTypeChoices) + + +def log_change_status(user_id: int, user_name: str, vul_id: list, + vul_status: str): + kwargs = locals() + msg = f"id为{user_id}的用户{user_name}修改漏洞状态为{vul_status}" + IastVulLog.objects.bulk_create([ + IastVulLog( + 
msg_type=MessageTypeChoices.CHANGE_STATUS, + msg=msg, + meta_data=kwargs, + vul_id=v_id, + user_id=user_id, + ) + for v_id in vul_id + ]) + + +def log_recheck_vul(user_id: int, user_name: str, vul_id: list, + vul_status: str): + kwargs = locals() + msg = f"自动验证修改漏洞状态为{vul_status}" + IastVulLog.objects.bulk_create([ + IastVulLog( + msg_type=MessageTypeChoices.VUL_RECHECK, + msg=msg, + meta_data=kwargs, + vul_id=v_id, + user_id=user_id, + ) + for v_id in vul_id + ]) + + +def log_push_to_integration(user_id: int, user_name: str, vul_id: int, + integration_name: str,source_vul_type:int): + kwargs = locals() + msg = f"id为{user_id}的用户{user_name}推送漏洞到{integration_name}" + if source_vul_type ==1: + IastVulLog.objects.create( + msg_type=MessageTypeChoices.PUSH_TO_INTEGRATION, + msg=msg, + meta_data=kwargs, + vul_id=vul_id, + user_id=user_id, + ) + else: + IastVulLog.objects.create( + msg_type=MessageTypeChoices.PUSH_TO_INTEGRATION, + msg=msg, + meta_data=kwargs, + asset_vul_id=vul_id, + user_id=user_id, + ) + + +def log_vul_found(user_id: int, project_name: str, project_id: int, + vul_id: int, vul_name: str): + kwargs = locals() + msg = f"id为{project_id}的项目{project_name}检测到漏洞{vul_name}" + IastVulLog.objects.create( + msg_type=MessageTypeChoices.VUL_FOUND, + msg=msg, + meta_data=kwargs, + vul_id=vul_id, + user_id=user_id, + ) + + +def log_asset_vul_found(user_id: int, project_name: str, project_id: int, + asset_vul_id: int, vul_name: str): + kwargs = locals() + msg = f"id为{project_id}的项目{project_name}检测到漏洞{vul_name}" + IastVulLog.objects.create( + msg_type=MessageTypeChoices.VUL_FOUND, + msg=msg, + meta_data=kwargs, + asset_vul_id=asset_vul_id, + user_id=user_id, + ) diff --git a/dongtai_web/vul_log/vul_log_view.py b/dongtai_web/vul_log/vul_log_view.py new file mode 100644 index 000000000..27031933a --- /dev/null +++ b/dongtai_web/vul_log/vul_log_view.py @@ -0,0 +1,34 @@ +from dongtai_common.models.iast_vul_log import (IastVulLog, MessageTypeChoices) +from dongtai_common.utils import const +from dongtai_common.models.hook_type import HookType +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.document import IastDocument + +from dongtai_common.endpoint import R +from dongtai_common.utils import const +from dongtai_common.endpoint import UserEndPoint +from django.forms.models import model_to_dict +from django.db.models import Q +from rest_framework import serializers +from rest_framework.serializers import ValidationError +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from django.utils.translation import gettext_lazy as _ +from rest_framework import viewsets +from dongtai_web.common import VulType + +class VulLogViewSet(UserEndPoint, viewsets.ViewSet): + name = "api-v1-vul-log" + description = _("vul-log") + + def list(self, request, vul_id): + data = [] + auth_users = self.get_auth_users(request.user) + vul_type = VulType(int(request.query_params.get('vul_type', 1))) + if vul_type == VulType.APPLICATION: + data = IastVulLog.objects.filter(vul_id=vul_id, + user__in=auth_users).all() + if vul_type == VulType.ASSET: + data = IastVulLog.objects.filter(asset_vul_id=vul_id, + user__in=auth_users).all() + + return R.success([model_to_dict(i) for i in data]) diff --git a/dongtai_web/vul_recheck_payload/vul_recheck_payload.py b/dongtai_web/vul_recheck_payload/vul_recheck_payload.py new file mode 100644 index 000000000..a441b86cd --- /dev/null +++ b/dongtai_web/vul_recheck_payload/vul_recheck_payload.py @@ -0,0 +1,147 @@ +import time 
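A note on the vul_log helpers added above, before the recheck-payload viewset continues: each helper snapshots its call arguments with locals() and stores that dict as meta_data alongside a human-readable message, writing one IastVulLog row per vulnerability id. A minimal, framework-free sketch of the pattern follows; the function name and the returned dict shape are illustrative only, not part of this patch.

# Sketch of the audit-log pattern in dongtai_web/vul_log/vul_log.py
# (illustrative; the real helpers persist IastVulLog rows via bulk_create).
def log_change_status_sketch(user_id: int, user_name: str,
                             vul_id: list, vul_status: str) -> list:
    kwargs = locals()  # snapshot of the call arguments, kept as structured meta_data
    msg = f"user {user_name} (id {user_id}) changed vulnerability status to {vul_status}"
    # One log record per vulnerability id, mirroring the bulk_create loop above.
    return [{"msg": msg, "meta_data": kwargs, "vul_id": v_id, "user_id": user_id}
            for v_id in vul_id]

print(log_change_status_sketch(1, "hello", [1, 2], "Confirmed"))

Keeping the raw arguments in meta_data leaves the structured values queryable even though msg itself is free text.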
+ +from dongtai_common.endpoint import UserEndPoint, R, TalentAdminEndPoint +from dongtai_common.models.agent_config import IastAgentConfig +from django.utils.translation import gettext_lazy as _ +from dongtai_web.utils import extend_schema_with_envcheck, get_response_serializer +from dongtai_web.serializers.agent_config import AgentConfigSettingSerializer +from rest_framework.serializers import ValidationError +from rest_framework import viewsets +from dongtai_common.models.vul_recheck_payload import IastVulRecheckPayload +from rest_framework import serializers +from django.db.models import Q +from django.forms.models import model_to_dict + + +def get_or_none(classmodel, function, **kwargs): + try: + if function: + return function(classmodel.objects).get(**kwargs) + return classmodel.objects.get(**kwargs) + except classmodel.DoesNotExist: + return None + + +class IastVulRecheckPayloadSerializer(serializers.Serializer): + value = serializers.CharField() + status = serializers.IntegerField() + strategy_id = serializers.IntegerField() + language_id = serializers.IntegerField() + + +class IastVulRecheckPayloadListSerializer(serializers.Serializer): + keyword = serializers.CharField(required=False, default=None) + page = serializers.IntegerField() + page_size = serializers.IntegerField() + strategy_id = serializers.IntegerField(required=False, default=None) + language_id = serializers.IntegerField(required=False, default=None) + + +def vul_recheck_payload_create(data, user_id): + IastVulRecheckPayload.objects.create(strategy_id=data['strategy_id'], + user_id=user_id, + value=data['value'], + status=data['status'], + language_id=data['language_id']) + + +class AgentConfigSettingBatchV2Serializer(serializers.Serializer): + ids = serializers.ListField(child=serializers.IntegerField()) + + +def payload_update(pk, data): + IastVulRecheckPayload.objects.filter(pk=pk).update(**data) + + +class VulReCheckPayloadViewSet(UserEndPoint, viewsets.ViewSet): + name = "api-v1-vul-recheck-payload" + description = _("config recheck payload V2") + + def create(self, request): + ser = IastVulRecheckPayloadSerializer(data=request.data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + vul_recheck_payload_create(ser.data, request.user.id) + return R.success() + + def retrieve(self, request, pk): + obj = get_or_none( + IastVulRecheckPayload, + lambda x: x.values('id', 'user__username', 'strategy__vul_name', + 'value', 'user_id', 'strategy_id', 'status', + 'create_time', 'language_id'), + pk=pk, user_id=request.user.id) + if obj is None: + return R.failure() + return R.success(data=obj) + + def list(self, request): + ser = IastVulRecheckPayloadListSerializer(data=request.query_params) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + q = Q(user_id=request.user.id) & ~Q(status=-1) + keyword = ser.data['keyword'] + strategy_id = ser.data['strategy_id'] + language_id = ser.data['language_id'] + page = ser.data['page'] + page_size = ser.data['page_size'] + if keyword: + q = q & Q(value__icontains=keyword) + if strategy_id: + q = q & Q(strategy_id=strategy_id) + if language_id: + q = q & Q(language_id=language_id) + queryset = IastVulRecheckPayload.objects.filter(q).order_by( + '-create_time').values('id', 'user__username', + 'strategy__vul_name', 'value', 'user_id', + 'strategy_id', 'status', 'create_time', + 'language_id') + page_summary, page_data = self.get_paginator(queryset, page, page_size) + return 
R.success(page=page_summary, + data=list(page_data)) + + def update(self, request, pk): + ser = IastVulRecheckPayloadSerializer(data=request.data) + try: + if ser.is_valid(True): + pass + except ValidationError as e: + return R.failure(data=e.detail) + if IastVulRecheckPayload.objects.filter( + pk=pk, user_id=request.user.id).exists(): + payload_update(pk, ser.data) + return R.success() + return R.success() + + def delete(self, request, pk): + if IastVulRecheckPayload.objects.filter( + pk=pk, user_id=request.user.id).exists(): + IastVulRecheckPayload.objects.filter(pk=pk).update(status=-1) + return R.success() + return R.failure() + + def status_change(self, request): + mode = request.data.get('mode', 1) + q = ~Q(status=-1) & Q(user_id=request.user.id) + if mode == 1: + ids = request.data.get('ids', []) + status = request.data.get('status', 0) + q = q & Q(pk__in=ids) + IastVulRecheckPayload.objects.filter(q).update(status=status) + elif mode == 2: + status = request.data.get('status', 0) + q = q + IastVulRecheckPayload.objects.filter(q).update(status=status) + return R.success() + + def status_all(self, request): + status = request.data.get('status', 0) + q = ~Q(status=-1) + IastVulRecheckPayload.objects.filter(q).update(status=status) + return R.success() diff --git a/manage.py b/manage.py index 191b784f8..73d499c5a 100755 --- a/manage.py +++ b/manage.py @@ -5,7 +5,7 @@ def main(): - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'AgentServer.settings') + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dongtai_conf.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: diff --git a/readme.md b/readme.md deleted file mode 100644 index 13d415795..000000000 --- a/readme.md +++ /dev/null @@ -1,65 +0,0 @@ -# DongTai-openapi - -[![django-project](https://img.shields.io/badge/django%20versions-3.0.3-blue)](https://www.djangoproject.com/) -[![DongTai-project](https://img.shields.io/badge/DongTai%20versions-beta-green)](https://huoxianclub.github.io/LingZhi/) -[![DongTai--openapi](https://img.shields.io/badge/DongTai--openapi-v1.0.0-lightgrey)](https://huoxianclub.github.io/LingZhi/#/doc/tutorial/quickstart) -[![DongTai-OpenAPI-Build](https://github.com/HXSecurity/DongTai-openapi/actions/workflows/build-openapi.yml/badge.svg)](https://github.com/HXSecurity/DongTai-openapi/actions/workflows/build-openapi.yml) - -> 负责与agent相关的api服务,包括:接收agent注册信息、接收心跳信息、接收错误日志信息、接收组件信息、接收漏洞信息、接收权限变更信息、发送引擎控制命令、发送hook点策略、下载检测引擎; - -## 项目介绍 -“火线~洞态IAST”是一款专为甲方安全人员、代码审计工程师和0 Day漏洞挖掘人员量身打造的辅助工具,可用于集成devops环境进行漏洞检测、作为代码审计的辅助工具和自动化挖掘0 Day。 - -“火线~洞态IAST”具有五大模块,分别是`DongTai-webapi`、`DongTai-openapi`、`DongTai-engine`、`DongTai-web`、`agent`,其中: -- `DongTai-webapi`用于与`DongTai-web`交互,负责用户相关的API请求; -- `DongTai-openapi`用于与`agent`交互,处理agent上报的数据,向agent下发策略,控制agent的运行等 -- `DongTai-engine`用于对`DongTai-openapi`接收到的数据进行分析、处理,计算存在的漏洞和可用的污点调用链等 -- `DongTai-web`为“火线~洞态IAST”的前端项目,负责页面展示 -- `agent`为各语言的数据采集端,从安装探针的项目中采集相对应的数据,发送至`DongTai-openapi`服务 - - -### 应用场景 -“火线~洞态IAST”可应用于:`devsecops`阶段做自动化漏洞检测、开源软件/组件挖掘通用漏洞、上线前安全测试等场景,主要目的是降低现有漏洞检测的工作量,释放安全从业人员的生产力来做更专业的事情。 - -### 部署方案 - -**源码部署** - -1.配置安装`DongTai-webapi`服务 - -2.修改配置文件 - -复制配置文件`conf/config.ini.example`为`conf/config.ini`并需改其中的配置;其中: - -- `engine`对应的url为`DongTai-engine`的服务地址 -- `apiserver`对应的url为`DongTai-openapi`的服务地址 -- 数据库配置为`DongTai-webapi`服务所使用的数据库 - -3.运行服务 - -- 运行`python manage.py runserver`启动服务 - -**容器部署** - -1.确保已通过[DongTai-webapi](https://github.com/huoxianclub/DongTai-webapi#%E9%83%A8%E7%BD%B2%E6%96%B9%E6%A1%88)创建并部署数据库 - 
-2.修改配置文件 - -复制配置文件`conf/config.ini.example`为`conf/config.ini`并需改其中的配置;其中: -- `engine`对应的url为`DongTai-engine`的服务地址 -- `apiserver`对应的url为`DongTai-openapi`的服务地址 - -3.构建镜像 -``` -$ docker build -t huoxian/dongtai-openapi:latest . -``` - -4.启动容器 -``` -$ docker run -d -p 8002:8000 --restart=always --name dongtai-openapi huoxian/dongtai-openapi:latest -``` - - -### 文档 -- [官方文档](https://huoxianclub.github.io/LingZhi/#/) -- [快速体验](http://aws.iast.huoxian.cn:8000/login) diff --git a/requirements-prod.txt b/requirements-prod.txt new file mode 100644 index 000000000..f1d61b66e --- /dev/null +++ b/requirements-prod.txt @@ -0,0 +1,52 @@ +asgiref~=3.3.1 +certifi==2020.11.8 +cffi==1.14.4 +chardet==3.0.4 +Django~=3.2.12 +django-cors-headers==3.7.0 +django-filter==2.4.0 +django-ranged-response==0.2.0 +django-xff==1.3.0 +djangorestframework~=3.12.4 +django-rest-framework-proxy==1.6.0 +django-utils==0.0.2 +django-utils-six==2.0 +django-simple-captcha==0.5.14 +django-import-export==2.5.0 +oss2==2.13.1 +idna==2.10 +lxml==4.9.1 +mysqlclient +python-docx==0.8.10 +requests==2.25.1 +six==1.15.0 +urllib3==1.26.5 +xlwt==1.3.0 +uwsgi==2.0.19.1 +pyre2~=0.3.6 +celery==5.2.2 +redis==3.5.3 +python-docx==0.8.10 +openpyxl==3.0.9 +id-validator==1.0.20 +jq==1.2.1 +django-celery-beat==2.2.0 +django-cprofile-middleware +drf_spectacular==0.22.1 +django-modeltranslation==0.17.7 +pycryptodomex~=3.14.1 +simhash==2.1.2 +result~=0.8.0 +python-json-logger~=2.0.2 +jsonlog==4.0.0 +pymysql==1.0.2 +uwsgitop==0.11 +django-redis==5.2.0 +packaging==21.3 +docxtpl==0.16.0 +docxcompose==1.3.4 +django-elasticsearch-dsl==7.2.2 +asyncio-gevent==0.2.1 +gevent==21.12.0 +ddt==1.6.0 +boto3==1.24.59 diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 8637b4606..000000000 --- a/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -oss2==2.13.1 -Django==3.2.2 -djangorestframework==3.12.4 -M2Crypto==0.35.2 -mysqlclient==2.0.3 -uwsgi==2.0.19.1 -requests==2.25.1 diff --git a/static/assets/img/backup/favicon.ico b/static/assets/img/backup/favicon.ico new file mode 100644 index 000000000..e5e89a7f1 Binary files /dev/null and b/static/assets/img/backup/favicon.ico differ diff --git a/static/assets/img/backup/logo.png b/static/assets/img/backup/logo.png new file mode 100644 index 000000000..ab456165b Binary files /dev/null and b/static/assets/img/backup/logo.png differ diff --git a/static/assets/img/backup/logo_en.png b/static/assets/img/backup/logo_en.png new file mode 100644 index 000000000..9e075b1b9 Binary files /dev/null and b/static/assets/img/backup/logo_en.png differ diff --git a/static/assets/img/deploy/grayIcon.png b/static/assets/img/deploy/grayIcon.png new file mode 100644 index 000000000..907471fcf Binary files /dev/null and b/static/assets/img/deploy/grayIcon.png differ diff --git a/static/assets/img/deploy/java.png b/static/assets/img/deploy/java.png new file mode 100644 index 000000000..12e4d2447 Binary files /dev/null and b/static/assets/img/deploy/java.png differ diff --git a/static/assets/img/deploy/lightIcon.png b/static/assets/img/deploy/lightIcon.png new file mode 100644 index 000000000..759934aaa Binary files /dev/null and b/static/assets/img/deploy/lightIcon.png differ diff --git a/static/assets/img/deploy/python.png b/static/assets/img/deploy/python.png new file mode 100644 index 000000000..7ec1c7de6 Binary files /dev/null and b/static/assets/img/deploy/python.png differ diff --git a/static/assets/img/favicon.ico b/static/assets/img/favicon.ico new file mode 100644 index 000000000..e5e89a7f1 Binary files /dev/null 
and b/static/assets/img/favicon.ico differ diff --git a/static/assets/img/loginBg.png b/static/assets/img/loginBg.png new file mode 100644 index 000000000..fb1f2d0ee Binary files /dev/null and b/static/assets/img/loginBg.png differ diff --git a/static/assets/img/logo.png b/static/assets/img/logo.png new file mode 100644 index 000000000..043c71cfd Binary files /dev/null and b/static/assets/img/logo.png differ diff --git a/static/assets/img/logo_en.png b/static/assets/img/logo_en.png new file mode 100644 index 000000000..54a8c8aaa Binary files /dev/null and b/static/assets/img/logo_en.png differ diff --git a/static/assets/img/projectAdd.png b/static/assets/img/projectAdd.png new file mode 100644 index 000000000..6801376d5 Binary files /dev/null and b/static/assets/img/projectAdd.png differ diff --git a/static/assets/img/tag.png b/static/assets/img/tag.png new file mode 100644 index 000000000..2b542c51d Binary files /dev/null and b/static/assets/img/tag.png differ diff --git a/static/assets/img/tag2.png b/static/assets/img/tag2.png new file mode 100644 index 000000000..2bac8d644 Binary files /dev/null and b/static/assets/img/tag2.png differ diff --git a/static/assets/img/touxiang@2x.png b/static/assets/img/touxiang@2x.png new file mode 100644 index 000000000..89822ff1a Binary files /dev/null and b/static/assets/img/touxiang@2x.png differ diff --git a/static/assets/links/GO.png b/static/assets/links/GO.png new file mode 100644 index 000000000..a59dee7ed Binary files /dev/null and b/static/assets/links/GO.png differ diff --git a/static/assets/links/JAVA.png b/static/assets/links/JAVA.png new file mode 100644 index 000000000..093007d5e Binary files /dev/null and b/static/assets/links/JAVA.png differ diff --git a/static/assets/links/PHP.png b/static/assets/links/PHP.png new file mode 100644 index 000000000..e55a366e0 Binary files /dev/null and b/static/assets/links/PHP.png differ diff --git a/static/assets/links/PYTHON.png b/static/assets/links/PYTHON.png new file mode 100644 index 000000000..4908b89b9 Binary files /dev/null and b/static/assets/links/PYTHON.png differ diff --git a/static/assets/links/db.png b/static/assets/links/db.png new file mode 100644 index 000000000..1a112c089 Binary files /dev/null and b/static/assets/links/db.png differ diff --git a/static/assets/template/maven_sca.csv b/static/assets/template/maven_sca.csv new file mode 100644 index 000000000..1710ef30e --- /dev/null +++ b/static/assets/template/maven_sca.csv @@ -0,0 +1,2 @@ +group_id,atrifact_id,version,sha_1,package_name,license +1,2,3,4,5,6 \ No newline at end of file diff --git a/static/data/java_params.json b/static/data/java_params.json new file mode 100644 index 000000000..85e559418 --- /dev/null +++ b/static/data/java_params.json @@ -0,0 +1,122 @@ + { + "org.springframework.web.method.support.HandlerMethodArgumentResolver.resolveArgument": [ +- "GET", +- "POST", +- "HEADER", +- "PATH" +- ], + "org.springframework.web.method.annotation.ServletModelAttributeMethodProcessor.resolveArgument": [ +- "GET", +- "POST", +- "HEADER", +- "PATH" +- ], + "org.springframework.web.servlet.mvc.method.annotation.PathVariableMethodArgumentResolver.resolveName":["PATH"], + "org.springframework.web.bind.annotation.support.HandlerMethodInvoker.resolvePathVariable":["PATH"], + "javax.servlet.ServletRequest.getParameter": [ + "GET", + "POST" + ], + "javax.servlet.ServletRequest.getParameterValues": [ + "GET", + "POST" + ], + "javax.servlet.http.HttpServletRequest.getHeader": ["HEADER"], + 
"javax.servlet.http.HttpServletRequest.getQueryString": ["GET"], + "javax.servlet.http.HttpServletRequest.getCookies": ["COOKIE"], + "io.grpc.MethodDescriptor.parseRequest":["GRPC"], + "org.apache.struts2.dispatcher.multipart.MultiPartRequest.getParameterValues":[ + "GET", + "POST" + ], + "javax.servlet.ServletRequest.getInputStream":[ + "GET", + "POST" + ], + "javax.servlet.ServletRequest.getParameterNames()":[ + "GET", + "POST" + ], + "javax.servlet.ServletRequest.getParameterMap":[ + "GET", + "POST" + ], + "javax.servlet.ServletRequest.getReader":[ + "GET", + "POST" + ], + "javax.servlet.http.HttpServletRequest.getHeaderNames()":["HEADER"], + "javax.servlet.http.HttpServletRequest.getParts":[ + "GET", + "POST" + ], + "javax.servlet.http.HttpServletRequest.getPart":[ + "GET", + "POST" + ], + "javax.servlet.http.HttpServletRequest.getInputStream":[ + "GET", + "POST" + ], + "javax.servlet.http.HttpServletRequest.getHeader":["HEADER"], + "javax.servlet.http.HttpServletRequest.getCookies":["COOKIE"], + "javax.servlet.http.HttpServletRequest.getHeaders":["HEADER"], + "javax.servlet.http.HttpServletRequest.getQueryString":["GET"], + "javax.servlet.http.HttpServletRequest.getParameter":[ + "GET", + "POST" + ], + "javax.servlet.http.HttpServletRequest.getParameterValues":[ + "GET", + "POST" + ], + "javax.servlet.http.HttpServletRequest.getParameterNames()":[ + "GET", + "POST" + ], + "javax.servlet.http.HttpServletRequest.getParameterMap()":[ + "GET", + "POST" + ], + "javax.servlet.http.HttpServletRequest.getReader()":[ + "GET", + "POST" + ], + "org.apache.commons.fileupload.FileUploadBase.parseRequest(org.apache.commons.fileupload.RequestContext)":[ + "POST" + ], + "jakarta.servlet.ServletRequest.getInputStream":[ + "GET", + "POST" + ], + "jakarta.servlet.ServletRequest.getParameter":[ + "GET", + "POST" + ], + "jakarta.servlet.ServletRequest.getParameterNames":[ + "GET", + "POST" + ], + "jakarta.servlet.ServletRequest.getParameterValues":[ + "GET", + "POST" + ], + "jakarta.servlet.ServletRequest.getReader":[ + "GET", + "POST" + ], + "jakarta.servlet.http.HttpServletRequest.getHeader":["HEADER"], + "jakarta.servlet.http.HttpServletRequest.getHeaders":["HEADER"], + "jakarta.servlet.http.HttpServletRequest.getHeaderNames":["HEADER"], + "jakarta.servlet.http.HttpServletRequest.getParts":[ + "GET", + "POST" + ], + "jakarta.servlet.http.HttpServletRequest.getPart":[ + "GET", + "POST" + ], + "jakarta.servlet.http.HttpServletRequest.getCookies":["COOKIE"], + "jakarta.servlet.http.HttpServletRequest.getQueryString":["GET"], + "javax.servlet.http.HttpServletRequest.getSession":["SESSION"] + } diff --git a/static/i18n/en/LC_MESSAGES/django.po b/static/i18n/en/LC_MESSAGES/django.po new file mode 100644 index 000000000..8bd01f9e1 --- /dev/null +++ b/static/i18n/en/LC_MESSAGES/django.po @@ -0,0 +1,4010 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +msgid "" +msgstr "" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2022-03-27 21:46+0800\n" +"PO-Revision-Date: 2021-08-09 12:55+0800\n" +"Last-Translator: \n" +"Language-Team: \n" +"Language: en\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Application-Id-Version: \n" +"X-Generator: Poedit 2.2.2\n" + +#: apiserver/report/handler/api_route_handler.py:33 +#, fuzzy +#| msgid "Number of Agent" +msgid "No such agent" +msgstr "Number of Agent" + +#: apiserver/report/handler/api_route_handler.py:70 +#, fuzzy +#| msgid "Agent and related data deleted successfully" +msgid "API navigation log record successfully" +msgstr "Agent and related data deleted successfully" + +#: apiserver/report/handler/api_route_handler.py:75 +msgid "API navigation log failed, why: {}" +msgstr "" + +#: apiserver/report/handler/error_log_handler.py:38 +msgid "Error log report saving success" +msgstr "" + +#: apiserver/report/handler/error_log_handler.py:40 +msgid "Error log report saves failed, why: {}" +msgstr "" + +#: apiserver/report/handler/hardencode_vul_handler.py:33 +msgid "class name" +msgstr "" + +#: apiserver/report/handler/hardencode_vul_handler.py:36 +msgid "field" +msgstr "" + +#: apiserver/report/handler/heartbeat_handler.py:105 +#, fuzzy +#| msgid "Number of Agent" +msgid "There is no probe under the project" +msgstr "Number of Agent" + +#: apiserver/report/handler/heartbeat_handler.py:111 +#, fuzzy +#| msgid "Department does not exist" +msgid "Replay request does not exist" +msgstr "Department does not exist" + +#: apiserver/report/handler/heartbeat_handler.py:131 +#, fuzzy +#| msgid "Login successful" +msgid "Reproduction request issued successfully" +msgstr "Login successful" + +#: apiserver/report/handler/heartbeat_handler.py:135 +#, fuzzy +#| msgid "Password reset failed, error message: {E}" +msgid "Replay request query failed, reason: {}" +msgstr "Password reset failed, reasons: {E}" + +#: apiserver/report/handler/report_handler_interface.py:67 +msgid "[{}] Report resolution start" +msgstr "" + +#: apiserver/report/handler/report_handler_interface.py:75 +#, python-brace-format +msgid "[{classname}] Report Analysis Completed" +msgstr "" + +#: apiserver/report/handler/report_handler_interface.py:81 +#, python-brace-format +msgid "" +"[{classname}] report resolution failed, Agent does not exist or no right to " +"access, report data: {report}" +msgstr "" + +#: apiserver/report/handler/sca_handler.py:49 +#, fuzzy +#| msgid "Incomplete data" +msgid "Data is incomplete, data: {}" +msgstr "Incomplete data" + +#: apiserver/report/report_handler_factory.py:52 +#, fuzzy +#| msgid "Operation type does not exist" +msgid "Report type {} handler does not exist" +msgstr "Operation type does not exist" + +#: apiserver/report/report_handler_factory.py:63 +msgid "Registration report type {} handler {}" +msgstr "" + +#: apiserver/views/agent_download.py:66 +#, fuzzy, python-brace-format +#| msgid "Tenant {} creation failed, error message:{}" +msgid "Agent configuration file creation failed, reason: {E}" +msgstr "Tenant {} creation failed, error message:{}" + +#: apiserver/views/agent_download.py:321 +msgid "Agent download failed, user: {}, error details: {}" +msgstr "" + +#: apiserver/views/agent_register.py:132 +msgid "The server port does not exist, has been set to the default: 0" +msgstr "" + +#: apiserver/views/agent_register.py:172 +#, fuzzy +#| msgid "Operation success" +msgid "Server record creation success" +msgstr "Operation success" + +#: 
apiserver/views/agent_register.py:248 +msgid "auto create project {}" +msgstr "" + +#: apiserver/views/agent_register.py:251 +msgid "auto create project version {}" +msgstr "" + +#: apiserver/views/agent_update.py:48 +#, fuzzy +#| msgid "Operation success" +msgid "Server record update success" +msgstr "Operation success" + +#: apiserver/views/engine_heartbeat.py:46 +#, fuzzy +#| msgid "Agent and related data deleted successfully" +msgid "[{}] Heartbeat data is successful" +msgstr "Agent and related data deleted successfully" + +#: apiserver/views/engine_heartbeat.py:49 +#, fuzzy +#| msgid "Parameter parsing failed, error message: {}" +msgid "Heartbeat data failed, error reason: {}" +msgstr "Parameter parsing failed, error message: {}" + +#: apiserver/views/engine_heartbeat.py:63 +#, fuzzy +#| msgid "Password reset failed, error message: {E}" +msgid "Client IP acquisition failed, reasons: {}" +msgstr "Password reset failed, reasons: {E}" + +#: apitest/views.py:38 +#, fuzzy +#| msgid "Number of Agent" +msgid "no project found" +msgstr "Number of Agent" + +#: apitest/views.py:41 +msgid "Please enter the parameters required for the test first" +msgstr "" + +#: apitest/views.py:46 apitest/views.py:72 +msgid "No API collected" +msgstr "" + +#: apitest/views.py:57 +#, fuzzy +#| msgid "Start Agent" +msgid "Starting API Test" +msgstr "Start Agent" + +#: core/plugins/export_report.py:92 iast/base/agent.py:212 +#, python-brace-format +msgid "" +"We found that there is {1} in the {0} page, attacker can modify the value of " +"{2} to attack:" +msgstr "" +"We found that there is {1} in the {0} page, attacker can modify the value of " +"{2} to attack:" + +#: core/plugins/export_report.py:116 +#, python-brace-format +msgid "call {3} at line {2} of file {1}, incoming parameters {0}" +msgstr "" + +#: core/plugins/export_report.py:124 +#, python-brace-format +msgid "call function {2} at line {1} of {0}" +msgstr "" + +#: core/plugins/export_report.py:131 +#, python-brace-format +msgid "run sink function {2} at line {1} of file {0}" +msgstr "" + +#: core/plugins/export_report.py:137 +#, python-brace-format +msgid "" +"Code call chain: \n" +"{0}, and then {1},\n" +" {2}" +msgstr "" + +#: core/plugins/export_report.py:139 +#, python-brace-format +msgid "Code call chain: call {1} at {0}" +msgstr "" + +#: core/plugins/export_report.py:141 iast/base/agent.py:246 +msgid "{} Appears in {} {}" +msgstr "{} Appears in {} {}" + +#: core/plugins/export_report.py:244 +#, fuzzy +#| msgid "Operation success" +msgid "Report export success" +msgstr "Operation success" + +#: core/plugins/export_report.py:269 core/plugins/export_report.py:519 +#: iast/views/project_report_export.py:148 +msgid "Security Testing Report" +msgstr "Security Testing Report" + +#: core/plugins/export_report.py:281 core/plugins/export_report.py:511 +#: iast/views/project_report_export.py:161 +msgid "First, project information" +msgstr "First, project information" + +#: core/plugins/export_report.py:290 core/plugins/export_report.py:506 +#: iast/views/project_report_export.py:169 +msgid "Application name" +msgstr "Application name" + +#: core/plugins/export_report.py:293 core/plugins/export_report.py:507 +#: iast/views/project_report_export.py:172 +msgid "Author" +msgstr "Author" + +#: core/plugins/export_report.py:296 iast/views/project_report_export.py:175 +msgid "Application type" +msgstr "Application type" + +#: core/plugins/export_report.py:299 core/plugins/export_report.py:508 +#: iast/views/project_report_export.py:178 +msgid "Number of 
Vulnerability" +msgstr "Number of Vulnerability" + +#: core/plugins/export_report.py:302 core/plugins/export_report.py:509 +#: iast/views/project_report_export.py:181 +msgid "Number of Agent" +msgstr "Number of Agent" + +#: core/plugins/export_report.py:305 iast/views/project_report_export.py:184 +msgid "Latest time" +msgstr "Latest time" + +#: core/plugins/export_report.py:322 core/plugins/export_report.py:512 +#: iast/views/project_report_export.py:206 +msgid "Second, the result analysis" +msgstr "Second, the result analysis" + +#: core/plugins/export_report.py:326 iast/views/project_report_export.py:210 +msgid "2.1 Vulnerability Severity Levels Distribution" +msgstr "2.1 Vulnerability Severity Levels Distribution" + +#: core/plugins/export_report.py:336 iast/views/project_report_export.py:220 +msgid "2.2 Distribution of Vulnerability" +msgstr "2.2 Distribution of Vulnerability" + +#: core/plugins/export_report.py:343 core/plugins/export_report.py:515 +#: core/plugins/export_report.py:610 iast/views/project_report_export.py:228 +msgid "Severity levels" +msgstr "Severity levels" + +#: core/plugins/export_report.py:344 core/plugins/export_report.py:516 +#: core/plugins/export_report.py:609 iast/views/project_report_export.py:229 +msgid "Vulnerability type name" +msgstr "Vulnerability type name" + +#: core/plugins/export_report.py:345 core/plugins/export_report.py:517 +#: iast/views/project_report_export.py:230 +msgid "Number" +msgstr "Number" + +#: core/plugins/export_report.py:356 iast/views/project_report_export.py:243 +msgid "2.3 Vulnerability details" +msgstr "2.3 Vulnerability details" + +#: core/plugins/export_report.py:373 core/plugins/export_report.py:459 +#: core/plugins/export_report.py:580 iast/views/project_report_export.py:266 +msgid "Summary" +msgstr "Summary" + +#: core/plugins/export_report.py:377 core/plugins/export_report.py:461 +#: core/plugins/export_report.py:582 iast/views/project_report_export.py:270 +msgid "Severity level" +msgstr "Severity level" + +#: core/plugins/export_report.py:381 core/plugins/export_report.py:463 +#: core/plugins/export_report.py:465 core/plugins/export_report.py:584 +#: core/plugins/export_report.py:611 iast/views/project_report_export.py:274 +msgid "First scan time" +msgstr "First scan time" + +#: core/plugins/export_report.py:385 core/plugins/export_report.py:586 +#: core/plugins/export_report.py:612 iast/views/project_report_export.py:278 +msgid "Last scan time" +msgstr "Last scan time" + +#: core/plugins/export_report.py:389 core/plugins/export_report.py:467 +#: core/plugins/export_report.py:588 core/plugins/export_report.py:613 +#: iast/views/project_report_export.py:282 +msgid "Development language" +msgstr "Development language" + +#: core/plugins/export_report.py:393 core/plugins/export_report.py:469 +#: core/plugins/export_report.py:590 core/plugins/export_report.py:614 +#: iast/views/project_report_export.py:286 +msgid "Vulnerability URL" +msgstr "Vulnerability URL" + +#: core/plugins/export_report.py:395 core/plugins/export_report.py:472 +#: core/plugins/export_report.py:593 core/plugins/export_report.py:615 +#: iast/views/project_report_export.py:288 +msgid "Vulnerability description" +msgstr "Vulnerability description" + +#: core/plugins/export_report.py:513 +#, fuzzy +#| msgid "2.1 Vulnerability Severity Levels Distribution" +msgid "Vulnerability Severity Levels Distribution" +msgstr "2.1 Vulnerability Severity Levels Distribution" + +#: core/plugins/export_report.py:514 +#, fuzzy +#| msgid "2.2 Distribution of Vulnerability" 
+msgid "Distribution of Vulnerability" +msgstr "2.2 Distribution of Vulnerability" + +#: core/plugins/export_report.py:518 iast/views/vul_details.py:339 +#, fuzzy +#| msgid "2.3 Vulnerability details" +msgid "Vulnerability details" +msgstr "2.3 Vulnerability details" + +#: dongtai/endpoint/__init__.py:275 iast/views/log_delete.py:32 +#: iast/views/openapi.py:29 iast/views/project_detail.py:37 +msgid "success" +msgstr "success" + +#: dongtai/endpoint/__init__.py:288 iast/views/logs.py:87 +msgid "failure" +msgstr "failure" + +#: dongtai/models/agent.py:27 +msgid "server" +msgstr "" + +#: dongtai/models/agent_method_pool.py:50 +msgid "sinks" +msgstr "" + +#: dongtai/models/asset.py:30 dongtai/models/heartbeat.py:39 +#, fuzzy +#| msgid "Agent list" +msgid "agent" +msgstr "Agent list" + +#: dongtai/models/department.py:29 dongtai/models/talent.py:17 +#: dongtai/models/talent.py:36 +msgid "talent" +msgstr "" + +#: dongtai/models/department.py:32 +msgid "" +"The talent this department belongs to. A department will get all permissions " +"granted to each of their talent." +msgstr "" + +#: dongtai/models/department.py:45 +msgid "name" +msgstr "" + +#: dongtai/models/department.py:49 +#, fuzzy +#| msgid "Failed to create, the application name already exists" +msgid "A department with that department name already exists." +msgstr "Failed to create, the application name already exists" + +#: dongtai/models/department.py:52 dongtai/models/project_version.py:20 +#, fuzzy +#| msgid "Latest time" +msgid "create time" +msgstr "Latest time" + +#: dongtai/models/department.py:53 dongtai/models/project_version.py:21 +#, fuzzy +#| msgid "Latest time" +msgid "update time" +msgstr "Latest time" + +#: dongtai/models/department.py:54 +msgid "created by" +msgstr "" + +#: dongtai/models/department.py:55 +#, fuzzy +#| msgid "Number of Agent" +msgid "parent id" +msgstr "Number of Agent" + +#: dongtai/models/hook_strategy.py:17 +msgid "type" +msgstr "" + +#: dongtai/models/hook_strategy.py:20 dongtai/models/user.py:22 +msgid "" +"The department this user belongs to. A user will get all permissions granted " +"to each of their department." +msgstr "" + +#: dongtai/models/talent.py:21 +#, fuzzy +#| msgid "Failed to create, the application name already exists" +msgid "A talent with that talent name already exists." +msgstr "Failed to create, the application name already exists" + +#: dongtai/models/talent.py:28 +msgid "active" +msgstr "" + +#: dongtai/models/talent.py:31 +msgid "" +"Designates whether this user should be treated as active. Unselect this " +"instead of deleting accounts." 
+msgstr "" + +#: dongtai/models/user.py:19 +#, fuzzy +#| msgid "Default department" +msgid "department" +msgstr "Default department" + +#: i18n/views/setlang.py:29 +#, fuzzy +#| msgid "{} files not supported" +msgid "this language not supported now" +msgstr "{} files not supported" + +#: iast/account/department.py:60 iast/account/department.py:136 +msgid "部门" +msgstr "" + +#: iast/account/department.py:61 +msgid "部门获取" +msgstr "" + +#: iast/account/department.py:62 iast/account/department.py:138 +#: iast/account/user.py:184 +msgid "管理" +msgstr "" + +#: iast/account/department.py:108 iast/account/department.py:152 +msgid "Department {} already exists" +msgstr "Department {} already exists" + +#: iast/account/department.py:118 +msgid "Department name has been modified to {}" +msgstr "Department name has been modified to {}" + +#: iast/account/department.py:123 iast/account/department.py:185 +#: iast/account/user.py:212 +msgid "Department does not exist" +msgstr "Department does not exist" + +#: iast/account/department.py:137 +msgid "增加部门" +msgstr "" + +#: iast/account/department.py:167 +#, fuzzy +#| msgid "Tenant does not exist" +msgid "Talent does not exist" +msgstr "Tenant does not exist" + +#: iast/account/department.py:174 +msgid "Department {} has been created successfully" +msgstr "Department {} has been created successfully" + +#: iast/account/department.py:180 +msgid "Access Denied" +msgstr "Access Denied" + +#: iast/account/department.py:200 +msgid "" +"Delete failed, existence of users under department {}, please implement " +"forced deletion" +msgstr "" +"Delete failed, existence of users under department {}, please implement " +"forced deletion" + +#: iast/account/department.py:207 iast/views/agents_delete.py:31 +#: iast/views/agents_delete.py:79 iast/views/project_report_delete.py:52 +#: iast/views/project_version_delete.py:25 +#: iast/views/project_version_delete.py:53 iast/views/vul_delete.py:18 +#: iast/views/vul_delete.py:43 +msgid "Deleted Successfully" +msgstr "Deleted Successfully" + +#: iast/account/talent.py:25 +msgid "Tenant management" +msgstr "Tenant management" + +#: iast/account/talent.py:83 +msgid "Tenant has been deactivated" +msgstr "Tenant has been deactivated" + +#: iast/account/talent.py:83 +msgid "Tenant does not exist" +msgstr "Tenant does not exist" + +#: iast/account/talent.py:94 +msgid "Tenant name or email is not specified" +msgstr "Tenant name or email is not specified" + +#: iast/account/talent.py:98 +msgid "Tenant {} has been created successfully" +msgstr "Tenant {} has been created successfully" + +#: iast/account/talent.py:99 +msgid "Tenant {} creation failed, error message:{}" +msgstr "Tenant {} creation failed, error message:{}" + +#: iast/account/talent.py:108 +msgid "Tenant: {} Delete successfully" +msgstr "Tenant: {} Delete successfully" + +#: iast/account/talent.py:123 +msgid "Query if the default tenant information exists" +msgstr "Query if the default tenant information exists" + +#: iast/account/talent.py:129 +#, fuzzy +#| msgid "" +#| "The tenant information already existed, please delete the existing " +#| "information first" +msgid "" +"Tenant information already exists, please delete tenant information first" +msgstr "" +"Tenant information already exists, please delete tenant information first" + +#: iast/account/talent.py:130 +msgid "" +"The tenant information already existed, please delete the existing " +"information first" +msgstr "" +"The tenant information already existed, please delete the existing " +"information first" + +#: 
iast/account/talent.py:132 +msgid "Started creating a tenant" +msgstr "Started creating a tenant" + +#: iast/account/talent.py:138 +msgid "Finished creating tenant, start to create tenant default department" +msgstr "Finished creating tenant, start to create tenant default department" + +#: iast/account/talent.py:144 +msgid "Finished creating department, start to create default user" +msgstr "Finished creating department, start to create default user" + +#: iast/account/talent.py:157 +msgid "Finsihed creating and initializing tenant" +msgstr "Finished creating and initializing tenant" + +#: iast/account/talent.py:160 +msgid "Failed to created a tenant, error message:{}" +msgstr "Failed to create a tenant, error message: {}" + +#: iast/account/user.py:96 +msgid "The format of ‘page’ and ‘pageSize’ only can be numberic" +msgstr "The format of ‘page’ and ‘pageSize’ can only be numeric" + +#: iast/account/user.py:133 +msgid "Data update succeeded" +msgstr "Data update succeeded" + +#: iast/account/user.py:138 iast/views/project_detail.py:38 +#: iast/views/project_detail.py:83 iast/views/project_report_export.py:103 +#: iast/views/project_summary.py:107 iast/views/user_detail.py:32 +msgid "no permission" +msgstr "no permission" + +#: iast/account/user.py:173 +msgid "Failed to delete User {}" +msgstr "Failed to delete User {}" + +#: iast/account/user.py:178 +msgid "User {} successfully deleted" +msgstr "User {} successfully deleted" + +#: iast/account/user.py:182 +msgid "用户" +msgstr "" + +#: iast/account/user.py:183 +msgid "增加用户" +msgstr "" + +#: iast/account/user.py:195 +msgid "Consistent" +msgstr "Consistent" + +#: iast/account/user.py:204 +msgid "Username already exists" +msgstr "Username already exists" + +#: iast/account/user.py:244 iast/account/user.py:259 +msgid "User creation failed" +msgstr "User creation failed" + +#: iast/account/user.py:249 iast/views/user_register_batch.py:73 +msgid "User {} has been created successfully" +msgstr "User {} has been created successfully" + +#: iast/account/user.py:254 +msgid "Department does not exist or no permission to access" +msgstr "Department does not exist or no permission to access" + +#: iast/base/agent.py:227 +msgid "{} Line" +msgstr "{} Line" + +#: iast/base/agent.py:243 +msgid "In {} {} call {}. {} (), Incoming parameters {}" +msgstr "In {} {} call {}. 
{} (), Incoming parameters {}" + +#: iast/base/project_version.py:11 iast/base/project_version.py:124 +#: iast/views/api_route_search.py:41 iast/views/project_report_export.py:29 +#: iast/views/project_summary.py:26 iast/views/project_version_current.py:21 +#: iast/views/project_version_delete.py:19 +#: iast/views/project_version_list.py:19 +msgid "The version id of the project" +msgstr "" + +#: iast/base/project_version.py:13 iast/base/project_version.py:126 +#: iast/views/project_add.py:36 iast/views/project_version_list.py:21 +#: iast/views/vul_details.py:55 +msgid "The version name of the project" +msgstr "" + +#: iast/base/project_version.py:15 iast/views/project_version_list.py:23 +msgid "Description of the project versoin" +msgstr "" + +#: iast/base/project_version.py:16 iast/serializers/vul.py:113 +#: iast/views/api_route_search.py:39 iast/views/project_add.py:37 +#: iast/views/project_delete.py:17 iast/views/project_detail.py:30 +#: iast/views/project_report_export.py:34 +#: iast/views/project_report_sync_add.py:21 iast/views/project_search.py:18 +#: iast/views/project_summary.py:56 iast/views/project_version_current.py:22 +#: iast/views/project_version_delete.py:20 iast/views/vul_details.py:71 +#, fuzzy +#| msgid "Number of Agent" +msgid "The id of the project" +msgstr "Number of Agent" + +#: iast/base/project_version.py:18 iast/views/project_version_list.py:25 +msgid "Whether it is the current version, 1 means yes, 0 means no." +msgstr "" + +#: iast/base/project_version.py:33 iast/threshold/agent_core_status.py:50 +#: iast/views/agent_start.py:43 iast/views/agent_stop.py:39 +#: iast/views/api_route_related_request.py:60 +#: iast/views/api_route_search.py:151 iast/views/engine_hook_rule_status.py:107 +#: iast/views/engine_hook_rule_status.py:156 +#: iast/views/engine_hook_rule_summary.py:47 +#: iast/views/engine_hook_rule_types.py:80 iast/views/engine_hook_rules.py:88 +#: iast/views/engine_hook_rules.py:94 +#: iast/views/engine_method_pool_search.py:173 +#: iast/views/engine_method_pool_search.py:184 iast/views/openapi.py:14 +#: iast/views/openapi.py:68 iast/views/project_add.py:49 +#: iast/views/project_add.py:195 iast/views/project_report_export.py:81 +#: iast/views/project_report_export.py:86 +#: iast/views/project_report_sync_add.py:53 +#: iast/views/project_version_add.py:18 iast/views/project_version_add.py:43 +#: iast/views/project_version_add.py:49 +#: iast/views/project_version_current.py:50 +#: iast/views/project_version_delete.py:23 +#: iast/views/project_version_delete.py:47 +#: iast/views/project_version_delete.py:59 +#: iast/views/project_version_list.py:60 +#: iast/views/project_version_update.py:17 +#: iast/views/project_version_update.py:40 +#: iast/views/project_version_update.py:46 iast/views/sca_summary.py:191 +#: iast/views/scas.py:180 iast/views/strategys_add.py:48 +#: iast/views/vul_summary.py:207 iast/views/vul_summary_project.py:182 +#: iast/views/vul_summary_type.py:184 iast/views/vuls.py:223 +msgid "Parameter error" +msgstr "Parameter error" + +#: iast/base/project_version.py:46 +msgid "Repeated version name" +msgstr "Repeated version name" + +#: iast/base/project_version.py:55 iast/views/project_version_current.py:26 +#: iast/views/project_version_current.py:68 +#: iast/views/project_version_delete.py:24 +#: iast/views/project_version_delete.py:55 +msgid "Version does not exist" +msgstr "Version does not exist" + +#: iast/base/project_version.py:122 iast/views/project_add.py:39 +msgid "Description of the project" +msgstr "" + +#: 
iast/base/update_project_version.py:18 +msgid "Detects and associates application version information" +msgstr "Detects and associates application version information" + +#: iast/base/update_project_version.py:43 +msgid "Detection finished" +msgstr "Detection finished" + +#: iast/base/update_project_version.py:45 +msgid "Detection failed" +msgstr "Detection failed" + +#: iast/notify/feishu.py:12 +msgid "Maven official crawler" +msgstr "Maven official crawler" + +#: iast/serializers/agent.py:50 iast/views/agents.py:72 +msgid "Online" +msgstr "Online" + +#: iast/serializers/agent.py:50 iast/views/agents.py:72 +msgid "Offline" +msgstr "Offline" + +#: iast/serializers/agent.py:62 iast/views/agents.py:184 +msgid "Load data is not uploaded" +msgstr "Load data is not uploaded" + +#: iast/serializers/agent.py:71 iast/serializers/agent.py:76 +#: iast/views/agents.py:82 iast/views/agents.py:87 +msgid "No flow is detected by the probe" +msgstr "No flow is detected by the probe" + +#: iast/serializers/agent.py:133 iast/serializers/agent.py:140 +#: iast/views/agent_alias_modified.py:18 iast/views/agent_start.py:16 +msgid "The id corresponding to the agent." +msgstr "" + +#: iast/serializers/agent.py:135 iast/views/agent_start.py:18 +#: iast/views/agents_delete.py:27 +msgid "The id corresponding to the agent, use\",\" for segmentation." +msgstr "" + +#: iast/serializers/agent_config.py:13 iast/serializers/agent_config.py:27 +#, fuzzy +#| msgid "Number of Agent" +msgid "The details config to the agent." +msgstr "Number of Agent" + +#: iast/serializers/agent_config.py:14 +#, fuzzy +#| msgid "Number of Agent" +msgid "The hostname of the agent." +msgstr "Number of Agent" + +#: iast/serializers/agent_config.py:15 +#, fuzzy +#| msgid "Number of Agent" +msgid "The ip of the agent." +msgstr "Number of Agent" + +#: iast/serializers/agent_config.py:16 iast/serializers/agent_config.py:17 +#, fuzzy +#| msgid "Number of Agent" +msgid "The port of the agent." +msgstr "Number of Agent" + +#: iast/serializers/agent_config.py:18 iast/serializers/agent_config.py:28 +#: iast/threshold/agent_core_status.py:23 +#, fuzzy +#| msgid "Number of Agent" +msgid "The cluster_name of the agent." +msgstr "Number of Agent" + +#: iast/serializers/agent_config.py:19 +#, fuzzy +#| msgid "Number of Agent" +msgid "The cluster_version of the agent." +msgstr "Number of Agent" + +#: iast/serializers/agent_config.py:20 +#, fuzzy +#| msgid "Number of Agent" +msgid "The priority of the agent." +msgstr "Number of Agent" + +#: iast/serializers/agent_config.py:25 iast/serializers/agent_config.py:33 +#: iast/threshold/agent_core_status.py:21 +#, fuzzy +#| msgid "Number of Agent" +msgid "The id of the webHook." +msgstr "Number of Agent" + +#: iast/serializers/agent_config.py:26 iast/threshold/agent_core_status.py:22 +#, fuzzy +#| msgid "Number of Agent" +msgid "The type of the webHook." +msgstr "Number of Agent" + +#: iast/serializers/hook_strategy.py:14 +msgid "" +"\n" +"Examples in a single case: O, P<1,2,3,4,...>, R\n" +"Combination situation: O&R, O&P1, etc.\n" +"O represents the object itself; R represents the return value; P represents " +"the parameter, and the number represents the position of the parameter\n" +msgstr "" + +#: iast/serializers/hook_strategy.py:33 +msgid "The name of hook rule type." +msgstr "" + +#: iast/serializers/hook_strategy.py:35 iast/views/engine_hook_rule_add.py:20 +#: iast/views/engine_hook_rule_modify.py:30 +msgid "The id of hook rule type." 
+msgstr "" + +#: iast/serializers/hook_strategy.py:37 +msgid "The user who created the hook rule type." +msgstr "" + +#: iast/serializers/hook_strategy.py:38 iast/views/strategy_delete.py:21 +#: iast/views/strategys_add.py:17 iast/views/strategys_type.py:18 +#, fuzzy +#| msgid "No strategy" +msgid "The id of strategy" +msgstr "No strategy" + +#: iast/serializers/hook_strategy.py:40 iast/views/engine_hook_rule_add.py:22 +#: iast/views/engine_hook_rule_modify.py:32 +#, fuzzy +#| msgid "No strategy" +msgid "The value of strategy" +msgstr "No strategy" + +#: iast/serializers/hook_strategy.py:44 iast/views/engine_hook_rule_add.py:26 +#: iast/views/engine_hook_rule_modify.py:36 +msgid "Source of taint" +msgstr "" + +#: iast/serializers/hook_strategy.py:49 iast/views/engine_hook_rule_add.py:31 +#: iast/views/engine_hook_rule_modify.py:41 +#, fuzzy +#| msgid "Number of Agent" +msgid "Target of taint" +msgstr "Number of Agent" + +#: iast/serializers/hook_strategy.py:55 iast/views/engine_hook_rule_add.py:37 +#: iast/views/engine_hook_rule_modify.py:47 +msgid "" +"Inheritance type, false-only detect current class, true-inspect subclasses, " +"all-check current class and subclasses" +msgstr "" + +#: iast/serializers/hook_strategy.py:61 iast/views/engine_hook_rule_add.py:43 +#: iast/views/engine_hook_rule_modify.py:53 +msgid "" +"Indicates whether taint tracking is required, true-required, false-not " +"required." +msgstr "" + +#: iast/serializers/hook_strategy.py:66 +msgid "The update time of hook strategy" +msgstr "" + +#: iast/serializers/hook_strategy.py:68 +msgid "" +"The enabled state of the hook strategy: 0-disabled, 1-enabled, -1-deleted" +msgstr "" + +#: iast/serializers/login.py:15 +msgid "Username should not be empty" +msgstr "Username should not be empty" + +#: iast/serializers/login.py:22 +msgid "Password should not be blank" +msgstr "Password should not be blank" + +#: iast/serializers/sca.py:40 iast/views/vul_details.py:229 +#: iast/views/vuls.py:31 iast/views/vuls.py:304 +msgid "The application has not been binded" +msgstr "No application" + +#: iast/serializers/sca.py:63 +msgid "No application version has been created" +msgstr "No application version" + +#: iast/serializers/vul.py:66 +#, fuzzy +#| msgid "Delete vulnerability" +msgid "The level name of vulnerablity" +msgstr "Delete vulnerability" + +#: iast/serializers/vul.py:89 iast/views/sca_sidebar_index.py:22 +#: iast/views/sca_summary.py:52 iast/views/scas.py:41 +#: iast/views/vul_details.py:58 iast/views/vul_sidebar_index.py:43 +#: iast/views/vul_summary.py:39 iast/views/vul_summary_project.py:34 +#: iast/views/vul_summary_type.py:33 iast/views/vuls.py:72 +#, fuzzy +#| msgid "Development language" +msgid "programming language" +msgstr "Development language" + +#: iast/serializers/vul.py:91 +msgid "The number of vulnerabilities corresponding to the programming language" +msgstr "" + +#: iast/serializers/vul.py:96 iast/views/vul_details.py:59 +#, fuzzy +#| msgid "Number of Vulnerability" +msgid "The name of vulnerablity level" +msgstr "Number of Vulnerability" + +#: iast/serializers/vul.py:97 iast/views/project_summary.py:49 +msgid "The number of vulnerabilities corresponding to the level" +msgstr "" + +#: iast/serializers/vul.py:98 iast/views/vul_details.py:60 +#, fuzzy +#| msgid "Number of Vulnerability" +msgid "The id of vulnerablity level" +msgstr "Number of Vulnerability" + +#: iast/serializers/vul.py:102 +#, fuzzy +#| msgid "Modify the vulnerability status" +msgid "The name of vulnerablity type" +msgstr "Modify the 
vulnerability status" + +#: iast/serializers/vul.py:104 +msgid "The number of vulnerabilities corresponding to the vulnerablity type" +msgstr "" + +#: iast/serializers/vul.py:110 iast/views/project_report_export.py:32 +#: iast/views/project_report_sync_add.py:20 +#, fuzzy +#| msgid "Number of Agent" +msgid "The name of the project" +msgstr "Number of Agent" + +#: iast/serializers/vul.py:112 +msgid "The number of vulnerabilities corresponding to the project" +msgstr "" + +#: iast/threshold/agent_core_status.py:16 iast/views/agent_search.py:26 +#: iast/views/agent_start.py:22 iast/views/agent_stop.py:18 +#: iast/views/agent_stop.py:65 +msgid "Suspending ..." +msgstr "Suspending ..." + +#: iast/threshold/agent_core_status.py:28 iast/views/agent_stop.py:22 +msgid "Suspend Agent" +msgstr "Suspend Agent" + +#: iast/threshold/agent_core_status.py:32 iast/threshold/config_setting.py:61 +#: iast/threshold/del_threshold_setting.py:27 +#: iast/threshold/del_webhook_setting.py:27 +#: iast/threshold/get_config_setting.py:26 +#: iast/threshold/get_config_setting_detail.py:26 +#: iast/threshold/webhook_setting.py:50 iast/views/agent.py:69 +#: iast/views/agent_alias_modified.py:31 iast/views/agent_delete.py:38 +#: iast/views/agent_install.py:30 iast/views/agent_search.py:31 +#: iast/views/agent_start.py:30 iast/views/agent_stop.py:26 +#: iast/views/agent_uninstall.py:28 iast/views/agent_upgrade_online.py:40 +#: iast/views/agents.py:63 iast/views/agents_delete.py:44 +#: iast/views/agents_user.py:23 iast/views/details_id.py:70 +#, fuzzy +#| msgid "Agent list" +msgid "Agent" +msgstr "Agent list" + +#: iast/threshold/agent_core_status.py:33 +#, fuzzy +#| msgid "Agent list" +msgid "Agent Status Update" +msgstr "Agent list" + +#: iast/threshold/agent_core_status.py:34 +msgid "Control the running agent by specifying the id." 
+msgstr "" + +#: iast/threshold/agent_core_status.py:44 +#: iast/threshold/agent_core_status.py:58 +#: iast/threshold/del_threshold_setting.py:37 +#: iast/threshold/del_webhook_setting.py:37 +#: iast/threshold/webhook_setting.py:63 iast/views/engine_hook_rule_add.py:51 +#: iast/views/engine_hook_rule_add.py:109 +#: iast/views/engine_hook_rule_modify.py:21 +#: iast/views/engine_hook_rule_modify.py:92 +msgid "Incomplete parameter, please check again" +msgstr "Incomplete parameter, please check again" + +#: iast/threshold/agent_core_status.py:85 +msgid "状态已下发" +msgstr "" + +#: iast/threshold/config_setting.py:17 +#: iast/threshold/del_threshold_setting.py:17 +#: iast/threshold/del_webhook_setting.py:17 +#: iast/threshold/webhook_setting.py:17 +#, fuzzy +#| msgid "The installation is complete" +msgid "The setting is complete" +msgstr "The installation is complete" + +#: iast/threshold/config_setting.py:18 +#: iast/threshold/del_threshold_setting.py:18 +#: iast/threshold/del_webhook_setting.py:18 +#: iast/threshold/get_config_setting.py:17 +#: iast/threshold/get_config_setting_detail.py:17 +#: iast/threshold/get_webhook_setting.py:19 +#: iast/threshold/webhook_setting.py:18 iast/threshold/webhook_type.py:15 +#, fuzzy +#| msgid "Incomplete parameter, please check again" +msgid "Incomplete parameter, please try again later" +msgstr "Incomplete parameter, please check again" + +#: iast/threshold/config_setting.py:24 iast/threshold/get_config_setting.py:23 +#: iast/threshold/get_config_setting_detail.py:23 +#: iast/threshold/get_webhook_setting.py:25 +msgid "config Agent" +msgstr "" + +#: iast/threshold/config_setting.py:62 iast/threshold/get_config_setting.py:27 +#: iast/threshold/get_config_setting_detail.py:27 +msgid "Agent threshold Config" +msgstr "" + +#: iast/threshold/config_setting.py:63 iast/threshold/get_config_setting.py:28 +#: iast/threshold/get_config_setting_detail.py:28 +msgid "Configure agent disaster recovery strategy" +msgstr "" + +#: iast/threshold/config_setting.py:86 +msgid "保存成功" +msgstr "" + +#: iast/threshold/config_setting.py:88 +msgid "保存失败" +msgstr "" + +#: iast/threshold/del_threshold_setting.py:24 +#: iast/threshold/del_webhook_setting.py:24 +#, fuzzy +#| msgid "Delete Agent" +msgid "del webHook Agent" +msgstr "Delete Agent" + +#: iast/threshold/del_threshold_setting.py:28 +#: iast/threshold/del_webhook_setting.py:28 +#, fuzzy +#| msgid "Agent list" +msgid "Agent webHook delete" +msgstr "Agent list" + +#: iast/threshold/del_threshold_setting.py:29 +#: iast/threshold/del_webhook_setting.py:29 +msgid "Delete agent traffic reporting data forwarding address configuration" +msgstr "" + +#: iast/threshold/del_threshold_setting.py:40 +#: iast/threshold/del_webhook_setting.py:40 +#, fuzzy +#| msgid "Application has been deleted successfully" +msgid "Config has been deleted successfully" +msgstr "Application has been deleted successfully" + +#: iast/threshold/del_threshold_setting.py:42 +#: iast/threshold/del_webhook_setting.py:42 +#, fuzzy +#| msgid "Failed to get {} configuration" +msgid "Failed to delete config" +msgstr "Failed to get {} configuration" + +#: iast/threshold/get_config_setting.py:16 +#: iast/threshold/get_webhook_setting.py:18 +#, fuzzy +#| msgid "Created success" +msgid "Get success" +msgstr "Created success" + +#: iast/threshold/get_config_setting.py:59 +#: iast/threshold/get_config_setting_detail.py:37 +#: iast/threshold/get_webhook_setting.py:42 +#, fuzzy +#| msgid "Deleted Successfully" +msgid "Successfully" +msgstr "Deleted Successfully" + +#: 
iast/threshold/get_config_setting_detail.py:16 +#, fuzzy +#| msgid "Created success" +msgid "Get detail success" +msgstr "Created success" + +#: iast/threshold/get_webhook_setting.py:28 iast/threshold/webhook_type.py:24 +msgid "WebHook" +msgstr "" + +#: iast/threshold/get_webhook_setting.py:29 +msgid "WebHook threshold Config get" +msgstr "" + +#: iast/threshold/get_webhook_setting.py:30 +msgid "WebHook threshold list" +msgstr "" + +#: iast/threshold/webhook_setting.py:24 +msgid "config webHook Agent" +msgstr "" + +#: iast/threshold/webhook_setting.py:51 +msgid "Agent webHook Config" +msgstr "" + +#: iast/threshold/webhook_setting.py:52 +msgid "Agent traffic reporting data forwarding address configuration" +msgstr "" + +#: iast/threshold/webhook_setting.py:66 +#, fuzzy +#| msgid "Account has been created successfully" +msgid "Config has been created successfully" +msgstr "Account has been created successfully" + +#: iast/threshold/webhook_setting.py:68 +#, fuzzy +#| msgid "Failed to get {} configuration" +msgid "Failed to create config" +msgstr "Failed to get {} configuration" + +#: iast/threshold/webhook_type.py:14 +#, fuzzy +#| msgid "Number of Agent" +msgid "The type is return" +msgstr "Number of Agent" + +#: iast/threshold/webhook_type.py:21 +msgid "get webhook all type " +msgstr "" + +#: iast/threshold/webhook_type.py:25 +#, fuzzy +#| msgid "Agent list" +msgid "Agent webHook type" +msgstr "Agent list" + +#: iast/threshold/webhook_type.py:26 +msgid "type list of agent webHook" +msgstr "" + +#: iast/threshold/webhook_type.py:75 +#, fuzzy +#| msgid "Rule type successfully saved" +msgid "Get type list successfully" +msgstr "Rule type successfully saved" + +#: iast/utils.py:96 +msgid "" +"The http status codes are both 200, please use the status and msg field " +"returned by the response data to troubleshoot" +msgstr "" + +#: iast/utils.py:133 +msgid "status code" +msgstr "" + +#: iast/utils.py:139 +msgid "human readable message" +msgstr "" + +#: iast/views/agent.py:22 iast/views/agent_search.py:20 +#: iast/views/agents_user.py:13 iast/views/project_engines.py:19 +#, fuzzy +#| msgid "Number of Agent" +msgid "The name of agent" +msgstr "Number of Agent" + +#: iast/views/agent.py:23 iast/views/agents_user.py:14 +#: iast/views/strategys.py:28 iast/views/strategys_list.py:15 +#, fuzzy +#| msgid "Number of Agent" +msgid "The id of agent" +msgstr "Number of Agent" + +#: iast/views/agent.py:24 +#, fuzzy +#| msgid "Number of Agent" +msgid "The version of agent" +msgstr "Number of Agent" + +#: iast/views/agent.py:26 +msgid "The latest update time of agent" +msgstr "" + +#: iast/views/agent.py:28 iast/views/agent.py:30 +msgid "The running status of agent" +msgstr "" + +#: iast/views/agent.py:32 +msgid "agent control bit, 1-install, 2-uninstall, 0-no control" +msgstr "" + +#: iast/views/agent.py:34 +msgid "Whether it is in control, 0-No, 1-Yes" +msgstr "" + +#: iast/views/agent.py:36 +msgid "Bundled project ID, if it exists, it will be bundled." 
+msgstr "" + +#: iast/views/agent.py:39 iast/views/agent_search.py:22 +msgid "Project name, used to start the agent first and then create the project" +msgstr "" + +#: iast/views/agent.py:42 +msgid "1 is running online, 0 is not running, same token, only one online" +msgstr "" + +#: iast/views/agent.py:44 +msgid "Bundled project version ID, if it exists, it will be bundled" +msgstr "" + +#: iast/views/agent.py:47 iast/views/project_summary.py:65 +msgid "Agent language currently included in the project" +msgstr "" + +#: iast/views/agent.py:70 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Agent Detail" +msgstr "Vulnerability type name" + +#: iast/views/agent.py:72 iast/views/project_version_delete.py:38 +msgid "Delete the specified project version according to the conditions." +msgstr "" + +#: iast/views/agent.py:80 iast/views/agent.py:82 iast/views/agent.py:86 +msgid "Can't find relevant data" +msgstr "Can't find relevant data" + +#: iast/views/agent_alias_modified.py:20 +#, fuzzy +#| msgid "Number of Agent" +msgid "The alias corresponding to the agent." +msgstr "Number of Agent" + +#: iast/views/agent_alias_modified.py:24 iast/views/agent_alias_modified.py:46 +#, fuzzy +#| msgid "Login successful" +msgid "modified successfully" +msgstr "Login successful" + +#: iast/views/agent_alias_modified.py:25 iast/views/agent_delete.py:28 +#: iast/views/agent_delete.py:62 +msgid "Agent does not exist or no permission to access" +msgstr "Agent does not exist or no permission to access" + +#: iast/views/agent_alias_modified.py:26 iast/views/agent_delete.py:29 +#: iast/views/agent_delete.py:65 +msgid "Error while deleting, please try again later" +msgstr "Error while deleting, please try again later" + +#: iast/views/agent_alias_modified.py:33 +#, fuzzy +#| msgid "Agent list" +msgid "Agent Alias Modified" +msgstr "Agent list" + +#: iast/views/agent_alias_modified.py:34 +msgid "Modified the agent alias" +msgstr "" + +#: iast/views/agent_delete.py:27 iast/views/agent_delete.py:60 +msgid "Agent and related data deleted successfully" +msgstr "Agent and related data deleted successfully" + +#: iast/views/agent_delete.py:35 iast/views/agents_delete.py:40 +msgid "Delete Agent" +msgstr "Delete Agent" + +#: iast/views/agent_delete.py:39 +#, fuzzy +#| msgid "Agent list" +msgid "Agent Delete" +msgstr "Agent list" + +#: iast/views/agent_delete.py:41 iast/views/project_delete.py:34 +msgid "Delete the agent by specifying the id." 
+msgstr "" + +#: iast/views/agent_delete.py:70 iast/views/agents_delete.py:87 +msgid "Error logs deleted successfully, Deletion Amount: {}" +msgstr "Error logs deleted successfully, Deletion Amount: {}" + +#: iast/views/agent_delete.py:72 iast/views/agents_delete.py:89 +msgid "Failed to delete error logs, probe ID: {}, error message: {}" +msgstr "Failed to delete error logs, probe ID: {}, error message: {}" + +#: iast/views/agent_delete.py:77 iast/views/agent_delete.py:84 +#: iast/views/agent_delete.py:91 iast/views/agent_delete.py:98 +#: iast/views/agent_delete.py:105 iast/views/agent_delete.py:112 +#: iast/views/agents_delete.py:94 iast/views/agents_delete.py:101 +#: iast/views/agents_delete.py:108 iast/views/agents_delete.py:115 +#: iast/views/agents_delete.py:122 iast/views/agents_delete.py:129 +msgid "" +"The replay request method pool data was successfully deleted, A total of {} " +"replay requests are deleted" +msgstr "" +"The replay request method pool data was successfully deleted, A total of {} " +"replay requests are deleted" + +#: iast/views/agent_delete.py:79 iast/views/agents_delete.py:96 +msgid "Failed to delete heartbeat data, error message: {}" +msgstr "Failed to delete heartbeat data, error message: {}" + +#: iast/views/agent_delete.py:86 iast/views/agents_delete.py:103 +msgid "Failed to delete unauthorized data, error message: {}" +msgstr "Failed to delete unauthorized data, error message: {}" + +#: iast/views/agent_delete.py:93 iast/views/agents_delete.py:110 +msgid "Failed to delete vulnerability data, error message: {}" +msgstr "Failed to delete vulnerability data, error message: {}" + +#: iast/views/agent_delete.py:100 iast/views/agents_delete.py:117 +msgid "Failed to delete third-party component data, error message: {}" +msgstr "Failed to delete third-party component data, error message: {}" + +#: iast/views/agent_delete.py:107 iast/views/agents_delete.py:124 +msgid "Failed to delete method pool data, error message: {}" +msgstr "Failed to delete method pool data, error message: {}" + +#: iast/views/agent_delete.py:114 +msgid "Failed to delete replay request method pool data, error message: {}" +msgstr "Failed to delete replay request method pool data, error message: {}" + +#: iast/views/agent_delete.py:119 iast/views/agents_delete.py:136 +msgid "Replay request queue deleted successfully, Deletion amount: {}" +msgstr "Replay request queue deleted successfully, Deletion amount: {}" + +#: iast/views/agent_delete.py:121 iast/views/agents_delete.py:131 +#: iast/views/agents_delete.py:138 +msgid "Failed to delete replay request queue, error message: {}" +msgstr "Failed to delete replay request queue, error message: {}" + +#: iast/views/agent_deploy.py:28 iast/views/agent_deploy.py:46 +msgid "Corresponding deployment document could not be found" +msgstr "Corresponding deployment document could not be found" + +#: iast/views/agent_deploy.py:32 iast/views/documents.py:44 +msgid "Documents" +msgstr "" + +#: iast/views/agent_deploy.py:33 iast/views/agent_deploy.py:34 +#, fuzzy +#| msgid "Number of Agent" +msgid "Document of Agent Deploy" +msgstr "Number of Agent" + +#: iast/views/agent_deploy_doc.py:14 iast/views/agent_deploy_info.py:14 +msgid "Agent deployment document" +msgstr "Agent deployment document" + +#: iast/views/agent_deploy_doc.py:38 iast/views/project_report_download.py:49 +#: iast/views/vuls.py:206 +msgid "No data" +msgstr "No data" + +#: iast/views/agent_deploy_submit.py:18 +msgid "Uploading Agent configuration" +msgstr "Uploading Agent configuration" + +#: 
iast/views/agent_download.py:22 +msgid "Downloading DongTai Agent" +msgstr "Downloading DongTai Agent" + +#: iast/views/agent_install.py:17 iast/views/agent_install.py:43 +msgid "The installation is complete" +msgstr "The installation is complete" + +#: iast/views/agent_install.py:18 iast/views/agent_install.py:45 +#: iast/views/agent_uninstall.py:17 +msgid "The engine is being installed or uninstalled, please try again later" +msgstr "The engine is being installed or uninstalled, please try again later" + +#: iast/views/agent_install.py:19 iast/views/agent_install.py:47 +#: iast/views/agent_start.py:47 iast/views/agent_stop.py:44 +#: iast/views/agent_uninstall.py:18 iast/views/agent_uninstall.py:45 +msgid "Engine does not exist or no permission to access" +msgstr "Engine does not exist or no permission to access" + +#: iast/views/agent_install.py:26 +msgid "Installing an Agent" +msgstr "Installing an Agent" + +#: iast/views/agent_install.py:31 +#, fuzzy +#| msgid "Agent list" +msgid "Agent Install" +msgstr "Agent list" + +#: iast/views/agent_install.py:32 +msgid "Install the running agent by specifying the id." +msgstr "" + +#: iast/views/agent_search.py:18 iast/views/documents.py:19 +#: iast/views/engine_hook_rule_types.py:24 iast/views/messages_list.py:24 +#: iast/views/messages_list.py:40 iast/views/project_report_list.py:15 +#: iast/views/projects.py:21 iast/views/sca_summary.py:47 +#: iast/views/scan_strategys.py:43 iast/views/scan_strategys.py:69 +#: iast/views/scas.py:36 iast/views/sensitive_info_rule.py:107 +#: iast/views/strategys.py:68 iast/views/vul_list_for_plugin.py:35 +#: iast/views/vul_sidebar_index.py:39 iast/views/vuls.py:67 +#: scaupload/views.py:32 +#, fuzzy +#| msgid "Number of Agent" +msgid "Number per page" +msgstr "Number of Agent" + +#: iast/views/agent_search.py:19 iast/views/documents.py:20 +#: iast/views/engine_hook_rule_types.py:25 iast/views/messages_list.py:25 +#: iast/views/project_report_list.py:16 iast/views/projects.py:22 +#: iast/views/sca_summary.py:40 iast/views/scan_strategys.py:44 +#: iast/views/scas.py:29 iast/views/sensitive_info_rule.py:108 +#: iast/views/strategys.py:69 iast/views/vul_list_for_plugin.py:28 +#: iast/views/vul_sidebar_index.py:33 iast/views/vuls.py:60 +#: scaupload/views.py:33 +msgid "Page index" +msgstr "" + +#: iast/views/agent_search.py:32 +msgid "Agent Search" +msgstr "" + +#: iast/views/agent_search.py:34 +msgid "" +"Search for the agent corresponding to the user according to the following " +"parameters" +msgstr "" + +#: iast/views/agent_start.py:26 +msgid "Start Agent" +msgstr "Start Agent" + +#: iast/views/agent_start.py:31 +#, fuzzy +#| msgid "Agent list" +msgid "Agent Start" +msgstr "Agent list" + +#: iast/views/agent_start.py:33 +msgid "Start the stopped agent by specifying the id." +msgstr "" + +#: iast/views/agent_start.py:49 iast/views/agent_stop.py:46 +msgid "Agent is stopping service, please try again later" +msgstr "Agent is stopping service, please try again later" + +#: iast/views/agent_start.py:65 +msgid "Starting…" +msgstr "Starting…" + +#: iast/views/agent_status_update.py:17 iast/views/agent_status_update.py:32 +msgid "Engine status was updated successfully." +msgstr "Engine status was updated successfully." + +#: iast/views/agent_stop.py:27 +#, fuzzy +#| msgid "Agent list" +msgid "Agent Stop" +msgstr "Agent list" + +#: iast/views/agent_stop.py:29 iast/views/agents_delete.py:46 +#: iast/views/agents_user.py:25 +msgid "Stop the running agent by specifying the id." 
+msgstr "" + +#: iast/views/agent_uninstall.py:16 iast/views/agent_uninstall.py:41 +msgid "Uninstalling ..." +msgstr "Uninstalling ..." + +#: iast/views/agent_uninstall.py:24 +msgid "Uninstall Agent" +msgstr "Uninstall Agent" + +#: iast/views/agent_uninstall.py:29 +#, fuzzy +#| msgid "Agent list" +msgid "Agent Uninstall" +msgstr "Agent list" + +#: iast/views/agent_uninstall.py:30 +msgid "Uninstall the running agent by specifying the id." +msgstr "" + +#: iast/views/agent_uninstall.py:43 +msgid "Agent is being installed or uninstalled, please try again later" +msgstr "Agent is being installed or uninstalled, please try again later" + +#: iast/views/agent_upgrade_offline.py:12 +msgid "Offline Upgrade Agent" +msgstr "Offline Upgrade Agent" + +#: iast/views/agent_upgrade_offline.py:19 +msgid "Upload successful" +msgstr "Upload successful" + +#: iast/views/agent_upgrade_offline.py:20 +msgid "{} files not supported" +msgstr "{} files not supported" + +#: iast/views/agent_upgrade_online.py:19 +msgid "The resource link corresponding to the Agent." +msgstr "" + +#: iast/views/agent_upgrade_online.py:21 +msgid "" +"The Token corresponding to the user is the same as when connecting to " +"openapi." +msgstr "" + +#: iast/views/agent_upgrade_online.py:26 iast/views/agent_upgrade_online.py:50 +msgid "Online upgrade successful" +msgstr "Online upgrade successful" + +#: iast/views/agent_upgrade_online.py:28 iast/views/agent_upgrade_online.py:52 +msgid "" +"Token verification failed, please confirm your input address and token are " +"correct" +msgstr "" +"Token verification failed, please confirm your input address and token are " +"correct" + +#: iast/views/agent_upgrade_online.py:35 +msgid "Online Upgrade Agent" +msgstr "Online Upgrade Agent" + +#: iast/views/agent_upgrade_online.py:41 +msgid "Agent Upgrade Online" +msgstr "" + +#: iast/views/agent_upgrade_online.py:42 +msgid "Agent upgrade" +msgstr "" + +#: iast/views/agents.py:30 +msgid "Agent list" +msgstr "Agent list" + +#: iast/views/agents.py:64 +#, fuzzy +#| msgid "Agent list" +msgid "Agent List" +msgstr "Agent list" + +#: iast/views/agents.py:66 +msgid "Get a list containing Agent information according to conditions." +msgstr "" + +#: iast/views/agents.py:206 +msgid "Incorrect format parameter, please check again" +msgstr "Incorrect format parameter, please check again" + +#: iast/views/agents.py:209 +msgid "Program error" +msgstr "Program error" + +#: iast/views/agents_delete.py:32 iast/views/agents_delete.py:81 +#: iast/views/vul_delete.py:19 iast/views/vul_delete.py:48 +msgid "Deletion failed" +msgstr "Deletion failed" + +#: iast/views/agents_delete.py:33 iast/views/agents_delete.py:82 +msgid "Successfully deleted {} strips, failed to deleted {} strips" +msgstr "Successfully deleted {} strips, failed to deleted {} strips" + +#: iast/views/agents_delete.py:45 +msgid "Agent Delete batch" +msgstr "" + +#: iast/views/agents_user.py:24 +msgid "Agent (with user)" +msgstr "" + +#: iast/views/api_route_cover_rate.py:23 +msgid "The api cover_rate of the project" +msgstr "" + +#: iast/views/api_route_cover_rate.py:37 +#: iast/views/api_route_related_request.py:41 +#: iast/views/api_route_search.py:126 +msgid "API Route" +msgstr "" + +#: iast/views/api_route_cover_rate.py:38 +msgid "API Route Coverrate" +msgstr "" + +#: iast/views/api_route_cover_rate.py:40 +msgid "" +"Get the API route coverrate of the project corresponding to the specified id." 
+msgstr "" + +#: iast/views/api_route_cover_rate.py:69 +#, fuzzy +#| msgid "Agent and related data deleted successfully" +msgid "API coverage rate obtained successfully" +msgstr "Agent and related data deleted successfully" + +#: iast/views/api_route_related_request.py:42 +msgid "API Route Relation Request" +msgstr "" + +#: iast/views/api_route_related_request.py:44 +msgid "Get the coverrate of the project corresponding to the specified id." +msgstr "" + +#: iast/views/api_route_related_request.py:55 +msgid "API not Fould" +msgstr "" + +#: iast/views/api_route_search.py:32 iast/views/engine_hook_rules.py:29 +#: iast/views/engine_method_pool_search.py:27 +#, fuzzy +#| msgid "Number of Agent" +msgid "number per page" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:35 +#, fuzzy +#| msgid "Number of Agent" +msgid "The uri of the api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:38 +#, fuzzy +#| msgid "Number of Agent" +msgid "The http method of the api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:43 +msgid "" +"Exclude the api route entry with the following id, this field is used to " +"obtain the data of the entire project in batches." +msgstr "" + +#: iast/views/api_route_search.py:49 iast/views/api_route_search.py:109 +msgid "" +"Whether the api is covered by detection, that is, there is associated " +"request data in the record." +msgstr "" + +#: iast/views/api_route_search.py:60 +msgid "The method bound to this API" +msgstr "" + +#: iast/views/api_route_search.py:62 +msgid "The method bound to this API, in array form" +msgstr "" + +#: iast/views/api_route_search.py:66 iast/views/api_route_search.py:95 +#, fuzzy +#| msgid "Number of Agent" +msgid "The id of api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:67 +#, fuzzy +#| msgid "Number of Agent" +msgid "The name of api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:69 +#, fuzzy +#| msgid "Number of Agent" +msgid "The type of the parameter" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:71 +msgid "The shortcut of the parameter_type,e.g. 
java.lang.String -> String" +msgstr "" + +#: iast/views/api_route_search.py:73 +#, fuzzy +#| msgid "Number of Agent" +msgid "The annotaion of the parameter" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:74 +#, fuzzy +#| msgid "No strategy" +msgid "The route id of parameter" +msgstr "No strategy" + +#: iast/views/api_route_search.py:78 +#, fuzzy +#| msgid "Number of Agent" +msgid "The id of api response" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:80 +#, fuzzy +#| msgid "Number of Agent" +msgid "The return type of api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:82 +#, fuzzy +#| msgid "No strategy" +msgid "The route id of api response" +msgstr "No strategy" + +#: iast/views/api_route_search.py:84 +msgid "The shortcut of return_type" +msgstr "" + +#: iast/views/api_route_search.py:89 +#, fuzzy +#| msgid "Number of Vulnerability" +msgid "The vulnerablity level id " +msgstr "Number of Vulnerability" + +#: iast/views/api_route_search.py:91 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "The vulnerablity type name" +msgstr "Vulnerability type name" + +#: iast/views/api_route_search.py:96 +#, fuzzy +#| msgid "Number of Agent" +msgid "The uri of api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:97 +#, fuzzy +#| msgid "Number of Agent" +msgid "The class of api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:99 +#, fuzzy +#| msgid "Number of Agent" +msgid "The description of the api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:101 +#, fuzzy +#| msgid "Number of Agent" +msgid "The code file of the api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:103 +#, fuzzy +#| msgid "Number of Agent" +msgid "The controller of the api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:105 +#, fuzzy +#| msgid "Number of Agent" +msgid "The id of the agent reported the api route" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:127 +#, fuzzy +#| msgid "Number of Agent" +msgid "API Route Search" +msgstr "Number of Agent" + +#: iast/views/api_route_search.py:129 +msgid "" +"Get the API list corresponding to the project according to the following " +"parameters. By default, there is no sorting. Please use the exclude_ids " +"field for pagination." +msgstr "" + +#: iast/views/details_id.py:71 +#, fuzzy +#| msgid "Agent list" +msgid "Agent List with id" +msgstr "Agent list" + +#: iast/views/details_id.py:73 iast/views/details_id.py:94 +#: iast/views/details_id.py:115 iast/views/details_id.py:137 +#: iast/views/projects.py:41 iast/views/scan_strategys.py:114 +#: iast/views/scan_strategys.py:170 iast/views/scan_strategys.py:192 +#: iast/views/sensitive_info_rule.py:134 iast/views/sensitive_info_rule.py:164 +#: iast/views/sensitive_info_rule.py:198 iast/views/sensitive_info_rule.py:221 +#: iast/views/sensitive_info_rule.py:234 iast/views/sensitive_info_rule.py:266 +msgid "" +"Get the item corresponding to the user, support fuzzy search based on name." 
+msgstr "" + +#: iast/views/details_id.py:91 iast/views/project_add.py:62 +#: iast/views/project_delete.py:32 iast/views/project_detail.py:47 +#: iast/views/project_engines.py:37 iast/views/project_report_delete.py:27 +#: iast/views/project_report_download.py:27 +#: iast/views/project_report_export.py:63 iast/views/project_report_list.py:32 +#: iast/views/project_report_sync_add.py:32 iast/views/project_search.py:34 +#: iast/views/project_summary.py:94 iast/views/project_version_add.py:29 +#: iast/views/project_version_current.py:38 +#: iast/views/project_version_delete.py:35 +#: iast/views/project_version_list.py:39 +#: iast/views/project_version_update.py:28 iast/views/projects.py:38 +#, fuzzy +#| msgid "Number of Agent" +msgid "Project" +msgstr "Number of Agent" + +#: iast/views/details_id.py:92 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Project List with id" +msgstr "Vulnerability type name" + +#: iast/views/details_id.py:112 iast/views/sca_details.py:84 +#: iast/views/sca_sidebar_index.py:45 iast/views/sca_summary.py:146 +#: iast/views/scas.py:124 +msgid "Component" +msgstr "" + +#: iast/views/details_id.py:113 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Component List with id" +msgstr "Vulnerability type name" + +#: iast/views/details_id.py:134 iast/views/vul_count_for_plugin.py:26 +#: iast/views/vul_delete.py:29 iast/views/vul_details.py:343 +#: iast/views/vul_list_for_plugin.py:60 iast/views/vul_recheck.py:132 +#: iast/views/vul_recheck.py:212 iast/views/vul_recheck_v2.py:121 +#: iast/views/vul_recheck_v2.py:201 iast/views/vul_sidebar_index.py:72 +#: iast/views/vul_status.py:60 iast/views/vul_summary.py:151 +#: iast/views/vul_summary_project.py:126 iast/views/vul_summary_type.py:128 +#: iast/views/vulnerability_status.py:78 iast/views/vuls.py:187 +#, fuzzy +#| msgid "Vulnerability URL" +msgid "Vulnerability" +msgstr "Vulnerability URL" + +#: iast/views/details_id.py:135 +#, fuzzy +#| msgid "Vulnerability URL" +msgid "Vulnerability List with id" +msgstr "Vulnerability URL" + +#: iast/views/documents.py:23 +msgid "Document's corresponding programming language" +msgstr "" + +#: iast/views/documents.py:42 +msgid "Get documents" +msgstr "" + +#: iast/views/documents.py:43 +msgid "Get help documentation." +msgstr "" + +#: iast/views/engine_hook_rule_add.py:50 iast/views/strategy_enable.py:18 +#: iast/views/strategy_enable.py:46 +msgid "Policy enabled success, total {} hook rules" +msgstr "Policy enabled success, total {} hook rules" + +#: iast/views/engine_hook_rule_add.py:52 iast/views/engine_hook_rule_add.py:120 +#: iast/views/engine_hook_rule_modify.py:22 +#: iast/views/engine_hook_rule_modify.py:108 +msgid "Failed to create strategy" +msgstr "Failed to create strategy" + +#: iast/views/engine_hook_rule_add.py:99 +#: iast/views/engine_hook_rule_modify.py:80 +#: iast/views/engine_hook_rule_status.py:93 +#: iast/views/engine_hook_rule_status.py:141 +#: iast/views/engine_hook_rule_summary.py:37 +#: iast/views/engine_hook_rule_type_add.py:90 +#: iast/views/engine_hook_rule_type_disable.py:37 +#: iast/views/engine_hook_rule_type_enable.py:43 +#: iast/views/engine_hook_rule_types.py:74 iast/views/engine_hook_rules.py:80 +msgid "Hook Rule" +msgstr "" + +#: iast/views/engine_hook_rule_add.py:100 +msgid "Hook Rule Add" +msgstr "" + +#: iast/views/engine_hook_rule_add.py:102 iast/views/strategys.py:180 +#: iast/views/strategys_add.py:37 +msgid "" +"Generate corresponding strategy group according to the strategy selected by " +"the user." 
+msgstr "" + +#: iast/views/engine_hook_rule_add.py:119 +msgid "Strategy has been created successfully" +msgstr "Strategy has been created successfully" + +#: iast/views/engine_hook_rule_modify.py:20 +#: iast/views/engine_hook_rule_modify.py:107 +#, fuzzy +#| msgid "Strategy has been created successfully" +msgid "strategy has been created successfully" +msgstr "Strategy has been created successfully" + +#: iast/views/engine_hook_rule_modify.py:28 +#: iast/views/engine_hook_rule_status.py:23 +msgid "The id of hook rule" +msgstr "" + +#: iast/views/engine_hook_rule_modify.py:81 +msgid "Hook Rule Modify" +msgstr "" + +#: iast/views/engine_hook_rule_modify.py:82 +msgid "Modify the rule corresponding to the specified id" +msgstr "" + +#: iast/views/engine_hook_rule_status.py:25 +#, fuzzy +#| msgid "Number of Agent" +msgid "The id of hook rule type" +msgstr "Number of Agent" + +#: iast/views/engine_hook_rule_status.py:28 +#: iast/views/engine_hook_rule_status.py:42 +msgid "The state of the hook rule" +msgstr "" + +#: iast/views/engine_hook_rule_status.py:31 +#, fuzzy +#| msgid "Number of Agent" +msgid "The scope of the hook rule" +msgstr "Number of Agent" + +#: iast/views/engine_hook_rule_status.py:33 +msgid "The language_id" +msgstr "" + +#: iast/views/engine_hook_rule_status.py:35 +#, fuzzy +#| msgid "Number of Agent" +msgid "The type of hook rule" +msgstr "Number of Agent" + +#: iast/views/engine_hook_rule_status.py:40 +msgid "The id corresponding to the hook type, use\",\" for segmentation." +msgstr "" + +#: iast/views/engine_hook_rule_status.py:46 +#: iast/views/engine_hook_rule_status.py:52 +#: iast/views/engine_hook_rule_status.py:135 +#: iast/views/engine_hook_rule_status.py:161 +msgid "Operation success" +msgstr "Operation success" + +#: iast/views/engine_hook_rule_status.py:47 +#: iast/views/engine_hook_rule_status.py:53 +#: iast/views/engine_hook_rule_status.py:113 +#: iast/views/engine_hook_rule_status.py:150 +#: iast/views/engine_hook_rule_type_enable.py:25 +msgid "Operation type does not exist" +msgstr "Operation type does not exist" + +#: iast/views/engine_hook_rule_status.py:48 +#: iast/views/engine_hook_rule_status.py:137 +#: iast/views/engine_hook_rule_type_disable.py:22 +#: iast/views/engine_hook_rule_type_disable.py:47 +#: iast/views/engine_hook_rule_type_enable.py:53 +#: iast/views/strategy_disable.py:18 iast/views/strategy_disable.py:46 +#: iast/views/strategy_enable.py:19 iast/views/strategy_enable.py:48 +msgid "Strategy does not exist" +msgstr "Strategy does not exist" + +#: iast/views/engine_hook_rule_status.py:54 +#: iast/views/engine_hook_rule_status.py:163 iast/views/vul_status.py:20 +#: iast/views/vul_status.py:97 +msgid "Incorrect parameter" +msgstr "Incorrect parameter" + +#: iast/views/engine_hook_rule_status.py:94 +#, fuzzy +#| msgid "Vulnerability status is modified to {}" +msgid "Hook Rule Status Modify" +msgstr "Vulnerability status is modified to {}" + +#: iast/views/engine_hook_rule_status.py:95 +msgid "Modify the status of the rule corresponding to the specified id." 
+msgstr "" + +#: iast/views/engine_hook_rule_status.py:116 +msgid "Policy type {} operation success, total of {} Policy types" +msgstr "Policy type {} operation success, total of {} Policy types" + +#: iast/views/engine_hook_rule_status.py:127 +msgid "total of {} Policy types" +msgstr "" + +#: iast/views/engine_hook_rule_status.py:132 +msgid "Policy {} succeed" +msgstr "Policy {} succeed" + +#: iast/views/engine_hook_rule_status.py:142 +msgid "Hook Rule Status Modify (Batch)" +msgstr "" + +#: iast/views/engine_hook_rule_status.py:143 +msgid "Batch modify the status of the rule corresponding to the specified id" +msgstr "" + +#: iast/views/engine_hook_rule_status.py:160 +msgid "Strategy operation success, total {}" +msgstr "Strategy operation success, total {}" + +#: iast/views/engine_hook_rule_summary.py:18 +msgid "Total number of rule types" +msgstr "" + +#: iast/views/engine_hook_rule_summary.py:19 +msgid "Total number of rules" +msgstr "" + +#: iast/views/engine_hook_rule_summary.py:21 +msgid "Total number of sink type rules" +msgstr "" + +#: iast/views/engine_hook_rule_summary.py:26 +#: iast/views/engine_hook_rule_types.py:33 iast/views/engine_hook_rules.py:35 +#, fuzzy +#| msgid "Development language" +msgid "The id of programming language" +msgstr "Development language" + +#: iast/views/engine_hook_rule_summary.py:38 +msgid "Hook Rule Summary" +msgstr "" + +#: iast/views/engine_hook_rule_summary.py:39 +msgid "Statistics on the number of hook rules" +msgstr "" + +#: iast/views/engine_hook_rule_type_add.py:26 +#: iast/views/engine_hook_rule_types.py:29 iast/views/engine_hook_rules.py:26 +msgid "" +"type of hook rule \n" +" 1 represents the propagation method, 2 represents the source method, 3 " +"represents the filter method, and 4 represents the taint method" +msgstr "" + +#: iast/views/engine_hook_rule_type_add.py:32 +msgid "The enabled state of the hook strategy: 0-disabled, 1-enabled" +msgstr "" + +#: iast/views/engine_hook_rule_type_add.py:34 +msgid "The name of hook type" +msgstr "" + +#: iast/views/engine_hook_rule_type_add.py:38 +msgid "The short name of hook type" +msgstr "" + +#: iast/views/engine_hook_rule_type_add.py:45 +#, fuzzy +#| msgid "Development language" +msgid "The id of programming language,find it in the programming language api" +msgstr "Development language" + +#: iast/views/engine_hook_rule_type_add.py:51 +#: iast/views/engine_hook_rule_type_add.py:111 +msgid "Rule type successfully saved" +msgstr "Rule type successfully saved" + +#: iast/views/engine_hook_rule_type_add.py:52 +#: iast/views/engine_hook_rule_type_add.py:99 +msgid "Incomplete data" +msgstr "Incomplete data" + +#: iast/views/engine_hook_rule_type_add.py:85 +#: iast/views/engine_hook_rule_types.py:67 iast/views/engine_hook_rules.py:75 +msgid "Parameter parsing failed, error message: {}" +msgstr "Parameter parsing failed, error message: {}" + +#: iast/views/engine_hook_rule_type_add.py:91 +msgid "Hook Rule Type Add" +msgstr "" + +#: iast/views/engine_hook_rule_type_add.py:92 +msgid "Create hook rule type based on incoming parameters" +msgstr "" + +#: iast/views/engine_hook_rule_type_disable.py:15 +#: iast/views/engine_hook_rule_type_enable.py:19 +msgid "The id of hook type" +msgstr "" + +#: iast/views/engine_hook_rule_type_disable.py:20 +#: iast/views/engine_hook_rule_type_disable.py:55 +msgid "Forbidden success" +msgstr "Forbidden success" + +#: iast/views/engine_hook_rule_type_disable.py:21 +#: iast/views/engine_hook_rule_type_disable.py:56 +#: iast/views/engine_hook_rule_type_enable.py:26 +#: 
iast/views/engine_hook_rule_type_enable.py:62 +#: iast/views/engine_hook_rule_types.py:82 iast/views/engine_hook_rules.py:96 +msgid "Strategy type does not exist" +msgstr "Strategy type does not exist" + +#: iast/views/engine_hook_rule_type_disable.py:38 +msgid "Hook Rule Status Disable" +msgstr "" + +#: iast/views/engine_hook_rule_type_disable.py:40 +msgid "Disable the status of the rule corresponding to the specified id." +msgstr "" + +#: iast/views/engine_hook_rule_type_enable.py:24 +#: iast/views/engine_hook_rule_type_enable.py:61 +msgid "Enable successfully" +msgstr "Enable successfully" + +#: iast/views/engine_hook_rule_type_enable.py:38 +msgid "Parameter processing failed, error message: {}" +msgstr "Parameter processing failed, error message: {}" + +#: iast/views/engine_hook_rule_type_enable.py:44 +msgid "Hook Rule Status Enable" +msgstr "" + +#: iast/views/engine_hook_rule_type_enable.py:46 +msgid "Enable the status of the rule corresponding to the specified id." +msgstr "" + +#: iast/views/engine_hook_rule_types.py:72 +msgid "Hook Types List" +msgstr "" + +#: iast/views/engine_hook_rule_types.py:73 +msgid "Get Hook Types List" +msgstr "" + +#: iast/views/engine_hook_rules.py:30 +msgid "page index" +msgstr "" + +#: iast/views/engine_hook_rules.py:32 +msgid "The id of hook_type" +msgstr "" + +#: iast/views/engine_hook_rules.py:37 +#, fuzzy +#| msgid "Temporary search" +msgid "The keyword for search" +msgstr "Temporary search" + +#: iast/views/engine_hook_rules.py:81 +msgid "Hook Rule List" +msgstr "" + +#: iast/views/engine_hook_rules.py:82 +msgid "Get the list of hook strategies" +msgstr "" + +#: iast/views/engine_hook_rules.py:119 +msgid "Rule read error, error message: {}" +msgstr "Rule read error, error message: {}" + +#: iast/views/engine_method_pool_detail.py:22 +msgid "Engine - search data according to policy" +msgstr "Engine - search data according to policy" + +#: iast/views/engine_method_pool_detail.py:48 +msgid "Not queried" +msgstr "Not queried" + +#: iast/views/engine_method_pool_detail.py:55 +msgid "Acquisition fail" +msgstr "Acquisition fail" + +#: iast/views/engine_method_pool_detail.py:68 +msgid "Temporary search" +msgstr "Temporary search" + +#: iast/views/engine_method_pool_detail.py:94 +msgid "Searching, current {} page" +msgstr "Searching, current {} page" + +#: iast/views/engine_method_pool_sca.py:25 +#: iast/views/engine_method_pool_search.py:147 +#: iast/views/engine_method_pool_time_range.py:38 +msgid "Method Pool" +msgstr "" + +#: iast/views/engine_method_pool_sca.py:26 +#, fuzzy +#| msgid "Method pool ID is empty" +msgid "Method Pool Component" +msgstr "Method pool ID is empty" + +#: iast/views/engine_method_pool_sca.py:27 +msgid "Get the component information list of the tainted call chain." +msgstr "" + +#: iast/views/engine_method_pool_sca.py:34 +msgid "method_pool_id is empty" +msgstr "method_pool_id is empty" + +#: iast/views/engine_method_pool_sca.py:39 +msgid "method_pool does not exist" +msgstr "method_pool does not exist" + +#: iast/views/engine_method_pool_sca.py:45 +msgid "method_pool has no permission" +msgstr "method_pool has no permission" + +#: iast/views/engine_method_pool_search.py:31 +msgid "" +"Whether to enable highlighting, the text where the regular expression " +"matches will be highlighted" +msgstr "" + +#: iast/views/engine_method_pool_search.py:36 +msgid "" +"Exclude the method_pool entry with the following id, this field is used to " +"obtain the data of the entire project in batches." 
+msgstr "" + +#: iast/views/engine_method_pool_search.py:41 +msgid "time format such as 1,1628190947242" +msgstr "" + +#: iast/views/engine_method_pool_search.py:45 +msgid "" +"Time range, the default is the current time to the previous seven days, " +"separated by',', format such as 1,1628190947242" +msgstr "" + +#: iast/views/engine_method_pool_search.py:48 +msgid "The url of the method pool, search using regular syntax" +msgstr "" + +#: iast/views/engine_method_pool_search.py:51 +msgid "The response header of the method pood, search using regular syntax" +msgstr "" + +#: iast/views/engine_method_pool_search.py:54 +msgid "The response body of the calling chain, search using regular syntax" +msgstr "" + +#: iast/views/engine_method_pool_search.py:57 +msgid "The request header of the calling chain, search using regular syntax" +msgstr "" + +#: iast/views/engine_method_pool_search.py:61 +msgid "The request data of the calling chain, search using regular syntax" +msgstr "" + +#: iast/views/engine_method_pool_search.py:64 +msgid "The sinkvalues of the calling chain, search using regular syntax" +msgstr "" + +#: iast/views/engine_method_pool_search.py:67 +msgid "The signature of the calling chain, search using regular syntax" +msgstr "" + +#: iast/views/engine_method_pool_search.py:70 +msgid "" +"The filter field will return the method call chain with the update time " +"after this time, which can be combined with the exclude_ids field to handle " +"paging" +msgstr "" + +#: iast/views/engine_method_pool_search.py:74 +msgid "the search_mode , 1-regex match ,2-regex not match " +msgstr "" + +#: iast/views/engine_method_pool_search.py:148 +#, fuzzy +#| msgid "Method pool ID is empty" +msgid "Method Pool Search" +msgstr "Method pool ID is empty" + +#: iast/views/engine_method_pool_search.py:150 +msgid "" +"Search for the method pool information according to the following " +"conditions, the default is regular expression input, regular specifications " +"refer to REGEX POSIX 1003.2" +msgstr "" + +#: iast/views/engine_method_pool_search.py:228 +msgid "The regular expression format is wrong, please use REGEX POSIX 1003.2" +msgstr "The regular expression format is wrong, please use REGEX POSIX 1003.2" + +#: iast/views/engine_method_pool_time_range.py:34 +msgid "the eariest time of method_pool" +msgstr "" + +#: iast/views/engine_method_pool_time_range.py:39 +#, fuzzy +#| msgid "Method pool ID is empty" +msgid "Method Pool Time Range" +msgstr "Method pool ID is empty" + +#: iast/views/engine_method_pool_time_range.py:40 +#, fuzzy +#| msgid "method_pool does not exist" +msgid "get method_pool eariest time" +msgstr "method_pool does not exist" + +#: iast/views/filereplace.py:48 +msgid "" +"this file is disallowed to modifyupload failed,this file is disallowed to " +"modify." +msgstr "" + +#: iast/views/filereplace.py:60 +#, fuzzy +#| msgid "Program error" +msgid "upload error" +msgstr "Program error" + +#: iast/views/filereplace.py:65 +#, fuzzy +#| msgid "Upload successful" +msgid "upload sussess" +msgstr "Upload successful" + +#: iast/views/filereplace.py:74 +msgid "upload error, fail back to default" +msgstr "" + +#: iast/views/health.py:32 iast/views/openapi.py:30 iast/views/openapi.py:48 +#: iast/views/oss_health.py:23 +msgid "Get OpenAPI configuration failed" +msgstr "Get OpenAPI configuration failed" + +#: iast/views/health.py:34 +msgid "OpenAPI service is down, Please check it." 
+msgstr "" + +#: iast/views/log_clear.py:14 +msgid "Log clear" +msgstr "Log clear" + +#: iast/views/log_delete.py:14 +msgid "Log delete" +msgstr "Log delete" + +#: iast/views/log_delete.py:34 +msgid "The data to be deleted should not be empty" +msgstr "The data to be deleted should not be empty" + +#: iast/views/log_export.py:58 +msgid "Export failed, error message: Log id should not be empty" +msgstr "Export failed, error message: Log id should not be empty" + +#: iast/views/logs.py:17 +msgid "Log list" +msgstr "Log list" + +#: iast/views/logs.py:84 iast/views/vul_recheck.py:47 +#: iast/views/vul_recheck.py:189 iast/views/vul_recheck_v2.py:37 +#: iast/views/vul_recheck_v2.py:178 +msgid "No permission to access" +msgstr "No permission to access" + +#: iast/views/messages_del.py:28 +#, fuzzy +#| msgid "Number of Agent" +msgid "The id of Message" +msgstr "Number of Agent" + +#: iast/views/messages_del.py:32 +msgid "delete all messages when all is True" +msgstr "" + +#: iast/views/messages_del.py:38 +#, fuzzy +#| msgid "Agent list" +msgid "Messages Delete" +msgstr "Agent list" + +#: iast/views/messages_del.py:39 +msgid "Used by the user to delete the corresponding message" +msgstr "" + +#: iast/views/messages_del.py:40 iast/views/messages_list.py:55 +#: iast/views/messages_new.py:38 iast/views/messages_send.py:41 +msgid "Messages" +msgstr "" + +#: iast/views/messages_list.py:38 +msgid "total_number" +msgstr "" + +#: iast/views/messages_list.py:39 +#, fuzzy +#| msgid "Number of Agent" +msgid "the number of pages" +msgstr "Number of Agent" + +#: iast/views/messages_list.py:52 +msgid "Get Messages List" +msgstr "" + +#: iast/views/messages_list.py:54 iast/views/messages_send.py:40 +msgid "Used to get the message list corresponding to the user" +msgstr "" + +#: iast/views/messages_new.py:26 +msgid "total number of new messages" +msgstr "" + +#: iast/views/messages_new.py:35 +#, fuzzy +#| msgid "Agent list" +msgid "Messages Count" +msgstr "Agent list" + +#: iast/views/messages_new.py:37 +msgid "Used to get the number of messages corresponding to the user" +msgstr "" + +#: iast/views/messages_send.py:38 +#, fuzzy +#| msgid "Number of Agent" +msgid "Send Message" +msgstr "Number of Agent" + +#: iast/views/method_graph.py:35 +msgid "Method pool ID is empty" +msgstr "Method pool ID is empty" + +#: iast/views/method_graph.py:58 +msgid "Stain call map type does not exist" +msgstr "Stain call map type does not exist" + +#: iast/views/method_graph.py:61 +msgid "Data does not exist or no permission to access" +msgstr "Data does not exist or no permission to access" + +#: iast/views/method_graph.py:67 +msgid "Page and PageSize can only be numeric" +msgstr "Page and PageSize can only be numeric" + +#: iast/views/openapi.py:12 iast/views/openapi.py:73 iast/views/openapi.py:77 +#: iast/views/project_add.py:50 iast/views/project_report_sync_add.py:65 +#: iast/views/project_version_add.py:19 iast/views/project_version_add.py:45 +msgid "Created success" +msgstr "Created success" + +#: iast/views/openapi.py:13 iast/views/openapi.py:63 iast/views/profile.py:34 +#: iast/views/profile.py:92 +msgid "Current users have no permission to modify" +msgstr "Current users have no permission to modify" + +#: iast/views/openapi.py:36 iast/views/openapi.py:53 iast/views/profile.py:18 +#: iast/views/profile.py:30 iast/views/profile.py:68 iast/views/profile.py:88 +msgid "Profile" +msgstr "" + +#: iast/views/openapi.py:37 +msgid "Profile DongTai-OpenApi Retrieve" +msgstr "" + +#: iast/views/openapi.py:38 +msgid "Get the uri of 
DongTai-OpenApi" +msgstr "" + +#: iast/views/openapi.py:54 +msgid "Profile DongTai-OpenApi Modify" +msgstr "" + +#: iast/views/openapi.py:56 +msgid "" +"To set the url address of DongTai-OpenApi, administrator rights are required" +msgstr "" + +#: iast/views/oss_health.py:25 +#, fuzzy +#| msgid "Get OpenAPI configuration failed" +msgid "OpenAPI configuration error" +msgstr "Get OpenAPI configuration failed" + +#: iast/views/profile.py:13 iast/views/profile.py:59 iast/views/profile.py:79 +msgid "profile value" +msgstr "" + +#: iast/views/profile.py:16 +msgid "Get Profile" +msgstr "" + +#: iast/views/profile.py:17 +msgid "Get Profile with key" +msgstr "" + +#: iast/views/profile.py:24 +msgid "Failed to get {} configuration" +msgstr "Failed to get {} configuration" + +#: iast/views/profile.py:27 iast/views/profile.py:84 +msgid "Profile modify" +msgstr "" + +#: iast/views/profile.py:29 iast/views/profile.py:86 +msgid "Modifiy Profile with key" +msgstr "" + +#: iast/views/profile.py:49 +msgid "Update {} failed" +msgstr "Update {} failed" + +#: iast/views/profile.py:53 iast/views/profile.py:58 iast/views/profile.py:80 +msgid "profile key" +msgstr "" + +#: iast/views/profile.py:57 +msgid "profile id" +msgstr "" + +#: iast/views/profile.py:64 +msgid "GetProfileBatch" +msgstr "" + +#: iast/views/profile.py:66 +msgid "Get Profile with key batch" +msgstr "" + +#: iast/views/profile.py:74 +#, fuzzy +#| msgid "Failed to get {} configuration" +msgid "Failed to get configuration" +msgstr "Failed to get {} configuration" + +#: iast/views/profile.py:105 +#, fuzzy +#| msgid "Get OpenAPI configuration failed" +msgid "Update configuration failed" +msgstr "Get OpenAPI configuration failed" + +#: iast/views/program_language.py:19 +#, fuzzy +#| msgid "Development language" +msgid "The id of program language" +msgstr "Development language" + +#: iast/views/program_language.py:20 +#, fuzzy +#| msgid "Number of Agent" +msgid "The name of program language" +msgstr "Number of Agent" + +#: iast/views/program_language.py:33 +#, fuzzy +#| msgid "Development language" +msgid "Program Language" +msgstr "Development language" + +#: iast/views/program_language.py:34 +#, fuzzy +#| msgid "Development language" +msgid "Program Language List" +msgstr "Development language" + +#: iast/views/program_language.py:35 +#, fuzzy +#| msgid "Development language" +msgid "Get a list of program language." +msgstr "Development language" + +#: iast/views/project_add.py:30 iast/views/project_detail.py:21 +#: iast/views/project_search.py:19 iast/views/project_summary.py:53 +#: iast/views/vul_details.py:53 +#, fuzzy +#| msgid "Number of Agent" +msgid "The name of project" +msgstr "Number of Agent" + +#: iast/views/project_add.py:32 iast/views/project_detail.py:23 +msgid "The id corresponding to the agent, use, for segmentation." +msgstr "" + +#: iast/views/project_add.py:34 iast/views/project_detail.py:27 +msgid "The id corresponding to the scanning strategy." 
+msgstr "" + +#: iast/views/project_add.py:51 iast/views/project_add.py:162 +msgid "Agent has been bound by other application" +msgstr "Agent has been bound by other application" + +#: iast/views/project_add.py:52 iast/views/project_add.py:126 +msgid "Failed to create, the application name already exists" +msgstr "Failed to create, the application name already exists" + +#: iast/views/project_add.py:58 +msgid "New application" +msgstr "New application" + +#: iast/views/project_add.py:63 +msgid "Projects Add" +msgstr "" + +#: iast/views/project_add.py:65 +msgid "" +"Create a new project according to the given conditions;\n" +" when specifying the project id, update the item corresponding to " +"the id according to the given condition." +msgstr "" + +#: iast/views/project_add.py:97 iast/views/project_add.py:99 +msgid "base_url validate failed" +msgstr "" + +#: iast/views/project_add.py:105 +msgid "Agent parse error" +msgstr "" + +#: iast/views/project_add.py:110 +msgid "Required scan strategy and name" +msgstr "" + +#: iast/views/project_add.py:151 +#, fuzzy +#| msgid "Verification code error" +msgid "Version Update Error" +msgstr "Verification code error" + +#: iast/views/project_delete.py:21 iast/views/project_delete.py:48 +msgid "Application has been deleted successfully" +msgstr "Application has been deleted successfully" + +#: iast/views/project_delete.py:22 iast/views/project_delete.py:51 +#, fuzzy +#| msgid "Failed to delete User {}" +msgid "Failed to delete the project." +msgstr "Failed to delete User {}" + +#: iast/views/project_delete.py:28 +msgid "Delete application" +msgstr "Delete application" + +#: iast/views/project_delete.py:33 +#, fuzzy +#| msgid "Number of Agent" +msgid "Projects Delete" +msgstr "Number of Agent" + +#: iast/views/project_detail.py:25 iast/views/project_summary.py:55 +#, fuzzy +#| msgid "Number of Agent" +msgid "The mode of project" +msgstr "Number of Agent" + +#: iast/views/project_detail.py:29 iast/views/project_summary.py:59 +msgid "Version information about the project" +msgstr "" + +#: iast/views/project_detail.py:44 +msgid "View item details" +msgstr "View item details" + +#: iast/views/project_detail.py:48 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Projects Detail" +msgstr "Vulnerability type name" + +#: iast/views/project_detail.py:50 +msgid "" +"Get project information by project id, including the current version " +"information of the project." +msgstr "" + +#: iast/views/project_engines.py:18 +#, fuzzy +#| msgid "Number of Agent" +msgid "The id of the agent" +msgstr "Number of Agent" + +#: iast/views/project_engines.py:21 +#, fuzzy +#| msgid "Number of Agent" +msgid "The short name of the agent" +msgstr "Number of Agent" + +#: iast/views/project_engines.py:34 +msgid "View engine list" +msgstr "View engine list" + +#: iast/views/project_engines.py:38 +#, fuzzy +#| msgid "Number of Agent" +msgid "Projects Agents" +msgstr "Number of Agent" + +#: iast/views/project_engines.py:39 +msgid "Get the agent list corresponding to the project id." 
+msgstr "" + +#: iast/views/project_report_delete.py:15 +#: iast/views/project_report_download.py:18 +#, fuzzy +#| msgid "Number of Agent" +msgid "The id of the project report" +msgstr "Number of Agent" + +#: iast/views/project_report_delete.py:23 +#, fuzzy +#| msgid "Delete vulnerability" +msgid "Delete Vulnerability Report" +msgstr "Delete vulnerability" + +#: iast/views/project_report_delete.py:28 +#: iast/views/project_report_export.py:64 +msgid "Projects Report Export" +msgstr "" + +#: iast/views/project_report_delete.py:30 +msgid "" +"According to the conditions, delete the report of the specified project or " +"the project of the specified vulnerability." +msgstr "" + +#: iast/views/project_report_download.py:23 +#, fuzzy +#| msgid "Vulnerability Report Generate - Word" +msgid "Vulnerability Report Download" +msgstr "Vulnerability Report Generate - Word" + +#: iast/views/project_report_download.py:28 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Projects Report Download" +msgstr "Vulnerability type name" + +#: iast/views/project_report_download.py:30 +#: iast/views/project_report_export.py:66 +msgid "" +"According to the conditions, export the report of the specified project or " +"the project of the specified vulnerability." +msgstr "" + +#: iast/views/project_report_download.py:52 +msgid "Record is not ready" +msgstr "" + +#: iast/views/project_report_export.py:39 +msgid "Vulnerability Report Generate - Word" +msgstr "Vulnerability Report Generate - Word" + +#: iast/views/project_report_export.py:93 +msgid "Vulnerability Report - {}. {}" +msgstr "Vulnerability Report - {}. {}" + +#: iast/views/project_report_list.py:17 +#, fuzzy +#| msgid "Number of Agent" +msgid "Project id" +msgstr "Number of Agent" + +#: iast/views/project_report_list.py:28 +#, fuzzy +#| msgid "Vulnerability URL" +msgid "Vulnerability Report List" +msgstr "Vulnerability URL" + +#: iast/views/project_report_list.py:33 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Projects Report List" +msgstr "Vulnerability type name" + +#: iast/views/project_report_list.py:35 +msgid "" +"According to the conditions, list the report of the specified project or the " +"project of the specified vulnerability." +msgstr "" + +#: iast/views/project_report_sync_add.py:18 +#, fuzzy +#| msgid "Number of Agent" +msgid "The vulnerability id of the project" +msgstr "Number of Agent" + +#: iast/views/project_report_sync_add.py:23 +#, fuzzy +#| msgid "Number of Vulnerability" +msgid "The type of the vulnerability report" +msgstr "Number of Vulnerability" + +#: iast/views/project_report_sync_add.py:28 +#: iast/views/project_report_sync_add.py:33 +#, fuzzy +#| msgid "Vulnerability Report Generate - Word" +msgid "Vulnerability Report Async Export" +msgstr "Vulnerability Report Generate - Word" + +#: iast/views/project_report_sync_add.py:35 +msgid "" +"According to the conditions, export the report of the specified project or " +"the project of the specified vulnerability async." +msgstr "" + +#: iast/views/project_report_sync_add.py:57 +#, fuzzy +#| msgid "User does not exist" +msgid "Project not exist" +msgstr "User does not exist" + +#: iast/views/project_report_sync_add.py:59 +msgid "Report type error" +msgstr "" + +#: iast/views/project_search.py:16 +msgid "Project name, support fuzzy search." 
+msgstr "" + +#: iast/views/project_search.py:35 iast/views/project_summary.py:95 +#, fuzzy +#| msgid "Number of Agent" +msgid "Projects Search" +msgstr "Number of Agent" + +#: iast/views/project_search.py:37 iast/views/project_summary.py:97 +msgid "" +"Get the id and name of the item according to the search keyword matching the " +"item name, in descending order of time." +msgstr "" + +#: iast/views/project_summary.py:30 +#, fuzzy +#| msgid "Number of Vulnerability" +msgid "Name of vulnerability" +msgstr "Number of Vulnerability" + +#: iast/views/project_summary.py:32 +#, fuzzy +#| msgid "Modify the vulnerability status" +msgid "Count of thi vulnerablity type" +msgstr "Modify the vulnerability status" + +#: iast/views/project_summary.py:34 iast/views/sca_sidebar_index.py:27 +#: iast/views/vul_sidebar_index.py:59 iast/views/vul_summary.py:58 +#: iast/views/vul_summary_project.py:53 iast/views/vul_summary_type.py:52 +#: iast/views/vuls.py:93 +#, fuzzy +#| msgid "Delete vulnerability" +msgid "Level of vulnerability" +msgstr "Delete vulnerability" + +#: iast/views/project_summary.py:38 +msgid "Timestamp, format %M-%d" +msgstr "" + +#: iast/views/project_summary.py:40 +msgid "The number of vulnerabilities corresponding to the time" +msgstr "" + +#: iast/views/project_summary.py:45 iast/views/strategys_type.py:28 +#, fuzzy +#| msgid "Delete vulnerability" +msgid "Level name of vulnerability" +msgstr "Delete vulnerability" + +#: iast/views/project_summary.py:47 iast/views/strategys_type.py:26 +#, fuzzy +#| msgid "Delete vulnerability" +msgid "Level id of vulnerability" +msgstr "Delete vulnerability" + +#: iast/views/project_summary.py:57 +msgid "The latest update time of the project" +msgstr "" + +#: iast/views/project_summary.py:62 +msgid "Statistics on the number of types of vulnerabilities" +msgstr "" + +#: iast/views/project_summary.py:69 +msgid "Statistics on the number of danger levels of vulnerabilities" +msgstr "" + +#: iast/views/project_summary.py:78 +msgid "Item details - Summary" +msgstr "Item details - Summary" + +#: iast/views/project_version_add.py:25 +msgid "New application version information" +msgstr "New application version information" + +#: iast/views/project_version_add.py:30 +msgid "Projects Version Add" +msgstr "" + +#: iast/views/project_version_add.py:32 +msgid "" +"Add project version information according to the given conditions;\n" +" if the version id is specified, the corresponding version " +"information is updated according to the given conditions." +msgstr "" + +#: iast/views/project_version_current.py:27 +#: iast/views/project_version_current.py:72 +msgid "Version setting failed" +msgstr "Version setting failed" + +#: iast/views/project_version_current.py:28 +#: iast/views/project_version_current.py:66 +msgid "Version setting success" +msgstr "Version setting success" + +#: iast/views/project_version_current.py:34 +msgid "Set to the current application version" +msgstr "Set to the current application version" + +#: iast/views/project_version_current.py:39 +msgid "Projects Version Current" +msgstr "" + +#: iast/views/project_version_current.py:41 +msgid "" +"Specify the selected version as the current version of the project according " +"to the given conditions." 
+msgstr "" + +#: iast/views/project_version_delete.py:31 +msgid "Delete application version information" +msgstr "Delete application version information" + +#: iast/views/project_version_delete.py:36 +msgid "Projects Version Delete" +msgstr "" + +#: iast/views/project_version_list.py:36 +msgid "View application version list" +msgstr "View application version list" + +#: iast/views/project_version_list.py:40 +msgid "Projects Version List" +msgstr "" + +#: iast/views/project_version_list.py:41 +msgid "Get the version information list of the item corresponding to the id" +msgstr "" + +#: iast/views/project_version_list.py:57 +msgid "Search successful" +msgstr "Search successful" + +#: iast/views/project_version_update.py:18 +#: iast/views/project_version_update.py:42 iast/views/version_update.py:49 +msgid "Update completed" +msgstr "Update completed" + +#: iast/views/project_version_update.py:24 +msgid "Update application version information" +msgstr "Update application version information" + +#: iast/views/project_version_update.py:29 +msgid "Projects Version Update" +msgstr "" + +#: iast/views/project_version_update.py:31 +msgid "Update the version information of the corresponding version id." +msgstr "" + +#: iast/views/projects.py:26 iast/views/scan_strategys.py:48 +#: iast/views/sensitive_info_rule.py:113 iast/views/strategys.py:73 +msgid "The name of the item to be searched, supports fuzzy search." +msgstr "" + +#: iast/views/projects.py:34 +msgid "View item list" +msgstr "View item list" + +#: iast/views/projects.py:39 +msgid "Projects List" +msgstr "" + +#: iast/views/sca_details.py:57 iast/views/sca_summary.py:99 +#: iast/views/scas.py:88 iast/views/vul_details.py:280 +#: iast/views/vul_status.py:51 iast/views/vul_summary.py:103 +#: iast/views/vul_summary_project.py:98 iast/views/vul_summary_type.py:97 +#: iast/views/vulnerability_status.py:41 iast/views/vuls.py:147 +msgid "Get data sample" +msgstr "" + +#: iast/views/sca_details.py:59 iast/views/sca_summary.py:101 +#: iast/views/scas.py:90 iast/views/vul_details.py:282 +#: iast/views/vul_status.py:53 iast/views/vul_summary.py:105 +#: iast/views/vul_summary_project.py:100 iast/views/vul_summary_type.py:99 +#: iast/views/vulnerability_status.py:43 iast/views/vuls.py:149 +msgid "" +"The aggregation results are programming language, risk level, vulnerability " +"type, project" +msgstr "" + +#: iast/views/sca_details.py:85 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Component Detail" +msgstr "Vulnerability type name" + +#: iast/views/sca_details.py:87 +msgid "Get the details of the corresponding component by specifying the id." 
+msgstr "" + +#: iast/views/sca_details.py:100 +msgid "Components do not exist or no permission to access" +msgstr "Components do not exist or no permission to access" + +#: iast/views/sca_details.py:137 +msgid "Current version stopped for maintenance or it is not a secure version" +msgstr "Current version stopped for maintenance or it is not a secure version" + +#: iast/views/sca_details.py:152 +msgid "Component information query failed" +msgstr "Component information query failed" + +#: iast/views/sca_sidebar_index.py:40 iast/views/sca_summary.py:91 +#: iast/views/scas.py:80 iast/views/vul_list_for_plugin.py:54 +#: iast/views/vul_sidebar_index.py:70 iast/views/vul_summary.py:96 +#: iast/views/vul_summary_project.py:91 iast/views/vul_summary_type.py:90 +#: iast/views/vuls.py:140 +msgid "Sorted index" +msgstr "" + +#: iast/views/sca_sidebar_index.py:46 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Component List" +msgstr "Vulnerability type name" + +#: iast/views/sca_sidebar_index.py:48 +msgid "" +"Use the specified project information to obtain the corresponding component." +msgstr "" + +#: iast/views/sca_summary.py:31 +msgid "Three-party components overview" +msgstr "Three-party components overview" + +#: iast/views/sca_summary.py:58 iast/views/scas.py:47 +#: iast/views/vul_summary.py:50 iast/views/vul_summary_project.py:45 +#: iast/views/vul_summary_type.py:44 +#, fuzzy +#| msgid "Number of Agent" +msgid "Name of Project" +msgstr "Number of Agent" + +#: iast/views/sca_summary.py:63 iast/views/scas.py:57 +#: iast/views/vul_summary.py:63 iast/views/vul_summary_project.py:58 +#: iast/views/vul_summary_type.py:57 iast/views/vuls.py:32 +#: iast/views/vuls.py:107 +msgid "Id of Project" +msgstr "" + +#: iast/views/sca_summary.py:68 +#, fuzzy +#| msgid "Delete vulnerability" +msgid "The id level of vulnerability" +msgstr "Delete vulnerability" + +#: iast/views/sca_summary.py:76 iast/views/scas.py:65 +#: iast/views/vul_summary.py:71 iast/views/vul_summary_project.py:66 +#: iast/views/vul_summary_type.py:65 iast/views/vuls.py:115 +msgid "The default is the current version id of the project." +msgstr "" + +#: iast/views/sca_summary.py:82 iast/views/scas.py:71 +msgid "Fuzzy keyword search field for package_name." 
+msgstr "" + +#: iast/views/sca_summary.py:147 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Component Summary (with project)" +msgstr "Vulnerability type name" + +#: iast/views/sca_summary.py:149 +msgid "" +"Use the specified project information to get the corresponding component " +"summary" +msgstr "" + +#: iast/views/scan_strategys.py:75 iast/views/scan_strategys.py:111 +#: iast/views/scan_strategys.py:140 iast/views/scan_strategys.py:167 +#: iast/views/scan_strategys.py:189 iast/views/scan_strategys.py:211 +#: iast/views/scan_strategys.py:225 iast/views/scan_strategys.py:242 +#, fuzzy +#| msgid "No strategy" +msgid "ScanStrategy" +msgstr "No strategy" + +#: iast/views/scan_strategys.py:76 +msgid "ScanStrategy Relation Projects" +msgstr "" + +#: iast/views/scan_strategys.py:78 +msgid "Get scan strategy relation projects" +msgstr "" + +#: iast/views/scan_strategys.py:112 +#, fuzzy +#| msgid "Strategy does not exist" +msgid "ScanStrategy List" +msgstr "Strategy does not exist" + +#: iast/views/scan_strategys.py:141 +#, fuzzy +#| msgid "Strategy does not exist" +msgid "ScanStrategy Create" +msgstr "Strategy does not exist" + +#: iast/views/scan_strategys.py:143 +msgid "Create ScanStrategy" +msgstr "" + +#: iast/views/scan_strategys.py:159 iast/views/sensitive_info_rule.py:190 +#, fuzzy +#| msgid "Created success" +msgid "create success" +msgstr "Created success" + +#: iast/views/scan_strategys.py:168 +#, fuzzy +#| msgid "Strategy does not exist" +msgid "ScanStrategy Update" +msgstr "Strategy does not exist" + +#: iast/views/scan_strategys.py:186 iast/views/scan_strategys.py:234 +#: iast/views/scan_strategys.py:249 iast/views/sensitive_info_rule.py:215 +#: iast/views/sensitive_info_rule.py:339 iast/views/sensitive_info_rule.py:355 +#, fuzzy +#| msgid "Created success" +msgid "update success" +msgstr "Created success" + +#: iast/views/scan_strategys.py:190 +#, fuzzy +#| msgid "Strategy does not exist" +msgid "ScanStrategy delete" +msgstr "Strategy does not exist" + +#: iast/views/scan_strategys.py:205 iast/views/sensitive_info_rule.py:228 +#, fuzzy +#| msgid "Handle success" +msgid "delete success" +msgstr "Handle success" + +#: iast/views/scan_strategys.py:212 +#, fuzzy +#| msgid "Strategy does not exist" +msgid "ScanStrategy get" +msgstr "Strategy does not exist" + +#: iast/views/scan_strategys.py:213 +msgid "Get the item with pk" +msgstr "" + +#: iast/views/scan_strategys.py:226 +msgid "ScanStrategy batch status" +msgstr "" + +#: iast/views/scan_strategys.py:227 iast/views/sensitive_info_rule.py:333 +msgid "batch update status." +msgstr "" + +#: iast/views/scan_strategys.py:243 +#, fuzzy +#| msgid "Strategy does not exist" +msgid "ScanStrategy all status" +msgstr "Strategy does not exist" + +#: iast/views/scan_strategys.py:244 iast/views/sensitive_info_rule.py:349 +msgid "all update status." +msgstr "" + +#: iast/views/scas.py:52 +#, fuzzy +#| msgid "Delete vulnerability" +msgid "The id of level of vulnerability" +msgstr "Delete vulnerability" + +#: iast/views/scas.py:125 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Component List (with project)" +msgstr "Vulnerability type name" + +#: iast/views/scas.py:127 +msgid "" +"use the specified project information to obtain the corresponding component." 
+msgstr "" + +#: iast/views/sensitive_info_rule.py:116 +msgid "regex pattern" +msgstr "" + +#: iast/views/sensitive_info_rule.py:117 +msgid "the data for test regex" +msgstr "" + +#: iast/views/sensitive_info_rule.py:131 iast/views/sensitive_info_rule.py:161 +#: iast/views/sensitive_info_rule.py:195 iast/views/sensitive_info_rule.py:218 +#: iast/views/sensitive_info_rule.py:231 iast/views/sensitive_info_rule.py:249 +#: iast/views/sensitive_info_rule.py:263 iast/views/sensitive_info_rule.py:331 +#: iast/views/sensitive_info_rule.py:347 +msgid "SensitiveInfoRule" +msgstr "" + +#: iast/views/sensitive_info_rule.py:132 +msgid "SensitiveInfoRule List" +msgstr "" + +#: iast/views/sensitive_info_rule.py:162 +msgid "SensitiveInfoRule Create" +msgstr "" + +#: iast/views/sensitive_info_rule.py:196 +msgid "SensitiveInfoRule Update" +msgstr "" + +#: iast/views/sensitive_info_rule.py:219 +msgid "SensitiveInfoRule delete" +msgstr "" + +#: iast/views/sensitive_info_rule.py:232 +msgid "SensitiveInfoRule get" +msgstr "" + +#: iast/views/sensitive_info_rule.py:250 +msgid "SensitiveInfoRule Pattern Type List" +msgstr "" + +#: iast/views/sensitive_info_rule.py:252 +#, fuzzy +#| msgid "Number of Agent" +msgid "Get the item corresponding to the user." +msgstr "Number of Agent" + +#: iast/views/sensitive_info_rule.py:264 +msgid "SensitiveInfoRule validated_data" +msgstr "" + +#: iast/views/sensitive_info_rule.py:332 +msgid "SensitiveInfoRule batch status" +msgstr "" + +#: iast/views/sensitive_info_rule.py:348 +msgid "SensitiveInfoRule all status" +msgstr "" + +#: iast/views/strategy_delete.py:30 iast/views/strategy_disable.py:24 +#: iast/views/strategy_enable.py:26 iast/views/strategy_modified.py:27 +#: iast/views/strategys.py:80 iast/views/strategys.py:104 +#: iast/views/strategys.py:177 iast/views/strategys_add.py:34 +#: iast/views/strategys_list.py:27 iast/views/strategys_type.py:40 +#, fuzzy +#| msgid "No strategy" +msgid "Strategy" +msgstr "No strategy" + +#: iast/views/strategy_delete.py:31 +#, fuzzy +#| msgid "Strategy does not exist" +msgid "Strategy Delete" +msgstr "Strategy does not exist" + +#: iast/views/strategy_delete.py:33 +msgid "Delete the corresponding strategy according to id" +msgstr "" + +#: iast/views/strategy_delete.py:41 +msgid "This strategy does not exist" +msgstr "This strategy does not exist" + +#: iast/views/strategy_disable.py:17 iast/views/strategy_disable.py:44 +msgid "Strategy is disabled, total {} hook rules" +msgstr "Strategy is disabled, total {} hook rules" + +#: iast/views/strategy_disable.py:25 +msgid "Strategy Disable" +msgstr "" + +#: iast/views/strategy_disable.py:27 +msgid "Disable the corresponding strategy according to id" +msgstr "" + +#: iast/views/strategy_enable.py:27 +msgid "Strategy Enbale" +msgstr "" + +#: iast/views/strategy_enable.py:29 +msgid "Enable the corresponding strategy according to id" +msgstr "" + +#: iast/views/strategy_modified.py:13 iast/views/strategys.py:29 +#: iast/views/strategys.py:43 iast/views/strategys_type.py:19 +msgid "The name of the vulnerability type targeted by the strategy" +msgstr "" + +#: iast/views/strategy_modified.py:14 iast/views/strategys.py:30 +#: iast/views/strategys.py:44 +#, fuzzy +#| msgid "Number of Vulnerability" +msgid "Types of vulnerabilities targeted by the strategy" +msgstr "Number of Vulnerability" + +#: iast/views/strategy_modified.py:15 iast/views/strategys.py:31 +#: iast/views/strategys.py:45 +msgid "This field indicates whether the vulnerability is enabled, 1 or 0" +msgstr "" + +#: 
iast/views/strategy_modified.py:16 iast/views/strategys.py:32 +#: iast/views/strategys.py:46 +msgid "Description of the corresponding vulnerabilities of the strategy" +msgstr "" + +#: iast/views/strategy_modified.py:18 iast/views/strategys.py:34 +#: iast/views/strategys.py:49 iast/views/strategys_type.py:21 +msgid "The strategy corresponds to the level of vulnerability" +msgstr "" + +#: iast/views/strategy_modified.py:20 iast/views/strategys.py:38 +#: iast/views/strategys.py:53 +msgid "Suggestions for repairing vulnerabilities corresponding to the strategy" +msgstr "" + +#: iast/views/strategy_modified.py:28 +#, fuzzy +#| msgid "No strategy" +msgid "Strategy modified" +msgstr "No strategy" + +#: iast/views/strategy_modified.py:30 iast/views/strategys.py:107 +#: iast/views/strategys_list.py:30 +msgid "Get a list of strategies." +msgstr "" + +#: iast/views/strategys.py:36 +#, fuzzy +#| msgid "Strategy does not exist" +msgid "Strategy update time" +msgstr "Strategy does not exist" + +#: iast/views/strategys.py:81 +#, fuzzy +#| msgid "Strategy does not exist" +msgid "Strategy retrieve" +msgstr "Strategy does not exist" + +#: iast/views/strategys.py:83 +msgid "Get a strategiey by id." +msgstr "" + +#: iast/views/strategys.py:105 +#, fuzzy +#| msgid "Strategy does not exist" +msgid "Strategy List" +msgstr "Strategy does not exist" + +#: iast/views/strategys.py:174 +msgid "No strategy" +msgstr "No strategy" + +#: iast/views/strategys.py:178 +#, fuzzy +#| msgid "No strategy" +msgid "Strategy Add" +msgstr "No strategy" + +#: iast/views/strategys_add.py:20 +msgid "The id corresponding to the strategys, use\",\" for segmentation." +msgstr "" + +#: iast/views/strategys_add.py:21 +#, fuzzy +#| msgid "No strategy" +msgid "The name of strategy" +msgstr "No strategy" + +#: iast/views/strategys_add.py:35 +#, fuzzy +#| msgid "No strategy" +msgid "Sacn Strategy Add" +msgstr "No strategy" + +#: iast/views/strategys_list.py:16 +#, fuzzy +#| msgid "Failed to create strategy" +msgid "The name of the strategy" +msgstr "Failed to create strategy" + +#: iast/views/strategys_list.py:28 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Strategy List (with user)" +msgstr "Vulnerability type name" + +#: iast/views/strategys_type.py:41 +msgid "Strategy Type" +msgstr "" + +#: iast/views/strategys_type.py:43 +msgid "Get a list of strategy types." 
+msgstr "" + +#: iast/views/system_info.py:14 +msgid "API - System Information Page" +msgstr "API - System Information Page" + +#: iast/views/user_info.py:20 +msgid "User Info" +msgstr "User Info" + +#: iast/views/user_login.py:20 +msgid "User login" +msgstr "User login" + +#: iast/views/user_login.py:35 +msgid "Captcha timed out" +msgstr "Captcha timed out" + +#: iast/views/user_login.py:43 +msgid "Login successful" +msgstr "Login successful" + +#: iast/views/user_login.py:48 iast/views/user_login.py:55 +msgid "Login failed" +msgstr "Login failed" + +#: iast/views/user_login.py:50 +msgid "Verification code error" +msgstr "Verification code error" + +#: iast/views/user_login.py:52 +msgid "verification code should not be empty" +msgstr "verification code should not be empty" + +#: iast/views/user_logout.py:21 +msgid "Sign out" +msgstr "Sign out" + +#: iast/views/user_logout.py:28 +msgid "Sign out successfully" +msgstr "Sign out successfully" + +#: iast/views/user_passwrd.py:18 +msgid "Change Password" +msgstr "Change Password" + +#: iast/views/user_passwrd.py:24 +msgid "Password should not be empty" +msgstr "Password should not be empty" + +#: iast/views/user_passwrd.py:32 +msgid "Password has been changed successfully" +msgstr "Password has been changed successfully" + +#: iast/views/user_passwrd.py:34 +msgid "Incorrect old password" +msgstr "Incorrect old password" + +#: iast/views/user_passwrd.py:37 +#, fuzzy +#| msgid "Incorrect parameter" +msgid "Incorrect" +msgstr "Incorrect parameter" + +#: iast/views/user_passwrd_reset.py:19 +msgid "Reset Password" +msgstr "Reset Password" + +#: iast/views/user_passwrd_reset.py:30 +msgid "User {} password reset success" +msgstr "User {} password reset success" + +#: iast/views/user_passwrd_reset.py:33 +msgid "User does not exist" +msgstr "User does not exist" + +#: iast/views/user_passwrd_reset.py:37 iast/views/user_passwrd_reset.py:38 +msgid "UserID is empty" +msgstr "UserID is empty" + +#: iast/views/user_passwrd_reset.py:41 +msgid "UserID must be a numeric" +msgstr "UserID must be a numeric" + +#: iast/views/user_passwrd_reset.py:44 +#, fuzzy, python-brace-format +#| msgid "Password reset failed, error message: {E}" +msgid "Password reset failed, reasons: {E}" +msgstr "Password reset failed, reasons: {E}" + +#: iast/views/user_register_batch.py:44 +msgid "Account has been created successfully" +msgstr "Account has been created successfully" + +#: iast/views/user_register_batch.py:70 +msgid "User {} already exists" +msgstr "User {} already exists" + +#: iast/views/user_register_batch.py:75 +msgid "Failed to create user, error message: token is incorrect" +msgstr "Failed to create user, error message: token is incorrect" + +#: iast/views/user_register_batch.py:76 +msgid "Account registration successful" +msgstr "Account registration successful" + +#: iast/views/user_register_batch.py:91 +msgid "User account file read error" +msgstr "User account file read error" + +#: iast/views/user_token.py:19 +msgid "Get OpenAPI token" +msgstr "Get OpenAPI token" + +#: iast/views/version_update.py:20 +msgid "Updated is currently not allowed" +msgstr "Updated is currently not allowed" + +#: iast/views/vul_count_for_plugin.py:27 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Vulnerability Count (with agent name)" +msgstr "Vulnerability type name" + +#: iast/views/vul_count_for_plugin.py:29 +msgid "Get the number of vulnerabilities corresponding to the Agent." 
+msgstr "" + +#: iast/views/vul_count_for_plugin.py:35 iast/views/vul_list_for_plugin.py:70 +msgid "Please input agent name." +msgstr "Please input agent name." + +#: iast/views/vul_count_for_plugin.py:41 iast/views/vul_list_for_plugin.py:76 +msgid "agent_name not found" +msgstr "agent_name not found" + +#: iast/views/vul_delete.py:25 +msgid "Delete vulnerability" +msgstr "Delete vulnerability" + +#: iast/views/vul_delete.py:28 +#, fuzzy +#| msgid "Vulnerability URL" +msgid "Vulnerability Delete" +msgstr "Vulnerability URL" + +#: iast/views/vul_delete.py:31 +msgid "Delete the corresponding vulnerability by specifying the id" +msgstr "" + +#: iast/views/vul_delete.py:45 +msgid "Failed to delete, error message: Vulnerability does not exist" +msgstr "Failed to delete, error message: Vulnerability does not exist" + +#: iast/views/vul_details.py:137 +msgid "Source method" +msgstr "Source method" + +#: iast/views/vul_details.py:139 +msgid "Hazardous method" +msgstr "Hazardous method" + +#: iast/views/vul_details.py:141 +msgid "Propagation method" +msgstr "Propagation method" + +#: iast/views/vul_details.py:158 +msgid "Analysis of errovence analysis of stain call diagram: {}" +msgstr "Analysis of errovence analysis of stain call diagram: {}" + +#: iast/views/vul_details.py:168 +msgid "Error analysis of Header, error message: {}" +msgstr "Error analysis of Header, error message: {}" + +#: iast/views/vul_details.py:207 iast/views/vul_details.py:361 +msgid "[{}] Vulnerability information parsing error, error message: {}" +msgstr "[{}] Vulnerability information parsing error, error message: {}" + +#: iast/views/vul_details.py:341 +msgid "" +"Use the corresponding id of the vulnerability to query the details of the " +"vulnerability" +msgstr "" + +#: iast/views/vul_details.py:362 +msgid "Vulnerability data query error" +msgstr "Vulnerability data query error" + +#: iast/views/vul_levels.py:27 +#, fuzzy +#| msgid "View engine list" +msgid "Vul level list" +msgstr "View engine list" + +#: iast/views/vul_levels.py:28 +#, fuzzy +#| msgid "Vulnerability URL" +msgid "Vul level List" +msgstr "Vulnerability URL" + +#: iast/views/vul_levels.py:29 +#, fuzzy +#| msgid "Development language" +msgid "Get a list of vul level." 
+msgstr "Development language" + +#: iast/views/vul_list_for_plugin.py:40 +#, fuzzy +#| msgid "Number of Agent" +msgid "Name of agent" +msgstr "Number of Agent" + +#: iast/views/vul_list_for_plugin.py:45 iast/views/vul_sidebar_index.py:63 +#: iast/views/vul_summary.py:87 iast/views/vul_summary_project.py:82 +#: iast/views/vul_summary_type.py:81 iast/views/vuls.py:131 +msgid "The URL corresponding to the vulnerability" +msgstr "" + +#: iast/views/vul_list_for_plugin.py:61 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Vulnerability List (with agent name)" +msgstr "Vulnerability type name" + +#: iast/views/vul_list_for_plugin.py:63 +msgid "Use the agent name to get the corresponding list of vulnerabilities" +msgstr "" + +#: iast/views/vul_recheck.py:30 +#, fuzzy +#| msgid "Strategy type does not exist" +msgid "Whether the project does not exist agent" +msgstr "Strategy type does not exist" + +#: iast/views/vul_recheck.py:32 +msgid "Waiting queue length for replay" +msgstr "" + +#: iast/views/vul_recheck.py:34 +msgid "Success queue length for replay" +msgstr "" + +#: iast/views/vul_recheck.py:36 +msgid "Checking queue length for replay" +msgstr "" + +#: iast/views/vul_recheck.py:42 iast/views/vul_recheck.py:52 +#: iast/views/vul_recheck.py:164 iast/views/vul_recheck.py:242 +#: iast/views/vul_recheck_v2.py:32 iast/views/vul_recheck_v2.py:41 +#: iast/views/vul_recheck_v2.py:153 +msgid "Handle success" +msgstr "Handle success" + +#: iast/views/vul_recheck.py:43 iast/views/vul_recheck.py:224 +#: iast/views/vul_recheck_v2.py:33 iast/views/vul_recheck_v2.py:213 +msgid "Item ID should not be empty" +msgstr "Item ID should not be empty" + +#: iast/views/vul_recheck.py:44 iast/views/vul_recheck.py:243 +#: iast/views/vul_recheck_v2.py:34 iast/views/vul_recheck_v2.py:232 +msgid "Incorrect format parameter" +msgstr "Incorrect format parameter" + +#: iast/views/vul_recheck.py:45 iast/views/vul_recheck.py:192 +#: iast/views/vul_recheck.py:247 iast/views/vul_recheck_v2.py:35 +#: iast/views/vul_recheck_v2.py:181 iast/views/vul_recheck_v2.py:236 +msgid "Batch playback error" +msgstr "Batch playback error" + +#: iast/views/vul_recheck.py:46 iast/views/vul_recheck.py:186 +#: iast/views/vul_recheck_v2.py:36 iast/views/vul_recheck_v2.py:175 +msgid "" +"Current application has not been associated with probes and cannot be " +"reproduced." +msgstr "" +"Current application has not been associated with probes and cannot be " +"reproduced." + +#: iast/views/vul_recheck.py:53 iast/views/vul_recheck.py:146 +#: iast/views/vul_recheck_v2.py:42 iast/views/vul_recheck_v2.py:135 +msgid "IDS should not be empty" +msgstr "IDS should not be empty" + +#: iast/views/vul_recheck.py:54 iast/views/vul_recheck.py:150 +#: iast/views/vul_recheck_v2.py:43 iast/views/vul_recheck_v2.py:139 +msgid "IDS must be: Vulnerability ID, Vulnerability ID Format" +msgstr "IDS must be: Vulnerability ID, Vulnerability ID Format" + +#: iast/views/vul_recheck.py:55 iast/views/vul_recheck.py:168 +#: iast/views/vul_recheck_v2.py:44 iast/views/vul_recheck_v2.py:157 +#: iast/views/vul_request_replay.py:238 +msgid "Vulnerability replay error" +msgstr "Vulnerability replay error" + +#: iast/views/vul_recheck.py:121 iast/views/vul_recheck.py:201 +#: iast/views/vul_recheck_v2.py:110 iast/views/vul_recheck_v2.py:190 +msgid "" +"available options are (\"all\",\"project\").\n" +" Corresponding to all or specific project respectively." 
+msgstr "" + +#: iast/views/vul_recheck.py:129 iast/views/vul_recheck.py:209 +#: iast/views/vul_recheck_v2.py:118 iast/views/vul_recheck_v2.py:198 +msgid "" +"The corresponding id of the Project.\n" +" Only If the type is project, the projectId here will be used." +msgstr "" + +#: iast/views/vul_recheck.py:133 iast/views/vul_recheck.py:213 +#: iast/views/vul_recheck_v2.py:122 iast/views/vul_recheck_v2.py:202 +#, fuzzy +#| msgid "Vulnerability description" +msgid "Vulnerability verification" +msgstr "Vulnerability description" + +#: iast/views/vul_recheck.py:134 iast/views/vul_recheck.py:214 +#: iast/views/vul_recheck_v2.py:123 iast/views/vul_recheck_v2.py:203 +msgid "" +"Verify the user's corresponding vulnerabilities.\n" +" Need to specify the type" +msgstr "" + +#: iast/views/vul_recheck_v2.py:222 iast/views/vul_recheck_v2.py:231 +#, fuzzy +#| msgid "Verification code error" +msgid "Verification in progress" +msgstr "Verification code error" + +#: iast/views/vul_request_replay.py:77 +msgid "HTTP request parsing error, error message: {}" +msgstr "HTTP request parsing error, error message: {}" + +#: iast/views/vul_request_replay.py:200 +msgid "Stain pool data does not exist or no permission to access" +msgstr "Stain pool data does not exist or no permission to access" + +#: iast/views/vul_request_replay.py:214 +msgid "" +"The probe has been destroyed or suspended, please check the probe status" +msgstr "" +"The probe has been destroyed or suspended, please check the probe status" + +#: iast/views/vul_request_replay.py:220 +msgid "Replay request is illegal" +msgstr "Replay request is illegal" + +#: iast/views/vul_request_replay.py:233 +msgid "Relay request success" +msgstr "Relay request success" + +#: iast/views/vul_request_replay.py:252 +msgid "Response header analysis error, error message: {}" +msgstr "Response header analysis error, error message: {}" + +#: iast/views/vul_request_replay.py:283 iast/views/vul_request_replay.py:301 +msgid "Replay request does not exist or no permission to access" +msgstr "Replay request does not exist or no permission to access" + +#: iast/views/vul_request_replay.py:304 +msgid "Replay request processing" +msgstr "Replay request processing" + +#: iast/views/vul_request_replay.py:318 +msgid "Replay failed" +msgstr "Replay failed" + +#: iast/views/vul_sidebar_index.py:48 iast/views/vul_summary.py:44 +#: iast/views/vul_summary_project.py:39 iast/views/vul_summary_type.py:38 +#: iast/views/vuls.py:77 +#, fuzzy +#| msgid "Number of Vulnerability" +msgid "Type of vulnerability" +msgstr "Number of Vulnerability" + +#: iast/views/vul_sidebar_index.py:52 +#, fuzzy +#| msgid "Modify the vulnerability status" +msgid "ID of the vulnerability type" +msgstr "Modify the vulnerability status" + +#: iast/views/vul_sidebar_index.py:73 +#, fuzzy +#| msgid "Vulnerability URL" +msgid "Vulnerability List" +msgstr "Vulnerability URL" + +#: iast/views/vul_sidebar_index.py:75 +msgid "Get the list of vulnerabilities corresponding to the user." +msgstr "" + +#: iast/views/vul_status.py:19 iast/views/vul_status.py:91 +msgid "Vulnerability status is modified to {}" +msgstr "Vulnerability status is modified to {}" + +#: iast/views/vul_status.py:25 +msgid "Modify the vulnerability status" +msgstr "Modify the vulnerability status" + +#: iast/views/vul_status.py:31 +msgid "Update with status_id" +msgstr "" + +#: iast/views/vul_status.py:33 +#, fuzzy +#| msgid "Modify the vulnerability status" +msgid "Update vulnerability status with status id." 
+msgstr "Modify the vulnerability status" + +#: iast/views/vul_status.py:40 +msgid "Update with status name(Not recommended)" +msgstr "" + +#: iast/views/vul_status.py:42 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Update vulnerability status with status name." +msgstr "Vulnerability type name" + +#: iast/views/vul_status.py:61 +#, fuzzy +#| msgid "Vulnerability status is modified to {}" +msgid "Vulnerability Status Modify" +msgstr "Vulnerability status is modified to {}" + +#: iast/views/vul_status.py:62 +msgid "" +"Modify the vulnerability status of the specified id. \n" +" The status is specified by the following two parameters. \n" +" Status corresponds to the status noun and status_id corresponds to " +"the status id. \n" +" Both can be obtained from the vulnerability status list API, and " +"status_id is preferred." +msgstr "" + +#: iast/views/vul_summary.py:31 iast/views/vul_summary_project.py:27 +#: iast/views/vul_summary_type.py:26 +msgid "Applied vulnerability overview" +msgstr "Applied vulnerability overview" + +#: iast/views/vul_summary.py:77 iast/views/vul_summary_project.py:72 +#: iast/views/vul_summary_type.py:71 iast/views/vuls.py:121 +msgid "Name of status" +msgstr "" + +#: iast/views/vul_summary.py:82 iast/views/vul_summary_project.py:77 +#: iast/views/vul_summary_type.py:76 iast/views/vuls.py:126 +msgid "Id of status" +msgstr "" + +#: iast/views/vul_summary.py:152 iast/views/vul_summary_project.py:127 +#: iast/views/vul_summary_type.py:129 +#, fuzzy +#| msgid "Vulnerability URL" +msgid "Vulnerability Summary" +msgstr "Vulnerability URL" + +#: iast/views/vul_summary.py:154 iast/views/vul_summary_project.py:129 +#: iast/views/vul_summary_type.py:131 +msgid "" +"Use the following conditions to view the statistics of the number of " +"vulnerabilities in the project." +msgstr "" + +#: iast/views/vulnerability_status.py:79 +#, fuzzy +#| msgid "Vulnerability status is modified to {}" +msgid "Vulnerability Status List" +msgstr "Vulnerability status is modified to {}" + +#: iast/views/vulnerability_status.py:81 +msgid "" +"Vulnerability status list, which contains the optional status of " +"vulnerabilities. \n" +" When calling the vulnerability status modification API, please " +"obtain the vulnerability status data from this API first." 
+msgstr "" + +#: iast/views/vuls.py:30 iast/views/vuls.py:83 +#, fuzzy +#| msgid "Number of Agent" +msgid "name of project" +msgstr "Number of Agent" + +#: iast/views/vuls.py:101 +#, fuzzy +#| msgid "Delete vulnerability" +msgid "The id Level of vulnerability" +msgstr "Delete vulnerability" + +#: iast/views/vuls.py:188 +#, fuzzy +#| msgid "Vulnerability type name" +msgid "Vulnerability List (with project)" +msgstr "Vulnerability type name" + +#: iast/views/vuls.py:191 +msgid "Get the list of vulnerabilities corresponding to the project" +msgstr "" + +#: scaupload/views.py:73 scaupload/views.py:94 scaupload/views.py:130 +msgid "Get sca db bulk" +msgstr "" + +#: scaupload/views.py:74 scaupload/views.py:95 scaupload/views.py:131 +#: scaupload/views.py:156 scaupload/views.py:165 scaupload/views.py:181 +#: scaupload/views.py:190 scaupload/views.py:232 scaupload/views.py:238 +#: scaupload/views.py:249 +#, fuzzy +#| msgid "Agent list" +msgid "Get sca list" +msgstr "Agent list" + +#: scaupload/views.py:75 scaupload/views.py:96 scaupload/views.py:132 +#: scaupload/views.py:157 scaupload/views.py:166 scaupload/views.py:182 +#: scaupload/views.py:191 scaupload/views.py:233 scaupload/views.py:239 +#: scaupload/views.py:250 +msgid "SCA DB" +msgstr "" + +#: scaupload/views.py:155 scaupload/views.py:164 scaupload/views.py:180 +#: scaupload/views.py:189 +msgid "Get sca db" +msgstr "" + +#: scaupload/views.py:231 scaupload/views.py:237 +msgid "Get sca license list" +msgstr "" + +#: scaupload/views.py:248 +msgid "Get sca stat " +msgstr "" + +#: webapi/settings.py:383 +#, python-brace-format +msgid "" +"Here is the API documentation in webapi. The corresponding management part " +"API can be found through the relevant tag.\n" +"\n" +"There are two authentication methods. You can obtain csrf_token and " +"sessionid through the login process, or access the corresponding API through " +"the user's corresponding Token.\n" +"\n" +"The Token method is recommended here, and users can find it in the Agent " +"installation interface such as -H\n" +" 'Authorization: Token {token}', here is the token corresponding to the " +"user, the token method also requires a token like this on the request header." 
+msgstr "" + +#~ msgid "Default department" +#~ msgstr "Default department" + +#, fuzzy +#~| msgid "2.3 Vulnerability details" +#~ msgid "Vulnerability details query " +#~ msgstr "2.3 Vulnerability details" + +#, fuzzy +#~| msgid "Vulnerability type name" +#~ msgid "component" +#~ msgstr "Vulnerability type name" + +#, fuzzy +#~| msgid "Vulnerability description" +#~ msgid "Vulnerability Aggregation" +#~ msgstr "Vulnerability description" + +#~ msgid "Confirmed" +#~ msgstr "Confirmed" + +#~ msgid "" +#~ "Deletion failed, please forcely delete all the users belong to the " +#~ "department{}" +#~ msgstr "" +#~ "Deletion failed, please forcely delete all the users belong to the " +#~ "department{}" + +#~ msgid "Running" +#~ msgstr "Running" + +#~ msgid "Stopped" +#~ msgstr "Stopped" diff --git a/static/i18n/views/setlang.py b/static/i18n/views/setlang.py new file mode 100644 index 000000000..a0442433d --- /dev/null +++ b/static/i18n/views/setlang.py @@ -0,0 +1,61 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : setlang +# @created : Thursday Aug 05, 2021 14:57:40 CST +# +# @description : +###################################################################### + +from dongtai_common.endpoint import R, AnonymousAndUserEndPoint +from django.conf import settings +from django.http import HttpResponse, HttpResponseRedirect, JsonResponse +from dongtai_web.utils import extend_schema_with_envcheck +from dongtai_conf.settings import LANGUAGES +from django.utils.translation import gettext_lazy as _ +LANGUAGE_QUERY_PARAMETER = 'language' + +ALLOWED_LANG_CODE = list(map(lambda x: x[0], LANGUAGES)) + + +class LanguageSetting(AnonymousAndUserEndPoint): + @extend_schema_with_envcheck([{ + 'name': LANGUAGE_QUERY_PARAMETER, + 'type': str, + 'description': 'The options are (en,zh)' + }]) + def get(self, request): + lang_code = request.GET.get(LANGUAGE_QUERY_PARAMETER) + if lang_code not in ALLOWED_LANG_CODE: + return R.failure(msg=_('this language not supported now')) + response = JsonResponse({'status': 201}) + if request.user.is_authenticated: + user = request.user + user.default_language = lang_code + user.save() + response.set_cookie( + settings.LANGUAGE_COOKIE_NAME, + lang_code, + ) + return response + +#from dongtai.endpoint import R, TalentAdminEndPoint +#from configparser import ConfigParser +#from webapi.settings import BASE_DIR +# +# +#class DefaultLanguageSetting(AnonymousAndUserEndPoint): +# def post(self, request): +# config = ConfigParser() +# default_language = request.data.get('default_language', None) +# CONFIGPATH = os.path.join(BASE_DIR, 'conf/config.ini') +# config.read(CONFIGPATH) +# config.set('other', 'default_language', default_language) +# with open(CONFIGPATH, 'w') as configfile: +# config.write(configfile) +# +# def get(self, request): +# config = ConfigParser() +# CONFIGPATH = os.path.join(BASE_DIR, 'conf/config.ini') +# config.read(CONFIGPATH) +# default_language = config.get('other', 'default_language') +# return R.success(data={'default_language': default_language}) diff --git a/static/i18n/zh/LC_MESSAGES/django.po b/static/i18n/zh/LC_MESSAGES/django.po new file mode 100644 index 000000000..2e3d3c2a4 --- /dev/null +++ b/static/i18n/zh/LC_MESSAGES/django.po @@ -0,0 +1,3663 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+#
+msgid ""
+msgstr ""
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2022-03-27 21:46+0800\n"
+"PO-Revision-Date: 2021-08-09 12:55+0800\n"
+"Last-Translator: \n"
+"Language-Team: \n"
+"Language: zh\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Application-Id-Version: \n"
+"X-Generator: Poedit 2.2.2\n"
+
+#: apiserver/report/handler/api_route_handler.py:33
+msgid "No such agent"
+msgstr "没有找到对应agent"
+
+#: apiserver/report/handler/api_route_handler.py:70
+msgid "API navigation log record successfully"
+msgstr "API导航日志记录成功"
+
+#: apiserver/report/handler/api_route_handler.py:75
+msgid "API navigation log failed, why: {}"
+msgstr ""
+
+#: apiserver/report/handler/error_log_handler.py:38
+msgid "Error log report saving success"
+msgstr "错误日志保存成功"
+
+#: apiserver/report/handler/error_log_handler.py:40
+msgid "Error log report saves failed, why: {}"
+msgstr ""
+
+#: apiserver/report/handler/hardencode_vul_handler.py:33
+msgid "class name"
+msgstr ""
+
+#: apiserver/report/handler/hardencode_vul_handler.py:36
+msgid "field"
+msgstr ""
+
+#: apiserver/report/handler/heartbeat_handler.py:105
+msgid "There is no probe under the project"
+msgstr "没有探针在项目中"
+
+#: apiserver/report/handler/heartbeat_handler.py:111
+msgid "Replay request does not exist"
+msgstr "重放请求不存在"
+
+#: apiserver/report/handler/heartbeat_handler.py:131
+#, fuzzy
+#| msgid "modified successfully"
+msgid "Reproduction request issued successfully"
+msgstr "修改成功"
+
+#: apiserver/report/handler/heartbeat_handler.py:135
+#, fuzzy
+#| msgid "Password reset failed, reasons: {E}"
+msgid "Replay request query failed, reason: {}"
+msgstr "密码重置失败,原因:{e}"
+
+#: apiserver/report/handler/report_handler_interface.py:67
+msgid "[{}] Report resolution start"
+msgstr ""
+
+#: apiserver/report/handler/report_handler_interface.py:75
+#, python-brace-format
+msgid "[{classname}] Report Analysis Completed"
+msgstr ""
+
+#: apiserver/report/handler/report_handler_interface.py:81
+#, python-brace-format
+msgid ""
+"[{classname}] report resolution failed, Agent does not exist or no right to "
+"access, report data: {report}"
+msgstr ""
+
+#: apiserver/report/handler/sca_handler.py:49
+#, fuzzy
+#| msgid "Incomplete data"
+msgid "Data is incomplete, data: {}"
+msgstr "数据不完整"
+
+#: apiserver/report/report_handler_factory.py:52
+#, fuzzy
+#| msgid "Operation type does not exist"
+msgid "Report type {} handler does not exist"
+msgstr "操作类型不存在"
+
+#: apiserver/report/report_handler_factory.py:63
+msgid "Registration report type {} handler {}"
+msgstr ""
+
+#: apiserver/views/agent_download.py:66
+#, fuzzy, python-brace-format
+#| msgid "Tenant {} creation failed, error message:{}"
+msgid "Agent configuration file creation failed, reason: {E}"
+msgstr "租户{} 创建失败,原因:{}"
+
+#: apiserver/views/agent_download.py:321
+msgid "Agent download failed, user: {}, error details: {}"
+msgstr "Agent 下载失败,用户:{},详细信息:{}"
+
+#: apiserver/views/agent_register.py:132
+msgid "The server port does not exist, has been set to the default: 0"
+msgstr ""
+
+#: apiserver/views/agent_register.py:172
+#, fuzzy
+#| msgid "Operation success"
+msgid "Server record creation success"
+msgstr "操作成功"
+
+#: apiserver/views/agent_register.py:248
+msgid "auto create project {}"
+msgstr ""
+
+#: apiserver/views/agent_register.py:251
+#, fuzzy
+#| msgid "Description of the project versoin"
+msgid "auto create project version {}"
+msgstr "项目版本的描述"
+
+#: apiserver/views/agent_update.py:48
+#, fuzzy
+#| msgid "Operation success"
+msgid
"Server record update success" +msgstr "操作成功" + +#: apiserver/views/engine_heartbeat.py:46 +#, fuzzy +#| msgid "Agent and related data deleted successfully" +msgid "[{}] Heartbeat data is successful" +msgstr "agent及相关数据删除成功" + +#: apiserver/views/engine_heartbeat.py:49 +#, fuzzy +#| msgid "Parameter parsing failed, error message: {}" +msgid "Heartbeat data failed, error reason: {}" +msgstr "参数解析出错,错误原因:{}" + +#: apiserver/views/engine_heartbeat.py:63 +#, fuzzy +#| msgid "Password reset failed, reasons: {E}" +msgid "Client IP acquisition failed, reasons: {}" +msgstr "密码重置失败,原因:{e}" + +#: apitest/views.py:38 +msgid "no project found" +msgstr "没有找到对应的项目" + +#: apitest/views.py:41 +msgid "Please enter the parameters required for the test first" +msgstr "请先在项目设置项目地址" + +#: apitest/views.py:46 apitest/views.py:72 +msgid "No API collected" +msgstr "暂无收集到的api" + +#: apitest/views.py:57 +msgid "Starting API Test" +msgstr "启动API测试" + +#: core/plugins/export_report.py:92 iast/base/agent.py:212 +#, python-brace-format +msgid "" +"We found that there is {1} in the {0} page, attacker can modify the value of " +"{2} to attack:" +msgstr "我们发现在{0}页面中存在{1},攻击者可以改变{2}的值进行攻击:" + +#: core/plugins/export_report.py:116 +#, python-brace-format +msgid "call {3} at line {2} of file {1}, incoming parameters {0}" +msgstr "在文件{1} {2}行,调用{3},传入恶意参数{0}" + +#: core/plugins/export_report.py:124 +#, python-brace-format +msgid "call function {2} at line {1} of {0}" +msgstr "文件{0} {1}行的函数{2}" + +#: core/plugins/export_report.py:131 +#, python-brace-format +msgid "run sink function {2} at line {1} of file {0}" +msgstr "最终在文件{0}{1}行的{2}执行敏感操作" + +#: core/plugins/export_report.py:137 +#, python-brace-format +msgid "" +"Code call chain: \n" +"{0}, and then {1},\n" +" {2}" +msgstr "" +"漏洞触发过程如下:\n" +"{0},经{1}传播,\n" +" {2}" + +#: core/plugins/export_report.py:139 +#, python-brace-format +msgid "Code call chain: call {1} at {0}" +msgstr "漏洞触发过程如下:在{0}调用{1}" + +#: core/plugins/export_report.py:141 iast/base/agent.py:246 +msgid "{} Appears in {} {}" +msgstr "{}出现在{}的{}" + +#: core/plugins/export_report.py:244 +msgid "Report export success" +msgstr "导出报告成功" + +#: core/plugins/export_report.py:269 core/plugins/export_report.py:519 +#: iast/views/project_report_export.py:148 +msgid "Security Testing Report" +msgstr "安全测试报告" + +#: core/plugins/export_report.py:281 core/plugins/export_report.py:511 +#: iast/views/project_report_export.py:161 +msgid "First, project information" +msgstr "一 、项目基础信息" + +#: core/plugins/export_report.py:290 core/plugins/export_report.py:506 +#: iast/views/project_report_export.py:169 +msgid "Application name" +msgstr "项目名称" + +#: core/plugins/export_report.py:293 core/plugins/export_report.py:507 +#: iast/views/project_report_export.py:172 +msgid "Author" +msgstr "创建人员" + +#: core/plugins/export_report.py:296 iast/views/project_report_export.py:175 +msgid "Application type" +msgstr "项目类型" + +#: core/plugins/export_report.py:299 core/plugins/export_report.py:508 +#: iast/views/project_report_export.py:178 +msgid "Number of Vulnerability" +msgstr "漏洞数量" + +#: core/plugins/export_report.py:302 core/plugins/export_report.py:509 +#: iast/views/project_report_export.py:181 +msgid "Number of Agent" +msgstr "Agent数量" + +#: core/plugins/export_report.py:305 iast/views/project_report_export.py:184 +msgid "Latest time" +msgstr "最新时间" + +#: core/plugins/export_report.py:322 core/plugins/export_report.py:512 +#: iast/views/project_report_export.py:206 +msgid "Second, the result analysis" +msgstr "二、结果分析" + +#: 
core/plugins/export_report.py:326 iast/views/project_report_export.py:210 +msgid "2.1 Vulnerability Severity Levels Distribution" +msgstr "2.1 漏洞等级分布" + +#: core/plugins/export_report.py:336 iast/views/project_report_export.py:220 +msgid "2.2 Distribution of Vulnerability" +msgstr "2.2 漏洞类型分布" + +#: core/plugins/export_report.py:343 core/plugins/export_report.py:515 +#: core/plugins/export_report.py:610 iast/views/project_report_export.py:228 +msgid "Severity levels" +msgstr "漏洞等级" + +#: core/plugins/export_report.py:344 core/plugins/export_report.py:516 +#: core/plugins/export_report.py:609 iast/views/project_report_export.py:229 +msgid "Vulnerability type name" +msgstr "漏洞类型名称" + +#: core/plugins/export_report.py:345 core/plugins/export_report.py:517 +#: iast/views/project_report_export.py:230 +msgid "Number" +msgstr "数量" + +#: core/plugins/export_report.py:356 iast/views/project_report_export.py:243 +msgid "2.3 Vulnerability details" +msgstr "2.3 漏洞详情" + +#: core/plugins/export_report.py:373 core/plugins/export_report.py:459 +#: core/plugins/export_report.py:580 iast/views/project_report_export.py:266 +msgid "Summary" +msgstr "概要信息" + +#: core/plugins/export_report.py:377 core/plugins/export_report.py:461 +#: core/plugins/export_report.py:582 iast/views/project_report_export.py:270 +msgid "Severity level" +msgstr "危害等级" + +#: core/plugins/export_report.py:381 core/plugins/export_report.py:463 +#: core/plugins/export_report.py:465 core/plugins/export_report.py:584 +#: core/plugins/export_report.py:611 iast/views/project_report_export.py:274 +msgid "First scan time" +msgstr "首次检测时间" + +#: core/plugins/export_report.py:385 core/plugins/export_report.py:586 +#: core/plugins/export_report.py:612 iast/views/project_report_export.py:278 +msgid "Last scan time" +msgstr "最近检测时间" + +#: core/plugins/export_report.py:389 core/plugins/export_report.py:467 +#: core/plugins/export_report.py:588 core/plugins/export_report.py:613 +#: iast/views/project_report_export.py:282 +msgid "Development language" +msgstr "开发语言" + +#: core/plugins/export_report.py:393 core/plugins/export_report.py:469 +#: core/plugins/export_report.py:590 core/plugins/export_report.py:614 +#: iast/views/project_report_export.py:286 +msgid "Vulnerability URL" +msgstr "漏洞url" + +#: core/plugins/export_report.py:395 core/plugins/export_report.py:472 +#: core/plugins/export_report.py:593 core/plugins/export_report.py:615 +#: iast/views/project_report_export.py:288 +msgid "Vulnerability description" +msgstr "漏洞描述" + +#: core/plugins/export_report.py:513 +msgid "Vulnerability Severity Levels Distribution" +msgstr "2.1 漏洞等级分布" + +#: core/plugins/export_report.py:514 +msgid "Distribution of Vulnerability" +msgstr "2.2 漏洞类型分布" + +#: core/plugins/export_report.py:518 iast/views/vul_details.py:339 +msgid "Vulnerability details" +msgstr "漏洞详情" + +#: dongtai/endpoint/__init__.py:275 iast/views/log_delete.py:32 +#: iast/views/openapi.py:29 iast/views/project_detail.py:37 +msgid "success" +msgstr "操作成功" + +#: dongtai/endpoint/__init__.py:288 iast/views/logs.py:87 +msgid "failure" +msgstr "操作失败" + +#: dongtai/models/agent.py:27 +msgid "server" +msgstr "" + +#: dongtai/models/agent_method_pool.py:50 +msgid "sinks" +msgstr "" + +#: dongtai/models/asset.py:30 dongtai/models/heartbeat.py:39 +msgid "agent" +msgstr "探针" + +#: dongtai/models/department.py:29 dongtai/models/talent.py:17 +#: dongtai/models/talent.py:36 +msgid "talent" +msgstr "" + +#: dongtai/models/department.py:32 +msgid "" +"The talent this department belongs to. 
A department will get all permissions " +"granted to each of their talent." +msgstr "" + +#: dongtai/models/department.py:45 +msgid "name" +msgstr "" + +#: dongtai/models/department.py:49 +msgid "A department with that department name already exists." +msgstr "创建失败,项目名称已存在" + +#: dongtai/models/department.py:52 dongtai/models/project_version.py:20 +msgid "create time" +msgstr "最新时间" + +#: dongtai/models/department.py:53 dongtai/models/project_version.py:21 +msgid "update time" +msgstr "最新时间" + +#: dongtai/models/department.py:54 +msgid "created by" +msgstr "" + +#: dongtai/models/department.py:55 +msgid "parent id" +msgstr "项目id" + +#: dongtai/models/hook_strategy.py:17 +msgid "type" +msgstr "" + +#: dongtai/models/hook_strategy.py:20 dongtai/models/user.py:22 +msgid "" +"The department this user belongs to. A user will get all permissions granted " +"to each of their department." +msgstr "" + +#: dongtai/models/talent.py:21 +msgid "A talent with that talent name already exists." +msgstr "创建失败,项目名称已存在" + +#: dongtai/models/talent.py:28 +msgid "active" +msgstr "" + +#: dongtai/models/talent.py:31 +msgid "" +"Designates whether this user should be treated as active. Unselect this " +"instead of deleting accounts." +msgstr "" + +#: dongtai/models/user.py:19 +#, fuzzy +#| msgid "Default department" +msgid "department" +msgstr "默认部门" + +#: i18n/views/setlang.py:29 +msgid "this language not supported now" +msgstr "该语言暂不支持" + +#: iast/account/department.py:60 iast/account/department.py:136 +msgid "部门" +msgstr "" + +#: iast/account/department.py:61 +msgid "部门获取" +msgstr "" + +#: iast/account/department.py:62 iast/account/department.py:138 +#: iast/account/user.py:184 +msgid "管理" +msgstr "" + +#: iast/account/department.py:108 iast/account/department.py:152 +msgid "Department {} already exists" +msgstr "部门{}已存在" + +#: iast/account/department.py:118 +msgid "Department name has been modified to {}" +msgstr "部门名称已修改为{}" + +#: iast/account/department.py:123 iast/account/department.py:185 +#: iast/account/user.py:212 +msgid "Department does not exist" +msgstr "部门不存在" + +#: iast/account/department.py:137 +msgid "增加部门" +msgstr "增加部门" + +#: iast/account/department.py:167 +msgid "Talent does not exist" +msgstr "租户不存在" + +#: iast/account/department.py:174 +msgid "Department {} has been created successfully" +msgstr "部门{}创建成功" + +#: iast/account/department.py:180 +msgid "Access Denied" +msgstr "父部门无访问权限" + +#: iast/account/department.py:200 +msgid "" +"Delete failed, existence of users under department {}, please implement " +"forced deletion" +msgstr "删除失败,该部门 {} 存在用户 " + +#: iast/account/department.py:207 iast/views/agents_delete.py:31 +#: iast/views/agents_delete.py:79 iast/views/project_report_delete.py:52 +#: iast/views/project_version_delete.py:25 +#: iast/views/project_version_delete.py:53 iast/views/vul_delete.py:18 +#: iast/views/vul_delete.py:43 +msgid "Deleted Successfully" +msgstr "删除成功" + +#: iast/account/talent.py:25 +msgid "Tenant management" +msgstr "租户管理" + +#: iast/account/talent.py:83 +msgid "Tenant has been deactivated" +msgstr "租户已停用" + +#: iast/account/talent.py:83 +msgid "Tenant does not exist" +msgstr "租户不存在" + +#: iast/account/talent.py:94 +msgid "Tenant name or email is not specified" +msgstr "租户名称或联系邮箱未指定" + +#: iast/account/talent.py:98 +msgid "Tenant {} has been created successfully" +msgstr "租户{}创建成功" + +#: iast/account/talent.py:99 +msgid "Tenant {} creation failed, error message:{}" +msgstr "租户{} 创建失败,原因:{}" + +#: iast/account/talent.py:108 +msgid "Tenant: {} Delete 
successfully" +msgstr "租户:{} 删除成功" + +#: iast/account/talent.py:123 +msgid "Query if the default tenant information exists" +msgstr "查询默认租户信息是否存在" + +#: iast/account/talent.py:129 +msgid "" +"Tenant information already exists, please delete tenant information first" +msgstr "租户信息已存在,请先删除原有租户信息" + +#: iast/account/talent.py:130 +msgid "" +"The tenant information already existed, please delete the existing " +"information first" +msgstr "租户信息已存在,请先删除原有租户信息" + +#: iast/account/talent.py:132 +msgid "Started creating a tenant" +msgstr "开始创建租户" + +#: iast/account/talent.py:138 +msgid "Finished creating tenant, start to create tenant default department" +msgstr "租户创建完成,开始创建默认部门" + +#: iast/account/talent.py:144 +msgid "Finished creating department, start to create default user" +msgstr "部门创建完成,开始创建默认用户" + +#: iast/account/talent.py:157 +msgid "Finsihed creating and initializing tenant" +msgstr "租户创建及初始化完成" + +#: iast/account/talent.py:160 +msgid "Failed to created a tenant, error message:{}" +msgstr "创建租户失败,错误原因:{}" + +#: iast/account/user.py:96 +msgid "The format of ‘page’ and ‘pageSize’ only can be numberic" +msgstr "page和pageSize必须是数字" + +#: iast/account/user.py:133 +msgid "Data update succeeded" +msgstr "数据更新成功" + +#: iast/account/user.py:138 iast/views/project_detail.py:38 +#: iast/views/project_detail.py:83 iast/views/project_report_export.py:103 +#: iast/views/project_summary.py:107 iast/views/user_detail.py:32 +msgid "no permission" +msgstr "无权访问" + +#: iast/account/user.py:173 +msgid "Failed to delete User {}" +msgstr "用户{}删除失败" + +#: iast/account/user.py:178 +msgid "User {} successfully deleted" +msgstr "用户{}删除成功" + +#: iast/account/user.py:182 +msgid "用户" +msgstr "" + +#: iast/account/user.py:183 +msgid "增加用户" +msgstr "" + +#: iast/account/user.py:195 +msgid "Consistent" +msgstr "密码一致" + +#: iast/account/user.py:204 +msgid "Username already exists" +msgstr "用户名已存在" + +#: iast/account/user.py:244 iast/account/user.py:259 +msgid "User creation failed" +msgstr "用户创建失败" + +#: iast/account/user.py:249 iast/views/user_register_batch.py:73 +msgid "User {} has been created successfully" +msgstr "用户{}创建成功" + +#: iast/account/user.py:254 +msgid "Department does not exist or no permission to access" +msgstr "部门不存在或无访问权限" + +#: iast/base/agent.py:227 +msgid "{} Line" +msgstr "的{}行" + +#: iast/base/agent.py:243 +msgid "In {} {} call {}. 
{} (), Incoming parameters {}" +msgstr "在{}{}调用{}.{}(),传入参数{}" + +#: iast/base/project_version.py:11 iast/base/project_version.py:124 +#: iast/views/api_route_search.py:41 iast/views/project_report_export.py:29 +#: iast/views/project_summary.py:26 iast/views/project_version_current.py:21 +#: iast/views/project_version_delete.py:19 +#: iast/views/project_version_list.py:19 +msgid "The version id of the project" +msgstr "项目的版本id" + +#: iast/base/project_version.py:13 iast/base/project_version.py:126 +#: iast/views/project_add.py:36 iast/views/project_version_list.py:21 +#: iast/views/vul_details.py:55 +msgid "The version name of the project" +msgstr "项目的版本名。" + +#: iast/base/project_version.py:15 iast/views/project_version_list.py:23 +msgid "Description of the project versoin" +msgstr "项目版本的描述" + +#: iast/base/project_version.py:16 iast/serializers/vul.py:113 +#: iast/views/api_route_search.py:39 iast/views/project_add.py:37 +#: iast/views/project_delete.py:17 iast/views/project_detail.py:30 +#: iast/views/project_report_export.py:34 +#: iast/views/project_report_sync_add.py:21 iast/views/project_search.py:18 +#: iast/views/project_summary.py:56 iast/views/project_version_current.py:22 +#: iast/views/project_version_delete.py:20 iast/views/vul_details.py:71 +msgid "The id of the project" +msgstr "项目ID" + +#: iast/base/project_version.py:18 iast/views/project_version_list.py:25 +msgid "Whether it is the current version, 1 means yes, 0 means no." +msgstr "表示是否为当前版本,1代表是,0代表否" + +#: iast/base/project_version.py:33 iast/threshold/agent_core_status.py:50 +#: iast/views/agent_start.py:43 iast/views/agent_stop.py:39 +#: iast/views/api_route_related_request.py:60 +#: iast/views/api_route_search.py:151 iast/views/engine_hook_rule_status.py:107 +#: iast/views/engine_hook_rule_status.py:156 +#: iast/views/engine_hook_rule_summary.py:47 +#: iast/views/engine_hook_rule_types.py:80 iast/views/engine_hook_rules.py:88 +#: iast/views/engine_hook_rules.py:94 +#: iast/views/engine_method_pool_search.py:173 +#: iast/views/engine_method_pool_search.py:184 iast/views/openapi.py:14 +#: iast/views/openapi.py:68 iast/views/project_add.py:49 +#: iast/views/project_add.py:195 iast/views/project_report_export.py:81 +#: iast/views/project_report_export.py:86 +#: iast/views/project_report_sync_add.py:53 +#: iast/views/project_version_add.py:18 iast/views/project_version_add.py:43 +#: iast/views/project_version_add.py:49 +#: iast/views/project_version_current.py:50 +#: iast/views/project_version_delete.py:23 +#: iast/views/project_version_delete.py:47 +#: iast/views/project_version_delete.py:59 +#: iast/views/project_version_list.py:60 +#: iast/views/project_version_update.py:17 +#: iast/views/project_version_update.py:40 +#: iast/views/project_version_update.py:46 iast/views/sca_summary.py:191 +#: iast/views/scas.py:180 iast/views/strategys_add.py:48 +#: iast/views/vul_summary.py:207 iast/views/vul_summary_project.py:182 +#: iast/views/vul_summary_type.py:184 iast/views/vuls.py:223 +msgid "Parameter error" +msgstr "参数错误" + +#: iast/base/project_version.py:46 +msgid "Repeated version name" +msgstr "版本名称重复" + +#: iast/base/project_version.py:55 iast/views/project_version_current.py:26 +#: iast/views/project_version_current.py:68 +#: iast/views/project_version_delete.py:24 +#: iast/views/project_version_delete.py:55 +msgid "Version does not exist" +msgstr "版本不存在" + +#: iast/base/project_version.py:122 iast/views/project_add.py:39 +msgid "Description of the project" +msgstr "项目的描述信息" + +#: 
iast/base/update_project_version.py:18 +msgid "Detects and associates application version information" +msgstr "检测并关联项目版本信息" + +#: iast/base/update_project_version.py:43 +msgid "Detection finished" +msgstr "检测完成" + +#: iast/base/update_project_version.py:45 +msgid "Detection failed" +msgstr "检测失败" + +#: iast/notify/feishu.py:12 +msgid "Maven official crawler" +msgstr "Maven官方爬虫" + +#: iast/serializers/agent.py:50 iast/views/agents.py:72 +msgid "Online" +msgstr "运行中" + +#: iast/serializers/agent.py:50 iast/views/agents.py:72 +msgid "Offline" +msgstr "已下线" + +#: iast/serializers/agent.py:62 iast/views/agents.py:184 +msgid "Load data is not uploaded" +msgstr "负载数据暂未上传" + +#: iast/serializers/agent.py:71 iast/serializers/agent.py:76 +#: iast/views/agents.py:82 iast/views/agents.py:87 +msgid "No flow is detected by the probe" +msgstr "未探测到服务器地址" + +#: iast/serializers/agent.py:133 iast/serializers/agent.py:140 +#: iast/views/agent_alias_modified.py:18 iast/views/agent_start.py:16 +msgid "The id corresponding to the agent." +msgstr "探针的对应id" + +#: iast/serializers/agent.py:135 iast/views/agent_start.py:18 +#: iast/views/agents_delete.py:27 +msgid "The id corresponding to the agent, use\",\" for segmentation." +msgstr "agent对应的id,使用\",\"进行切分。" + +#: iast/serializers/agent_config.py:13 iast/serializers/agent_config.py:27 +#, fuzzy +#| msgid "The alias corresponding to the agent." +msgid "The details config to the agent." +msgstr "探针的别名" + +#: iast/serializers/agent_config.py:14 +#, fuzzy +#| msgid "The short name of the agent" +msgid "The hostname of the agent." +msgstr "探针简称" + +#: iast/serializers/agent_config.py:15 +#, fuzzy +#| msgid "The id of the agent" +msgid "The ip of the agent." +msgstr "探针id" + +#: iast/serializers/agent_config.py:16 iast/serializers/agent_config.py:17 +#, fuzzy +#| msgid "The id of the agent" +msgid "The port of the agent." +msgstr "探针id" + +#: iast/serializers/agent_config.py:18 iast/serializers/agent_config.py:28 +#: iast/threshold/agent_core_status.py:23 +#, fuzzy +#| msgid "The short name of the agent" +msgid "The cluster_name of the agent." +msgstr "探针简称" + +#: iast/serializers/agent_config.py:19 +#, fuzzy +#| msgid "The version of agent" +msgid "The cluster_version of the agent." +msgstr "探针版本" + +#: iast/serializers/agent_config.py:20 +#, fuzzy +#| msgid "The id of the agent" +msgid "The priority of the agent." +msgstr "探针id" + +#: iast/serializers/agent_config.py:25 iast/serializers/agent_config.py:33 +#: iast/threshold/agent_core_status.py:21 +#, fuzzy +#| msgid "The id of the agent" +msgid "The id of the webHook." +msgstr "探针id" + +#: iast/serializers/agent_config.py:26 iast/threshold/agent_core_status.py:22 +#, fuzzy +#| msgid "The type of hook rule" +msgid "The type of the webHook." +msgstr "Hook规则类型" + +#: iast/serializers/hook_strategy.py:14 +msgid "" +"\n" +"Examples in a single case: O, P<1,2,3,4,...>, R\n" +"Combination situation: O&R, O&P1, etc.\n" +"O represents the object itself; R represents the return value; P represents " +"the parameter, and the number represents the position of the parameter\n" +msgstr "" +"\n" +"单数时的例子: O, P<1,2,3,4,...>, R\n" +"组合情况下的样例: O&R, O&P1, etc.\n" +"O 代表对象本身; R 代表返回值; P 代表参数, 数字代表参数位置\n" + +#: iast/serializers/hook_strategy.py:33 +msgid "The name of hook rule type." +msgstr "Hook规则的名称" + +#: iast/serializers/hook_strategy.py:35 iast/views/engine_hook_rule_add.py:20 +#: iast/views/engine_hook_rule_modify.py:30 +msgid "The id of hook rule type." 
+msgstr "Hook规则类型名称" + +#: iast/serializers/hook_strategy.py:37 +msgid "The user who created the hook rule type." +msgstr "Hook规则类型的创建者" + +#: iast/serializers/hook_strategy.py:38 iast/views/strategy_delete.py:21 +#: iast/views/strategys_add.py:17 iast/views/strategys_type.py:18 +msgid "The id of strategy" +msgstr "策略id" + +#: iast/serializers/hook_strategy.py:40 iast/views/engine_hook_rule_add.py:22 +#: iast/views/engine_hook_rule_modify.py:32 +msgid "The value of strategy" +msgstr "策略名" + +#: iast/serializers/hook_strategy.py:44 iast/views/engine_hook_rule_add.py:26 +#: iast/views/engine_hook_rule_modify.py:36 +msgid "Source of taint" +msgstr "污点源" + +#: iast/serializers/hook_strategy.py:49 iast/views/engine_hook_rule_add.py:31 +#: iast/views/engine_hook_rule_modify.py:41 +msgid "Target of taint" +msgstr "污点目标" + +#: iast/serializers/hook_strategy.py:55 iast/views/engine_hook_rule_add.py:37 +#: iast/views/engine_hook_rule_modify.py:47 +msgid "" +"Inheritance type, false-only detect current class, true-inspect subclasses, " +"all-check current class and subclasses" +msgstr "继承类型, false代表当前类, true代表子类, all代表当前类和子类" + +#: iast/serializers/hook_strategy.py:61 iast/views/engine_hook_rule_add.py:43 +#: iast/views/engine_hook_rule_modify.py:53 +msgid "" +"Indicates whether taint tracking is required, true-required, false-not " +"required." +msgstr "表示是否启用污点追踪,true代表是,false代表否" + +#: iast/serializers/hook_strategy.py:66 +msgid "The update time of hook strategy" +msgstr "hook策略的更新时间" + +#: iast/serializers/hook_strategy.py:68 +msgid "" +"The enabled state of the hook strategy: 0-disabled, 1-enabled, -1-deleted" +msgstr "hook策略的状态:0-禁用,1-启用,-1-删除" + +#: iast/serializers/login.py:15 +msgid "Username should not be empty" +msgstr "用户名不能为空" + +#: iast/serializers/login.py:22 +msgid "Password should not be blank" +msgstr "密码不能为空" + +#: iast/serializers/sca.py:40 iast/views/vul_details.py:229 +#: iast/views/vuls.py:31 iast/views/vuls.py:304 +msgid "The application has not been binded" +msgstr "暂未绑定项目" + +#: iast/serializers/sca.py:63 +msgid "No application version has been created" +msgstr "暂未创建项目版本" + +#: iast/serializers/vul.py:66 +msgid "The level name of vulnerablity" +msgstr "漏洞等级名称" + +#: iast/serializers/vul.py:89 iast/views/sca_sidebar_index.py:22 +#: iast/views/sca_summary.py:52 iast/views/scas.py:41 +#: iast/views/vul_details.py:58 iast/views/vul_sidebar_index.py:43 +#: iast/views/vul_summary.py:39 iast/views/vul_summary_project.py:34 +#: iast/views/vul_summary_type.py:33 iast/views/vuls.py:72 +msgid "programming language" +msgstr "编程语言" + +#: iast/serializers/vul.py:91 +msgid "The number of vulnerabilities corresponding to the programming language" +msgstr "编程语言对应的漏洞数量" + +#: iast/serializers/vul.py:96 iast/views/vul_details.py:59 +msgid "The name of vulnerablity level" +msgstr "漏洞类型名称" + +#: iast/serializers/vul.py:97 iast/views/project_summary.py:49 +msgid "The number of vulnerabilities corresponding to the level" +msgstr "漏洞级别对应的漏洞数量" + +#: iast/serializers/vul.py:98 iast/views/vul_details.py:60 +msgid "The id of vulnerablity level" +msgstr "漏洞类型id" + +#: iast/serializers/vul.py:102 +msgid "The name of vulnerablity type" +msgstr "漏洞类型的对应名称" + +#: iast/serializers/vul.py:104 +msgid "The number of vulnerabilities corresponding to the vulnerablity type" +msgstr "漏洞类型对应的漏洞数量" + +#: iast/serializers/vul.py:110 iast/views/project_report_export.py:32 +#: iast/views/project_report_sync_add.py:20 +msgid "The name of the project" +msgstr "项目名" + +#: iast/serializers/vul.py:112 +msgid "The number of 
vulnerabilities corresponding to the project" +msgstr "项目对应的漏洞数量" + +#: iast/threshold/agent_core_status.py:16 iast/views/agent_search.py:26 +#: iast/views/agent_start.py:22 iast/views/agent_stop.py:18 +#: iast/views/agent_stop.py:65 +msgid "Suspending ..." +msgstr "正在暂停..." + +#: iast/threshold/agent_core_status.py:28 iast/views/agent_stop.py:22 +msgid "Suspend Agent" +msgstr "暂停agent" + +#: iast/threshold/agent_core_status.py:32 iast/threshold/config_setting.py:61 +#: iast/threshold/del_threshold_setting.py:27 +#: iast/threshold/del_webhook_setting.py:27 +#: iast/threshold/get_config_setting.py:26 +#: iast/threshold/get_config_setting_detail.py:26 +#: iast/threshold/webhook_setting.py:50 iast/views/agent.py:69 +#: iast/views/agent_alias_modified.py:31 iast/views/agent_delete.py:38 +#: iast/views/agent_install.py:30 iast/views/agent_search.py:31 +#: iast/views/agent_start.py:30 iast/views/agent_stop.py:26 +#: iast/views/agent_uninstall.py:28 iast/views/agent_upgrade_online.py:40 +#: iast/views/agents.py:63 iast/views/agents_delete.py:44 +#: iast/views/agents_user.py:23 iast/views/details_id.py:70 +msgid "Agent" +msgstr "探针" + +#: iast/threshold/agent_core_status.py:33 +#, fuzzy +#| msgid "Agent Start" +msgid "Agent Status Update" +msgstr "探针启动" + +#: iast/threshold/agent_core_status.py:34 +#, fuzzy +#| msgid "Stop the running agent by specifying the id." +msgid "Control the running agent by specifying the id." +msgstr "使用探针对应的id来停止探针" + +#: iast/threshold/agent_core_status.py:44 +#: iast/threshold/agent_core_status.py:58 +#: iast/threshold/del_threshold_setting.py:37 +#: iast/threshold/del_webhook_setting.py:37 +#: iast/threshold/webhook_setting.py:63 iast/views/engine_hook_rule_add.py:51 +#: iast/views/engine_hook_rule_add.py:109 +#: iast/views/engine_hook_rule_modify.py:21 +#: iast/views/engine_hook_rule_modify.py:92 +msgid "Incomplete parameter, please check again" +msgstr "参数不完整,请检查" + +#: iast/threshold/agent_core_status.py:85 +msgid "状态已下发" +msgstr "" + +#: iast/threshold/config_setting.py:17 +#: iast/threshold/del_threshold_setting.py:17 +#: iast/threshold/del_webhook_setting.py:17 +#: iast/threshold/webhook_setting.py:17 +#, fuzzy +#| msgid "The installation is complete" +msgid "The setting is complete" +msgstr "安装完成" + +#: iast/threshold/config_setting.py:18 +#: iast/threshold/del_threshold_setting.py:18 +#: iast/threshold/del_webhook_setting.py:18 +#: iast/threshold/get_config_setting.py:17 +#: iast/threshold/get_config_setting_detail.py:17 +#: iast/threshold/get_webhook_setting.py:19 +#: iast/threshold/webhook_setting.py:18 iast/threshold/webhook_type.py:15 +#, fuzzy +#| msgid "Incomplete parameter, please check again" +msgid "Incomplete parameter, please try again later" +msgstr "参数不完整,请检查" + +#: iast/threshold/config_setting.py:24 iast/threshold/get_config_setting.py:23 +#: iast/threshold/get_config_setting_detail.py:23 +#: iast/threshold/get_webhook_setting.py:25 +msgid "config Agent" +msgstr "" + +#: iast/threshold/config_setting.py:62 iast/threshold/get_config_setting.py:27 +#: iast/threshold/get_config_setting_detail.py:27 +msgid "Agent threshold Config" +msgstr "" + +#: iast/threshold/config_setting.py:63 iast/threshold/get_config_setting.py:28 +#: iast/threshold/get_config_setting_detail.py:28 +msgid "Configure agent disaster recovery strategy" +msgstr "" + +#: iast/threshold/config_setting.py:86 +msgid "保存成功" +msgstr "" + +#: iast/threshold/config_setting.py:88 +msgid "保存失败" +msgstr "" + +#: iast/threshold/del_threshold_setting.py:24 +#: 
iast/threshold/del_webhook_setting.py:24 +#, fuzzy +#| msgid "Delete Agent" +msgid "del webHook Agent" +msgstr "删除探针" + +#: iast/threshold/del_threshold_setting.py:28 +#: iast/threshold/del_webhook_setting.py:28 +#, fuzzy +#| msgid "Agent Delete" +msgid "Agent webHook delete" +msgstr "删除探针" + +#: iast/threshold/del_threshold_setting.py:29 +#: iast/threshold/del_webhook_setting.py:29 +msgid "Delete agent traffic reporting data forwarding address configuration" +msgstr "" + +#: iast/threshold/del_threshold_setting.py:40 +#: iast/threshold/del_webhook_setting.py:40 +#, fuzzy +#| msgid "Application has been deleted successfully" +msgid "Config has been deleted successfully" +msgstr "项目删除成功" + +#: iast/threshold/del_threshold_setting.py:42 +#: iast/threshold/del_webhook_setting.py:42 +#, fuzzy +#| msgid "Failed to get configuration" +msgid "Failed to delete config" +msgstr "获取配置失败" + +#: iast/threshold/get_config_setting.py:16 +#: iast/threshold/get_webhook_setting.py:18 +#, fuzzy +#| msgid "create success" +msgid "Get success" +msgstr "创建成功" + +#: iast/threshold/get_config_setting.py:59 +#: iast/threshold/get_config_setting_detail.py:37 +#: iast/threshold/get_webhook_setting.py:42 +#, fuzzy +#| msgid "Deleted Successfully" +msgid "Successfully" +msgstr "删除成功" + +#: iast/threshold/get_config_setting_detail.py:16 +#, fuzzy +#| msgid "create success" +msgid "Get detail success" +msgstr "创建成功" + +#: iast/threshold/get_webhook_setting.py:28 iast/threshold/webhook_type.py:24 +msgid "WebHook" +msgstr "" + +#: iast/threshold/get_webhook_setting.py:29 +msgid "WebHook threshold Config get" +msgstr "" + +#: iast/threshold/get_webhook_setting.py:30 +msgid "WebHook threshold list" +msgstr "" + +#: iast/threshold/webhook_setting.py:24 +msgid "config webHook Agent" +msgstr "" + +#: iast/threshold/webhook_setting.py:51 +msgid "Agent webHook Config" +msgstr "" + +#: iast/threshold/webhook_setting.py:52 +msgid "Agent traffic reporting data forwarding address configuration" +msgstr "" + +#: iast/threshold/webhook_setting.py:66 +#, fuzzy +#| msgid "Account has been created successfully" +msgid "Config has been created successfully" +msgstr "账号创建成功" + +#: iast/threshold/webhook_setting.py:68 +#, fuzzy +#| msgid "Failed to get configuration" +msgid "Failed to create config" +msgstr "获取配置失败" + +#: iast/threshold/webhook_type.py:14 +#, fuzzy +#| msgid "The type of the parameter" +msgid "The type is return" +msgstr "参数类型" + +#: iast/threshold/webhook_type.py:21 +msgid "get webhook all type " +msgstr "" + +#: iast/threshold/webhook_type.py:25 +#, fuzzy +#| msgid "Agent Delete" +msgid "Agent webHook type" +msgstr "删除探针" + +#: iast/threshold/webhook_type.py:26 +msgid "type list of agent webHook" +msgstr "" + +#: iast/threshold/webhook_type.py:75 +#, fuzzy +#| msgid "Rule type successfully saved" +msgid "Get type list successfully" +msgstr "规则类型保存成功" + +#: iast/utils.py:96 +msgid "" +"The http status codes are both 200, please use the status and msg field " +"returned by the response data to troubleshoot" +msgstr "" +"http状态码均为200,请点击查看详情,通过响应数据返回的status与msg字段进行排查" + +#: iast/utils.py:133 +msgid "status code" +msgstr "状态码" + +#: iast/utils.py:139 +msgid "human readable message" +msgstr "状态信息" + +#: iast/views/agent.py:22 iast/views/agent_search.py:20 +#: iast/views/agents_user.py:13 iast/views/project_engines.py:19 +msgid "The name of agent" +msgstr "探针名称" + +#: iast/views/agent.py:23 iast/views/agents_user.py:14 +#: iast/views/strategys.py:28 iast/views/strategys_list.py:15 +msgid "The id of agent" +msgstr "探针的id" + +#: 
iast/views/agent.py:24 +msgid "The version of agent" +msgstr "探针版本" + +#: iast/views/agent.py:26 +msgid "The latest update time of agent" +msgstr "探针的最后更新时间" + +#: iast/views/agent.py:28 iast/views/agent.py:30 +msgid "The running status of agent" +msgstr "探针的运行状态" + +#: iast/views/agent.py:32 +msgid "agent control bit, 1-install, 2-uninstall, 0-no control" +msgstr "探针状态位,1代表安装,2代表卸载,0代表失去控制" + +#: iast/views/agent.py:34 +msgid "Whether it is in control, 0-No, 1-Yes" +msgstr "表示是否受控,0代表否,1代表是" + +#: iast/views/agent.py:36 +msgid "Bundled project ID, if it exists, it will be bundled." +msgstr "绑定的项目id,如果存在则探针被绑定" + +#: iast/views/agent.py:39 iast/views/agent_search.py:22 +msgid "Project name, used to start the agent first and then create the project" +msgstr "项目名,用来初次启动探针并创建项目" + +#: iast/views/agent.py:42 +msgid "1 is running online, 0 is not running, same token, only one online" +msgstr "1代表运行中,0代表离线,同一token的探针只能有一个在线" + +#: iast/views/agent.py:44 +msgid "Bundled project version ID, if it exists, it will be bundled" +msgstr "绑定项目的版本id,若果存在,则探针被绑定" + +#: iast/views/agent.py:47 iast/views/project_summary.py:65 +msgid "Agent language currently included in the project" +msgstr "项目中所涉及的探针对应的语言" + +#: iast/views/agent.py:70 +msgid "Agent Detail" +msgstr "探针详情" + +#: iast/views/agent.py:72 iast/views/project_version_delete.py:38 +msgid "Delete the specified project version according to the conditions." +msgstr "通过条件删除指定的项目版本" + +#: iast/views/agent.py:80 iast/views/agent.py:82 iast/views/agent.py:86 +msgid "Can't find relevant data" +msgstr "找不到相关数据" + +#: iast/views/agent_alias_modified.py:20 +msgid "The alias corresponding to the agent." +msgstr "探针的别名" + +#: iast/views/agent_alias_modified.py:24 iast/views/agent_alias_modified.py:46 +msgid "modified successfully" +msgstr "修改成功" + +#: iast/views/agent_alias_modified.py:25 iast/views/agent_delete.py:28 +#: iast/views/agent_delete.py:62 +msgid "Agent does not exist or no permission to access" +msgstr "agent不存在或无权限访问" + +#: iast/views/agent_alias_modified.py:26 iast/views/agent_delete.py:29 +#: iast/views/agent_delete.py:65 +msgid "Error while deleting, please try again later" +msgstr "删除过程出错,请稍后重试" + +#: iast/views/agent_alias_modified.py:33 +msgid "Agent Alias Modified" +msgstr "探针别名修改" + +#: iast/views/agent_alias_modified.py:34 +msgid "Modified the agent alias" +msgstr "修改探针别名" + +#: iast/views/agent_delete.py:27 iast/views/agent_delete.py:60 +msgid "Agent and related data deleted successfully" +msgstr "agent及相关数据删除成功" + +#: iast/views/agent_delete.py:35 iast/views/agents_delete.py:40 +msgid "Delete Agent" +msgstr "删除探针" + +#: iast/views/agent_delete.py:39 +msgid "Agent Delete" +msgstr "删除探针" + +#: iast/views/agent_delete.py:41 iast/views/project_delete.py:34 +msgid "Delete the agent by specifying the id." 
+msgstr "使用探针对应的id来删除探针" + +#: iast/views/agent_delete.py:70 iast/views/agents_delete.py:87 +msgid "Error logs deleted successfully, Deletion Amount: {}" +msgstr "错误日志删除成功,共删除:{}条" + +#: iast/views/agent_delete.py:72 iast/views/agents_delete.py:89 +msgid "Failed to delete error logs, probe ID: {}, error message: {}" +msgstr "错误日志删除失败,探针ID: {},原因:{}" + +#: iast/views/agent_delete.py:77 iast/views/agent_delete.py:84 +#: iast/views/agent_delete.py:91 iast/views/agent_delete.py:98 +#: iast/views/agent_delete.py:105 iast/views/agent_delete.py:112 +#: iast/views/agents_delete.py:94 iast/views/agents_delete.py:101 +#: iast/views/agents_delete.py:108 iast/views/agents_delete.py:115 +#: iast/views/agents_delete.py:122 iast/views/agents_delete.py:129 +msgid "" +"The replay request method pool data was successfully deleted, A total of {} " +"replay requests are deleted" +msgstr "重放请求方法池数据删除成功,共删除:{}条" + +#: iast/views/agent_delete.py:79 iast/views/agents_delete.py:96 +msgid "Failed to delete heartbeat data, error message: {}" +msgstr "心跳数据删除失败,原因:{}" + +#: iast/views/agent_delete.py:86 iast/views/agents_delete.py:103 +msgid "Failed to delete unauthorized data, error message: {}" +msgstr "越权相关数据删除失败,原因:{}" + +#: iast/views/agent_delete.py:93 iast/views/agents_delete.py:110 +msgid "Failed to delete vulnerability data, error message: {}" +msgstr "漏洞数据删除失败,原因:{}" + +#: iast/views/agent_delete.py:100 iast/views/agents_delete.py:117 +msgid "Failed to delete third-party component data, error message: {}" +msgstr "第三方组件数据删除失败,原因:{}" + +#: iast/views/agent_delete.py:107 iast/views/agents_delete.py:124 +msgid "Failed to delete method pool data, error message: {}" +msgstr "方法池数据删除失败,原因:{}" + +#: iast/views/agent_delete.py:114 +msgid "Failed to delete replay request method pool data, error message: {}" +msgstr "重放请求方法池数据删除失败,原因:{}" + +#: iast/views/agent_delete.py:119 iast/views/agents_delete.py:136 +msgid "Replay request queue deleted successfully, Deletion amount: {}" +msgstr "重放请求队列删除成功,共删除:{}条" + +#: iast/views/agent_delete.py:121 iast/views/agents_delete.py:131 +#: iast/views/agents_delete.py:138 +msgid "Failed to delete replay request queue, error message: {}" +msgstr "重放请求队列删除失败,原因:{}" + +#: iast/views/agent_deploy.py:28 iast/views/agent_deploy.py:46 +msgid "Corresponding deployment document could not be found" +msgstr "找不到对应部署文档" + +#: iast/views/agent_deploy.py:32 iast/views/documents.py:44 +msgid "Documents" +msgstr "文档" + +#: iast/views/agent_deploy.py:33 iast/views/agent_deploy.py:34 +msgid "Document of Agent Deploy" +msgstr "探针部署的文档" + +#: iast/views/agent_deploy_doc.py:14 iast/views/agent_deploy_info.py:14 +msgid "Agent deployment document" +msgstr "Agent部署文档" + +#: iast/views/agent_deploy_doc.py:38 iast/views/project_report_download.py:49 +#: iast/views/vuls.py:206 +msgid "No data" +msgstr "暂无数据" + +#: iast/views/agent_deploy_submit.py:18 +msgid "Uploading Agent configuration" +msgstr "上传Agent配置" + +#: iast/views/agent_download.py:22 +msgid "Downloading DongTai Agent" +msgstr "下载洞态Agent" + +#: iast/views/agent_install.py:17 iast/views/agent_install.py:43 +msgid "The installation is complete" +msgstr "安装完成" + +#: iast/views/agent_install.py:18 iast/views/agent_install.py:45 +#: iast/views/agent_uninstall.py:17 +msgid "The engine is being installed or uninstalled, please try again later" +msgstr "引擎正在被安装或卸载,请稍后再试" + +#: iast/views/agent_install.py:19 iast/views/agent_install.py:47 +#: iast/views/agent_start.py:47 iast/views/agent_stop.py:44 +#: iast/views/agent_uninstall.py:18 
iast/views/agent_uninstall.py:45 +msgid "Engine does not exist or no permission to access" +msgstr "引擎不存在或无权操作" + +#: iast/views/agent_install.py:26 +msgid "Installing an Agent" +msgstr "安装agent" + +#: iast/views/agent_install.py:31 +msgid "Agent Install" +msgstr "探针安装" + +#: iast/views/agent_install.py:32 +msgid "Install the running agent by specifying the id." +msgstr "通过指定id来安装运行中的探针" + +#: iast/views/agent_search.py:18 iast/views/documents.py:19 +#: iast/views/engine_hook_rule_types.py:24 iast/views/messages_list.py:24 +#: iast/views/messages_list.py:40 iast/views/project_report_list.py:15 +#: iast/views/projects.py:21 iast/views/sca_summary.py:47 +#: iast/views/scan_strategys.py:43 iast/views/scan_strategys.py:69 +#: iast/views/scas.py:36 iast/views/sensitive_info_rule.py:107 +#: iast/views/strategys.py:68 iast/views/vul_list_for_plugin.py:35 +#: iast/views/vul_sidebar_index.py:39 iast/views/vuls.py:67 +#: scaupload/views.py:32 +msgid "Number per page" +msgstr "每页数量" + +#: iast/views/agent_search.py:19 iast/views/documents.py:20 +#: iast/views/engine_hook_rule_types.py:25 iast/views/messages_list.py:25 +#: iast/views/project_report_list.py:16 iast/views/projects.py:22 +#: iast/views/sca_summary.py:40 iast/views/scan_strategys.py:44 +#: iast/views/scas.py:29 iast/views/sensitive_info_rule.py:108 +#: iast/views/strategys.py:69 iast/views/vul_list_for_plugin.py:28 +#: iast/views/vul_sidebar_index.py:33 iast/views/vuls.py:60 +#: scaupload/views.py:33 +msgid "Page index" +msgstr "对应页码" + +#: iast/views/agent_search.py:32 +msgid "Agent Search" +msgstr "探针搜索" + +#: iast/views/agent_search.py:34 +msgid "" +"Search for the agent corresponding to the user according to the following " +"parameters" +msgstr "" + +#: iast/views/agent_start.py:26 +msgid "Start Agent" +msgstr "启动agent" + +#: iast/views/agent_start.py:31 +msgid "Agent Start" +msgstr "探针启动" + +#: iast/views/agent_start.py:33 +msgid "Start the stopped agent by specifying the id." +msgstr "通过指定id来启动停止的探针" + +#: iast/views/agent_start.py:49 iast/views/agent_stop.py:46 +msgid "Agent is stopping service, please try again later" +msgstr "agent正在进行非启动停止操作,请稍后再试" + +#: iast/views/agent_start.py:65 +msgid "Starting…" +msgstr "正在启动..." + +#: iast/views/agent_status_update.py:17 iast/views/agent_status_update.py:32 +msgid "Engine status was updated successfully." +msgstr "引擎状态更新成功" + +#: iast/views/agent_stop.py:27 +msgid "Agent Stop" +msgstr "探针停止" + +#: iast/views/agent_stop.py:29 iast/views/agents_delete.py:46 +#: iast/views/agents_user.py:25 +msgid "Stop the running agent by specifying the id." +msgstr "使用探针对应的id来停止探针" + +#: iast/views/agent_uninstall.py:16 iast/views/agent_uninstall.py:41 +msgid "Uninstalling ..." +msgstr "正在卸载..." + +#: iast/views/agent_uninstall.py:24 +msgid "Uninstall Agent" +msgstr "卸载agent" + +#: iast/views/agent_uninstall.py:29 +msgid "Agent Uninstall" +msgstr "探针卸载" + +#: iast/views/agent_uninstall.py:30 +msgid "Uninstall the running agent by specifying the id." +msgstr "通过指定id来卸载运行中的探针" + +#: iast/views/agent_uninstall.py:43 +msgid "Agent is being installed or uninstalled, please try again later" +msgstr "agent正在被安装或卸载,请稍后再试" + +#: iast/views/agent_upgrade_offline.py:12 +msgid "Offline Upgrade Agent" +msgstr "离线升级agent" + +#: iast/views/agent_upgrade_offline.py:19 +msgid "Upload successful" +msgstr "上传成功" + +#: iast/views/agent_upgrade_offline.py:20 +msgid "{} files not supported" +msgstr "不支持{}文件" + +#: iast/views/agent_upgrade_online.py:19 +msgid "The resource link corresponding to the Agent." 
+msgstr "探针对应的资源连接" + +#: iast/views/agent_upgrade_online.py:21 +msgid "" +"The Token corresponding to the user is the same as when connecting to " +"openapi." +msgstr "与用户用来连接openapi组件的Token相同" + +#: iast/views/agent_upgrade_online.py:26 iast/views/agent_upgrade_online.py:50 +msgid "Online upgrade successful" +msgstr "在线升级成功" + +#: iast/views/agent_upgrade_online.py:28 iast/views/agent_upgrade_online.py:52 +msgid "" +"Token verification failed, please confirm your input address and token are " +"correct" +msgstr "token验证失败,请确认输入的地址和token是正确的" + +#: iast/views/agent_upgrade_online.py:35 +msgid "Online Upgrade Agent" +msgstr "在线升级agent" + +#: iast/views/agent_upgrade_online.py:41 +msgid "Agent Upgrade Online" +msgstr "探针在线升级" + +#: iast/views/agent_upgrade_online.py:42 +msgid "Agent upgrade" +msgstr "探针升级" + +#: iast/views/agents.py:30 +msgid "Agent list" +msgstr "探针列表" + +#: iast/views/agents.py:64 +msgid "Agent List" +msgstr "探针列表" + +#: iast/views/agents.py:66 +msgid "Get a list containing Agent information according to conditions." +msgstr "基于所给条件获取探针信息列表" + +#: iast/views/agents.py:206 +msgid "Incorrect format parameter, please check again" +msgstr "参数格式不正确,请检查。" + +#: iast/views/agents.py:209 +msgid "Program error" +msgstr "程序错误" + +#: iast/views/agents_delete.py:32 iast/views/agents_delete.py:81 +#: iast/views/vul_delete.py:19 iast/views/vul_delete.py:48 +msgid "Deletion failed" +msgstr "删除失败" + +#: iast/views/agents_delete.py:33 iast/views/agents_delete.py:82 +msgid "Successfully deleted {} strips, failed to deleted {} strips" +msgstr "成功删除{}条,删除失败{}条" + +#: iast/views/agents_delete.py:45 +msgid "Agent Delete batch" +msgstr "探针删除(批量)" + +#: iast/views/agents_user.py:24 +msgid "Agent (with user)" +msgstr "探针(用户相关)" + +#: iast/views/api_route_cover_rate.py:23 +msgid "The api cover_rate of the project" +msgstr "项目的api覆盖率" + +#: iast/views/api_route_cover_rate.py:37 +#: iast/views/api_route_related_request.py:41 +#: iast/views/api_route_search.py:126 +msgid "API Route" +msgstr "API 导航" + +#: iast/views/api_route_cover_rate.py:38 +msgid "API Route Coverrate" +msgstr "API导航覆盖率" + +#: iast/views/api_route_cover_rate.py:40 +msgid "" +"Get the API route coverrate of the project corresponding to the specified id." +msgstr "通过指定id来获取对应项目的API覆盖率" + +#: iast/views/api_route_cover_rate.py:69 +msgid "API coverage rate obtained successfully" +msgstr "API覆盖率获取成功" + +#: iast/views/api_route_related_request.py:42 +msgid "API Route Relation Request" +msgstr "API导航相关的请求" + +#: iast/views/api_route_related_request.py:44 +msgid "Get the coverrate of the project corresponding to the specified id." +msgstr "通过指定id来获取对应项目的覆盖率" + +#: iast/views/api_route_related_request.py:55 +msgid "API not Fould" +msgstr "找不到API" + +#: iast/views/api_route_search.py:32 iast/views/engine_hook_rules.py:29 +#: iast/views/engine_method_pool_search.py:27 +msgid "number per page" +msgstr "每页数量" + +#: iast/views/api_route_search.py:35 +msgid "The uri of the api route" +msgstr "Api导航对应的uri" + +#: iast/views/api_route_search.py:38 +msgid "The http method of the api route" +msgstr "该API对应的http方法" + +#: iast/views/api_route_search.py:43 +msgid "" +"Exclude the api route entry with the following id, this field is used to " +"obtain the data of the entire project in batches." +msgstr "排除以下id的api route,该字段用于分批获取整个项目的数据。" + +#: iast/views/api_route_search.py:49 iast/views/api_route_search.py:109 +msgid "" +"Whether the api is covered by detection, that is, there is associated " +"request data in the record." 
+msgstr "表示该api是否被覆盖,覆盖指记录到了相关的方法调用链" + +#: iast/views/api_route_search.py:60 +msgid "The method bound to this API" +msgstr "该API所绑定的方法" + +#: iast/views/api_route_search.py:62 +msgid "The method bound to this API, in array form" +msgstr "该API所绑定的方法,数组形式" + +#: iast/views/api_route_search.py:66 iast/views/api_route_search.py:95 +msgid "The id of api route" +msgstr "API的ID" + +#: iast/views/api_route_search.py:67 +msgid "The name of api route" +msgstr "该API的名称" + +#: iast/views/api_route_search.py:69 +msgid "The type of the parameter" +msgstr "参数类型" + +#: iast/views/api_route_search.py:71 +msgid "The shortcut of the parameter_type,e.g. java.lang.String -> String" +msgstr "参数类型缩写,如:java.lang.String -> String" + +#: iast/views/api_route_search.py:73 +msgid "The annotaion of the parameter" +msgstr "关于该参数的注释" + +#: iast/views/api_route_search.py:74 +msgid "The route id of parameter" +msgstr "该参数的对应的api ID" + +#: iast/views/api_route_search.py:78 +msgid "The id of api response" +msgstr "API响应数据的ID" + +#: iast/views/api_route_search.py:80 +msgid "The return type of api route" +msgstr "该api响应的类型" + +#: iast/views/api_route_search.py:82 +msgid "The route id of api response" +msgstr "该响应的对应的api ID" + +#: iast/views/api_route_search.py:84 +msgid "The shortcut of return_type" +msgstr "return_type的简称" + +#: iast/views/api_route_search.py:89 +msgid "The vulnerablity level id " +msgstr "漏洞类型id" + +#: iast/views/api_route_search.py:91 +msgid "The vulnerablity type name" +msgstr "漏洞类型名称" + +#: iast/views/api_route_search.py:96 +msgid "The uri of api route" +msgstr "API的uri" + +#: iast/views/api_route_search.py:97 +msgid "The class of api route" +msgstr "该api的对应类" + +#: iast/views/api_route_search.py:99 +msgid "The description of the api route" +msgstr "Api导航对应的注释" + +#: iast/views/api_route_search.py:101 +msgid "The code file of the api route" +msgstr "Api导航对应的代码文件" + +#: iast/views/api_route_search.py:103 +msgid "The controller of the api route" +msgstr "Api导航对应的controller" + +#: iast/views/api_route_search.py:105 +msgid "The id of the agent reported the api route" +msgstr "上报该api的探针的id" + +#: iast/views/api_route_search.py:127 +msgid "API Route Search" +msgstr "API 导航搜索" + +#: iast/views/api_route_search.py:129 +msgid "" +"Get the API list corresponding to the project according to the following " +"parameters. By default, there is no sorting. Please use the exclude_ids " +"field for pagination." +msgstr "根据以下参数搜索用户对应的agent" + +#: iast/views/details_id.py:71 +msgid "Agent List with id" +msgstr "探针列表" + +#: iast/views/details_id.py:73 iast/views/details_id.py:94 +#: iast/views/details_id.py:115 iast/views/details_id.py:137 +#: iast/views/projects.py:41 iast/views/scan_strategys.py:114 +#: iast/views/scan_strategys.py:170 iast/views/scan_strategys.py:192 +#: iast/views/sensitive_info_rule.py:134 iast/views/sensitive_info_rule.py:164 +#: iast/views/sensitive_info_rule.py:198 iast/views/sensitive_info_rule.py:221 +#: iast/views/sensitive_info_rule.py:234 iast/views/sensitive_info_rule.py:266 +msgid "" +"Get the item corresponding to the user, support fuzzy search based on name." 
+msgstr "项目对应的用户,支持模糊匹配" + +#: iast/views/details_id.py:91 iast/views/project_add.py:62 +#: iast/views/project_delete.py:32 iast/views/project_detail.py:47 +#: iast/views/project_engines.py:37 iast/views/project_report_delete.py:27 +#: iast/views/project_report_download.py:27 +#: iast/views/project_report_export.py:63 iast/views/project_report_list.py:32 +#: iast/views/project_report_sync_add.py:32 iast/views/project_search.py:34 +#: iast/views/project_summary.py:94 iast/views/project_version_add.py:29 +#: iast/views/project_version_current.py:38 +#: iast/views/project_version_delete.py:35 +#: iast/views/project_version_list.py:39 +#: iast/views/project_version_update.py:28 iast/views/projects.py:38 +msgid "Project" +msgstr "项目" + +#: iast/views/details_id.py:92 +msgid "Project List with id" +msgstr "项目列表" + +#: iast/views/details_id.py:112 iast/views/sca_details.py:84 +#: iast/views/sca_sidebar_index.py:45 iast/views/sca_summary.py:146 +#: iast/views/scas.py:124 +msgid "Component" +msgstr "组件" + +#: iast/views/details_id.py:113 +msgid "Component List with id" +msgstr "组件列表(项目相关)" + +#: iast/views/details_id.py:134 iast/views/vul_count_for_plugin.py:26 +#: iast/views/vul_delete.py:29 iast/views/vul_details.py:343 +#: iast/views/vul_list_for_plugin.py:60 iast/views/vul_recheck.py:132 +#: iast/views/vul_recheck.py:212 iast/views/vul_recheck_v2.py:121 +#: iast/views/vul_recheck_v2.py:201 iast/views/vul_sidebar_index.py:72 +#: iast/views/vul_status.py:60 iast/views/vul_summary.py:151 +#: iast/views/vul_summary_project.py:126 iast/views/vul_summary_type.py:128 +#: iast/views/vulnerability_status.py:78 iast/views/vuls.py:187 +msgid "Vulnerability" +msgstr "漏洞" + +#: iast/views/details_id.py:135 +msgid "Vulnerability List with id" +msgstr "漏洞列表" + +#: iast/views/documents.py:23 +msgid "Document's corresponding programming language" +msgstr "文档对应的编程语言" + +#: iast/views/documents.py:42 +msgid "Get documents" +msgstr "获取文档" + +#: iast/views/documents.py:43 +msgid "Get help documentation." +msgstr "获取对应帮助文档." + +#: iast/views/engine_hook_rule_add.py:50 iast/views/strategy_enable.py:18 +#: iast/views/strategy_enable.py:46 +msgid "Policy enabled success, total {} hook rules" +msgstr "策略启用成功,共{}条hook规则" + +#: iast/views/engine_hook_rule_add.py:52 iast/views/engine_hook_rule_add.py:120 +#: iast/views/engine_hook_rule_modify.py:22 +#: iast/views/engine_hook_rule_modify.py:108 +msgid "Failed to create strategy" +msgstr "策略创建失败" + +#: iast/views/engine_hook_rule_add.py:99 +#: iast/views/engine_hook_rule_modify.py:80 +#: iast/views/engine_hook_rule_status.py:93 +#: iast/views/engine_hook_rule_status.py:141 +#: iast/views/engine_hook_rule_summary.py:37 +#: iast/views/engine_hook_rule_type_add.py:90 +#: iast/views/engine_hook_rule_type_disable.py:37 +#: iast/views/engine_hook_rule_type_enable.py:43 +#: iast/views/engine_hook_rule_types.py:74 iast/views/engine_hook_rules.py:80 +msgid "Hook Rule" +msgstr "Hook 规则" + +#: iast/views/engine_hook_rule_add.py:100 +msgid "Hook Rule Add" +msgstr "Hook规则添加" + +#: iast/views/engine_hook_rule_add.py:102 iast/views/strategys.py:180 +#: iast/views/strategys_add.py:37 +msgid "" +"Generate corresponding strategy group according to the strategy selected by " +"the user." 
+msgstr "根据选择的策略生成对应的策略组" + +#: iast/views/engine_hook_rule_add.py:119 +msgid "Strategy has been created successfully" +msgstr "策略创建成功" + +#: iast/views/engine_hook_rule_modify.py:20 +#: iast/views/engine_hook_rule_modify.py:107 +msgid "strategy has been created successfully" +msgstr "策略创建成功" + +#: iast/views/engine_hook_rule_modify.py:28 +#: iast/views/engine_hook_rule_status.py:23 +msgid "The id of hook rule" +msgstr "Hook规则的id" + +#: iast/views/engine_hook_rule_modify.py:81 +msgid "Hook Rule Modify" +msgstr "Hook规则修改" + +#: iast/views/engine_hook_rule_modify.py:82 +msgid "Modify the rule corresponding to the specified id" +msgstr "通过指定id来获取对应hook规则" + +#: iast/views/engine_hook_rule_status.py:25 +msgid "The id of hook rule type" +msgstr "Hook规则类型ID" + +#: iast/views/engine_hook_rule_status.py:28 +#: iast/views/engine_hook_rule_status.py:42 +msgid "The state of the hook rule" +msgstr "Hook规则的状态" + +#: iast/views/engine_hook_rule_status.py:31 +msgid "The scope of the hook rule" +msgstr "Hook规则的状态" + +#: iast/views/engine_hook_rule_status.py:33 +msgid "The language_id" +msgstr "" + +#: iast/views/engine_hook_rule_status.py:35 +msgid "The type of hook rule" +msgstr "Hook规则类型" + +#: iast/views/engine_hook_rule_status.py:40 +msgid "The id corresponding to the hook type, use\",\" for segmentation." +msgstr "Hook规则的对应的id,使用\",\"进行切分。" + +#: iast/views/engine_hook_rule_status.py:46 +#: iast/views/engine_hook_rule_status.py:52 +#: iast/views/engine_hook_rule_status.py:135 +#: iast/views/engine_hook_rule_status.py:161 +msgid "Operation success" +msgstr "操作成功" + +#: iast/views/engine_hook_rule_status.py:47 +#: iast/views/engine_hook_rule_status.py:53 +#: iast/views/engine_hook_rule_status.py:113 +#: iast/views/engine_hook_rule_status.py:150 +#: iast/views/engine_hook_rule_type_enable.py:25 +msgid "Operation type does not exist" +msgstr "操作类型不存在" + +#: iast/views/engine_hook_rule_status.py:48 +#: iast/views/engine_hook_rule_status.py:137 +#: iast/views/engine_hook_rule_type_disable.py:22 +#: iast/views/engine_hook_rule_type_disable.py:47 +#: iast/views/engine_hook_rule_type_enable.py:53 +#: iast/views/strategy_disable.py:18 iast/views/strategy_disable.py:46 +#: iast/views/strategy_enable.py:19 iast/views/strategy_enable.py:48 +msgid "Strategy does not exist" +msgstr "策略不存在" + +#: iast/views/engine_hook_rule_status.py:54 +#: iast/views/engine_hook_rule_status.py:163 iast/views/vul_status.py:20 +#: iast/views/vul_status.py:97 +msgid "Incorrect parameter" +msgstr "参数不正确" + +#: iast/views/engine_hook_rule_status.py:94 +msgid "Hook Rule Status Modify" +msgstr "Hook规则状态修改" + +#: iast/views/engine_hook_rule_status.py:95 +msgid "Modify the status of the rule corresponding to the specified id." 
+msgstr "通过指定id来修改对应规则状态" + +#: iast/views/engine_hook_rule_status.py:116 +msgid "Policy type {} operation success, total of {} Policy types" +msgstr "策略类型{}操作成功,共{}条" + +#: iast/views/engine_hook_rule_status.py:127 +msgid "total of {} Policy types" +msgstr "规则类型的总数" + +#: iast/views/engine_hook_rule_status.py:132 +msgid "Policy {} succeed" +msgstr "策略{}作成功" + +#: iast/views/engine_hook_rule_status.py:142 +msgid "Hook Rule Status Modify (Batch)" +msgstr "Hook规则状态修改(批量)" + +#: iast/views/engine_hook_rule_status.py:143 +msgid "Batch modify the status of the rule corresponding to the specified id" +msgstr "通过指定id来批量修改对应规则状态" + +#: iast/views/engine_hook_rule_status.py:160 +msgid "Strategy operation success, total {}" +msgstr "策略操作成功,共{}条" + +#: iast/views/engine_hook_rule_summary.py:18 +msgid "Total number of rule types" +msgstr "规则类型的总数" + +#: iast/views/engine_hook_rule_summary.py:19 +msgid "Total number of rules" +msgstr "规则的总数" + +#: iast/views/engine_hook_rule_summary.py:21 +msgid "Total number of sink type rules" +msgstr "污点类型规则的同属" + +#: iast/views/engine_hook_rule_summary.py:26 +#: iast/views/engine_hook_rule_types.py:33 iast/views/engine_hook_rules.py:35 +msgid "The id of programming language" +msgstr "编程语言的id" + +#: iast/views/engine_hook_rule_summary.py:38 +msgid "Hook Rule Summary" +msgstr "Hook规则概览" + +#: iast/views/engine_hook_rule_summary.py:39 +msgid "Statistics on the number of hook rules" +msgstr "对于Hook规则数量的统计" + +#: iast/views/engine_hook_rule_type_add.py:26 +#: iast/views/engine_hook_rule_types.py:29 iast/views/engine_hook_rules.py:26 +msgid "" +"type of hook rule \n" +" 1 represents the propagation method, 2 represents the source method, 3 " +"represents the filter method, and 4 represents the taint method" +msgstr "" +"Hook规则的类型 \n" +" 1 代表传播方法, 2 代表污点源方法, 3 代表过滤方法, 4 代表危险方法" + +#: iast/views/engine_hook_rule_type_add.py:32 +msgid "The enabled state of the hook strategy: 0-disabled, 1-enabled" +msgstr "hook策略的状态:0代表禁用,1代表启用" + +#: iast/views/engine_hook_rule_type_add.py:34 +msgid "The name of hook type" +msgstr "Hook类型的名称" + +#: iast/views/engine_hook_rule_type_add.py:38 +msgid "The short name of hook type" +msgstr "Hook类型的简称" + +#: iast/views/engine_hook_rule_type_add.py:45 +msgid "The id of programming language,find it in the programming language api" +msgstr "编程语言的id" + +#: iast/views/engine_hook_rule_type_add.py:51 +#: iast/views/engine_hook_rule_type_add.py:111 +msgid "Rule type successfully saved" +msgstr "规则类型保存成功" + +#: iast/views/engine_hook_rule_type_add.py:52 +#: iast/views/engine_hook_rule_type_add.py:99 +msgid "Incomplete data" +msgstr "数据不完整" + +#: iast/views/engine_hook_rule_type_add.py:85 +#: iast/views/engine_hook_rule_types.py:67 iast/views/engine_hook_rules.py:75 +msgid "Parameter parsing failed, error message: {}" +msgstr "参数解析出错,错误原因:{}" + +#: iast/views/engine_hook_rule_type_add.py:91 +msgid "Hook Rule Type Add" +msgstr "Hook规则类型添加" + +#: iast/views/engine_hook_rule_type_add.py:92 +msgid "Create hook rule type based on incoming parameters" +msgstr "基于以下参数创建Hook类型" + +#: iast/views/engine_hook_rule_type_disable.py:15 +#: iast/views/engine_hook_rule_type_enable.py:19 +msgid "The id of hook type" +msgstr "Hook类型的id" + +#: iast/views/engine_hook_rule_type_disable.py:20 +#: iast/views/engine_hook_rule_type_disable.py:55 +msgid "Forbidden success" +msgstr "禁用成功" + +#: iast/views/engine_hook_rule_type_disable.py:21 +#: iast/views/engine_hook_rule_type_disable.py:56 +#: iast/views/engine_hook_rule_type_enable.py:26 +#: 
iast/views/engine_hook_rule_type_enable.py:62 +#: iast/views/engine_hook_rule_types.py:82 iast/views/engine_hook_rules.py:96 +msgid "Strategy type does not exist" +msgstr "策略类型不存在" + +#: iast/views/engine_hook_rule_type_disable.py:38 +msgid "Hook Rule Status Disable" +msgstr "Hook规则状态禁用" + +#: iast/views/engine_hook_rule_type_disable.py:40 +msgid "Disable the status of the rule corresponding to the specified id." +msgstr "通过指定id来禁用对应规则" + +#: iast/views/engine_hook_rule_type_enable.py:24 +#: iast/views/engine_hook_rule_type_enable.py:61 +msgid "Enable successfully" +msgstr "启用成功" + +#: iast/views/engine_hook_rule_type_enable.py:38 +msgid "Parameter processing failed, error message: {}" +msgstr "参数处理失败,错误详情:{}" + +#: iast/views/engine_hook_rule_type_enable.py:44 +msgid "Hook Rule Status Enable" +msgstr "Hook规则状态启用" + +#: iast/views/engine_hook_rule_type_enable.py:46 +msgid "Enable the status of the rule corresponding to the specified id." +msgstr "通过指定id来启用对应规则" + +#: iast/views/engine_hook_rule_types.py:72 +msgid "Hook Types List" +msgstr "Hook规则类型列表" + +#: iast/views/engine_hook_rule_types.py:73 +msgid "Get Hook Types List" +msgstr "获取Hook规则类型列表" + +#: iast/views/engine_hook_rules.py:30 +msgid "page index" +msgstr "对应页码" + +#: iast/views/engine_hook_rules.py:32 +msgid "The id of hook_type" +msgstr "Hook 类型id" + +#: iast/views/engine_hook_rules.py:37 +msgid "The keyword for search" +msgstr "关键词搜索" + +#: iast/views/engine_hook_rules.py:81 +msgid "Hook Rule List" +msgstr "Hook规则列表" + +#: iast/views/engine_hook_rules.py:82 +msgid "Get the list of hook strategies" +msgstr "获取Hook策略列表" + +#: iast/views/engine_hook_rules.py:119 +msgid "Rule read error, error message: {}" +msgstr "规则读取出错,错误详情:{}" + +#: iast/views/engine_method_pool_detail.py:22 +msgid "Engine - search data according to policy" +msgstr "引擎-根据策略搜索数据" + +#: iast/views/engine_method_pool_detail.py:48 +msgid "Not queried" +msgstr "未查询到数据" + +#: iast/views/engine_method_pool_detail.py:55 +msgid "Acquisition fail" +msgstr "获取失败" + +#: iast/views/engine_method_pool_detail.py:68 +msgid "Temporary search" +msgstr "临时搜索" + +#: iast/views/engine_method_pool_detail.py:94 +msgid "Searching, current {} page" +msgstr "正在搜索,当前第{}页" + +#: iast/views/engine_method_pool_sca.py:25 +#: iast/views/engine_method_pool_search.py:147 +#: iast/views/engine_method_pool_time_range.py:38 +msgid "Method Pool" +msgstr "方法调用链" + +#: iast/views/engine_method_pool_sca.py:26 +msgid "Method Pool Component" +msgstr "方法调用链相关组件" + +#: iast/views/engine_method_pool_sca.py:27 +msgid "Get the component information list of the tainted call chain." +msgstr "获取方法调用链相关的组件信息列表" + +#: iast/views/engine_method_pool_sca.py:34 +msgid "method_pool_id is empty" +msgstr "method_pool_id为空" + +#: iast/views/engine_method_pool_sca.py:39 +msgid "method_pool does not exist" +msgstr "method_pool 不存在" + +#: iast/views/engine_method_pool_sca.py:45 +msgid "method_pool has no permission" +msgstr "method_pool无权访问" + +#: iast/views/engine_method_pool_search.py:31 +msgid "" +"Whether to enable highlighting, the text where the regular expression " +"matches will be highlighted" +msgstr "是否启用高亮,会对正则表达式匹配处的文本进行高亮" + +#: iast/views/engine_method_pool_search.py:36 +msgid "" +"Exclude the method_pool entry with the following id, this field is used to " +"obtain the data of the entire project in batches." 
+msgstr "排除以下id的方法调用链,该字段用于分批获取整个项目的数据。" + +#: iast/views/engine_method_pool_search.py:41 +msgid "time format such as 1,1628190947242" +msgstr "" + +#: iast/views/engine_method_pool_search.py:45 +msgid "" +"Time range, the default is the current time to the previous seven days, " +"separated by',', format such as 1,1628190947242" +msgstr "时间范围,默认为当前时刻到前七天,使用','分割,格式如,1,1628190947242" + +#: iast/views/engine_method_pool_search.py:48 +msgid "The url of the method pool, search using regular syntax" +msgstr "调用链的url,使用正则语法进行搜索" + +#: iast/views/engine_method_pool_search.py:51 +msgid "The response header of the method pood, search using regular syntax" +msgstr "调用链的响应头,使用正则语法进行搜索" + +#: iast/views/engine_method_pool_search.py:54 +msgid "The response body of the calling chain, search using regular syntax" +msgstr "调用链的响应体,使用正则语法进行搜索" + +#: iast/views/engine_method_pool_search.py:57 +msgid "The request header of the calling chain, search using regular syntax" +msgstr "调用链的请求头,使用正则语法进行搜索" + +#: iast/views/engine_method_pool_search.py:61 +msgid "The request data of the calling chain, search using regular syntax" +msgstr "调用链的请求体,使用正则语法进行搜索" + +#: iast/views/engine_method_pool_search.py:64 +msgid "The sinkvalues of the calling chain, search using regular syntax" +msgstr "调用链的污点值,使用正则语法进行搜索" + +#: iast/views/engine_method_pool_search.py:67 +msgid "The signature of the calling chain, search using regular syntax" +msgstr "调用链的方法签名,使用正则语法进行搜索" + +#: iast/views/engine_method_pool_search.py:70 +msgid "" +"The filter field will return the method call chain with the update time " +"after this time, which can be combined with the exclude_ids field to handle " +"paging" +msgstr "" +"过滤字段,会返回更新时间在这时间之后的方法调用链,可结合exclude_ids字段来处理" +"分页" + +#: iast/views/engine_method_pool_search.py:74 +msgid "the search_mode , 1-regex match ,2-regex not match " +msgstr "" + +#: iast/views/engine_method_pool_search.py:148 +msgid "Method Pool Search" +msgstr "方法调用链搜索" + +#: iast/views/engine_method_pool_search.py:150 +msgid "" +"Search for the method pool information according to the following " +"conditions, the default is regular expression input, regular specifications " +"refer to REGEX POSIX 1003.2" +msgstr "" +"根据以下条件搜索调用链信息,默认为正则表达式输入,正则规范参照REGEX POSIX " +"1003.2" + +#: iast/views/engine_method_pool_search.py:228 +msgid "The regular expression format is wrong, please use REGEX POSIX 1003.2" +msgstr "正则表达式格式错误,请使用REGEX POSIX 1003.2" + +#: iast/views/engine_method_pool_time_range.py:34 +msgid "the eariest time of method_pool" +msgstr "" + +#: iast/views/engine_method_pool_time_range.py:39 +msgid "Method Pool Time Range" +msgstr "方法调用链时间范围" + +#: iast/views/engine_method_pool_time_range.py:40 +msgid "get method_pool eariest time" +msgstr "获取方法调用链的最早时间" + +#: iast/views/filereplace.py:48 +msgid "" +"this file is disallowed to modifyupload failed,this file is disallowed to " +"modify." +msgstr "该文件不允许修改" + +#: iast/views/filereplace.py:60 +msgid "upload error" +msgstr "上传错误" + +#: iast/views/filereplace.py:65 +msgid "upload sussess" +msgstr "上传成功" + +#: iast/views/filereplace.py:74 +msgid "upload error, fail back to default" +msgstr "上传错误,回滚到默认值" + +#: iast/views/health.py:32 iast/views/openapi.py:30 iast/views/openapi.py:48 +#: iast/views/oss_health.py:23 +msgid "Get OpenAPI configuration failed" +msgstr "获取openapi配置失败" + +#: iast/views/health.py:34 +msgid "OpenAPI service is down, Please check it." 
+msgstr "OpenAPI服务异常,请检查" + +#: iast/views/log_clear.py:14 +msgid "Log clear" +msgstr "日志清空" + +#: iast/views/log_delete.py:14 +msgid "Log delete" +msgstr "日志删除" + +#: iast/views/log_delete.py:34 +msgid "The data to be deleted should not be empty" +msgstr "待删除的数据不能为空" + +#: iast/views/log_export.py:58 +msgid "Export failed, error message: Log id should not be empty" +msgstr "导出失败,原因:日志ID不能为空" + +#: iast/views/logs.py:17 +msgid "Log list" +msgstr "日志列表" + +#: iast/views/logs.py:84 iast/views/vul_recheck.py:47 +#: iast/views/vul_recheck.py:189 iast/views/vul_recheck_v2.py:37 +#: iast/views/vul_recheck_v2.py:178 +msgid "No permission to access" +msgstr "无权限访问" + +#: iast/views/messages_del.py:28 +msgid "The id of Message" +msgstr "消息的id" + +#: iast/views/messages_del.py:32 +msgid "delete all messages when all is True" +msgstr "all参数为true时,删除所有消息" + +#: iast/views/messages_del.py:38 +msgid "Messages Delete" +msgstr "消息删除" + +#: iast/views/messages_del.py:39 +msgid "Used by the user to delete the corresponding message" +msgstr "用于用户删除对应的消息" + +#: iast/views/messages_del.py:40 iast/views/messages_list.py:55 +#: iast/views/messages_new.py:38 iast/views/messages_send.py:41 +msgid "Messages" +msgstr "消息" + +#: iast/views/messages_list.py:38 +msgid "total_number" +msgstr "总数" + +#: iast/views/messages_list.py:39 +msgid "the number of pages" +msgstr "总页数" + +#: iast/views/messages_list.py:52 +msgid "Get Messages List" +msgstr "获取消息列表" + +#: iast/views/messages_list.py:54 iast/views/messages_send.py:40 +msgid "Used to get the message list corresponding to the user" +msgstr "获取用户对应的消息列表" + +#: iast/views/messages_new.py:26 +msgid "total number of new messages" +msgstr "消息的总数" + +#: iast/views/messages_new.py:35 +msgid "Messages Count" +msgstr "新消息的总数" + +#: iast/views/messages_new.py:37 +msgid "Used to get the number of messages corresponding to the user" +msgstr "用于获取用户对应的新消息的数量" + +#: iast/views/messages_send.py:38 +msgid "Send Message" +msgstr "发送消息" + +#: iast/views/method_graph.py:35 +msgid "Method pool ID is empty" +msgstr "方法池ID为空" + +#: iast/views/method_graph.py:58 +msgid "Stain call map type does not exist" +msgstr "污点调用图类型不存在" + +#: iast/views/method_graph.py:61 +msgid "Data does not exist or no permission to access" +msgstr "数据不存在或无权限访问" + +#: iast/views/method_graph.py:67 +msgid "Page and PageSize can only be numeric" +msgstr "page和pageSize只能为数字" + +#: iast/views/openapi.py:12 iast/views/openapi.py:73 iast/views/openapi.py:77 +#: iast/views/project_add.py:50 iast/views/project_report_sync_add.py:65 +#: iast/views/project_version_add.py:19 iast/views/project_version_add.py:45 +msgid "Created success" +msgstr "操作成功" + +#: iast/views/openapi.py:13 iast/views/openapi.py:63 iast/views/profile.py:34 +#: iast/views/profile.py:92 +msgid "Current users have no permission to modify" +msgstr "当前用户无权修改" + +#: iast/views/openapi.py:36 iast/views/openapi.py:53 iast/views/profile.py:18 +#: iast/views/profile.py:30 iast/views/profile.py:68 iast/views/profile.py:88 +msgid "Profile" +msgstr "系统配置" + +#: iast/views/openapi.py:37 +msgid "Profile DongTai-OpenApi Retrieve" +msgstr "获取配置中的DongTai-OpenApi地址" + +#: iast/views/openapi.py:38 +msgid "Get the uri of DongTai-OpenApi" +msgstr "获取 DongTai-OpenApi 的URI" + +#: iast/views/openapi.py:54 +msgid "Profile DongTai-OpenApi Modify" +msgstr "修改配置中的DongTai-OpenApi地址" + +#: iast/views/openapi.py:56 +msgid "" +"To set the url address of DongTai-OpenApi, administrator rights are required" +msgstr "设置DongTai-OpenApi的url地址,需要管理员权限" + +#: iast/views/oss_health.py:25 
+msgid "OpenAPI configuration error" +msgstr "获取openapi配置失败" + +#: iast/views/profile.py:13 iast/views/profile.py:59 iast/views/profile.py:79 +msgid "profile value" +msgstr "配置值" + +#: iast/views/profile.py:16 +msgid "Get Profile" +msgstr "获取配置" + +#: iast/views/profile.py:17 +msgid "Get Profile with key" +msgstr "根据key获取配置" + +#: iast/views/profile.py:24 +msgid "Failed to get {} configuration" +msgstr "获取{}配置失败" + +#: iast/views/profile.py:27 iast/views/profile.py:84 +msgid "Profile modify" +msgstr "配置修改" + +#: iast/views/profile.py:29 iast/views/profile.py:86 +msgid "Modifiy Profile with key" +msgstr "修改对应key的配置值" + +#: iast/views/profile.py:49 +msgid "Update {} failed" +msgstr "更新{}失败" + +#: iast/views/profile.py:53 iast/views/profile.py:58 iast/views/profile.py:80 +msgid "profile key" +msgstr "配置key" + +#: iast/views/profile.py:57 +msgid "profile id" +msgstr "配置key" + +#: iast/views/profile.py:64 +msgid "GetProfileBatch" +msgstr "批量获取配置" + +#: iast/views/profile.py:66 +msgid "Get Profile with key batch" +msgstr "根据key批量获取配置" + +#: iast/views/profile.py:74 +msgid "Failed to get configuration" +msgstr "获取配置失败" + +#: iast/views/profile.py:105 +msgid "Update configuration failed" +msgstr "更新配置失败" + +#: iast/views/program_language.py:19 +msgid "The id of program language" +msgstr "编程语言的id" + +#: iast/views/program_language.py:20 +msgid "The name of program language" +msgstr "该编程语言的名称" + +#: iast/views/program_language.py:33 +msgid "Program Language" +msgstr "编程语言" + +#: iast/views/program_language.py:34 +msgid "Program Language List" +msgstr "编程语言列表" + +#: iast/views/program_language.py:35 +msgid "Get a list of program language." +msgstr "获取编程语言列表" + +#: iast/views/project_add.py:30 iast/views/project_detail.py:21 +#: iast/views/project_search.py:19 iast/views/project_summary.py:53 +#: iast/views/vul_details.py:53 +msgid "The name of project" +msgstr "项目名" + +#: iast/views/project_add.py:32 iast/views/project_detail.py:23 +msgid "The id corresponding to the agent, use, for segmentation." +msgstr "agent对应的id,使用\",\"进行切分。" + +#: iast/views/project_add.py:34 iast/views/project_detail.py:27 +msgid "The id corresponding to the scanning strategy." +msgstr "扫描策略的对于id" + +#: iast/views/project_add.py:51 iast/views/project_add.py:162 +msgid "Agent has been bound by other application" +msgstr "agent已被其他项目绑定" + +#: iast/views/project_add.py:52 iast/views/project_add.py:126 +msgid "Failed to create, the application name already exists" +msgstr "创建失败,项目名称已存在" + +#: iast/views/project_add.py:58 +msgid "New application" +msgstr "新增项目" + +#: iast/views/project_add.py:63 +msgid "Projects Add" +msgstr "项目新建" + +#: iast/views/project_add.py:65 +msgid "" +"Create a new project according to the given conditions;\n" +" when specifying the project id, update the item corresponding to " +"the id according to the given condition." +msgstr "" +"根据所给条件新建项目;\n" +" 当指定项目id是, 根据所给条件更新对应项目" + +#: iast/views/project_add.py:97 iast/views/project_add.py:99 +msgid "base_url validate failed" +msgstr "测试url校验失败" + +#: iast/views/project_add.py:105 +msgid "Agent parse error" +msgstr "探针升级" + +#: iast/views/project_add.py:110 +msgid "Required scan strategy and name" +msgstr "需要扫描策略和项目名" + +#: iast/views/project_add.py:151 +msgid "Version Update Error" +msgstr "项目版本更新" + +#: iast/views/project_delete.py:21 iast/views/project_delete.py:48 +msgid "Application has been deleted successfully" +msgstr "项目删除成功" + +#: iast/views/project_delete.py:22 iast/views/project_delete.py:51 +msgid "Failed to delete the project." 
+msgstr "删除失败" + +#: iast/views/project_delete.py:28 +msgid "Delete application" +msgstr "删除项目" + +#: iast/views/project_delete.py:33 +msgid "Projects Delete" +msgstr "删除项目" + +#: iast/views/project_detail.py:25 iast/views/project_summary.py:55 +msgid "The mode of project" +msgstr "项目模式" + +#: iast/views/project_detail.py:29 iast/views/project_summary.py:59 +msgid "Version information about the project" +msgstr "项目的版本信息" + +#: iast/views/project_detail.py:44 +msgid "View item details" +msgstr "查看项目详情" + +#: iast/views/project_detail.py:48 +msgid "Projects Detail" +msgstr "项目详情" + +#: iast/views/project_detail.py:50 +msgid "" +"Get project information by project id, including the current version " +"information of the project." +msgstr "获取项目的版本信息,包括项目的当前版本" + +#: iast/views/project_engines.py:18 +msgid "The id of the agent" +msgstr "探针id" + +#: iast/views/project_engines.py:21 +msgid "The short name of the agent" +msgstr "探针简称" + +#: iast/views/project_engines.py:34 +msgid "View engine list" +msgstr "查看引擎列表" + +#: iast/views/project_engines.py:38 +msgid "Projects Agents" +msgstr "项目探针" + +#: iast/views/project_engines.py:39 +msgid "Get the agent list corresponding to the project id." +msgstr "获取项目对应的探针列表" + +#: iast/views/project_report_delete.py:15 +#: iast/views/project_report_download.py:18 +msgid "The id of the project report" +msgstr "项目ID" + +#: iast/views/project_report_delete.py:23 +msgid "Delete Vulnerability Report" +msgstr "删除漏洞报告" + +#: iast/views/project_report_delete.py:28 +#: iast/views/project_report_export.py:64 +msgid "Projects Report Export" +msgstr "项目报告导出" + +#: iast/views/project_report_delete.py:30 +msgid "" +"According to the conditions, delete the report of the specified project or " +"the project of the specified vulnerability." +msgstr "基于以下条件,导出指定版本的报告或是特定漏洞的报告" + +#: iast/views/project_report_download.py:23 +msgid "Vulnerability Report Download" +msgstr "漏洞word报告下载" + +#: iast/views/project_report_download.py:28 +msgid "Projects Report Download" +msgstr "项目报告导出" + +#: iast/views/project_report_download.py:30 +#: iast/views/project_report_export.py:66 +msgid "" +"According to the conditions, export the report of the specified project or " +"the project of the specified vulnerability." +msgstr "基于以下条件,导出指定版本的报告或是特定漏洞的报告" + +#: iast/views/project_report_download.py:52 +msgid "Record is not ready" +msgstr "" + +#: iast/views/project_report_export.py:39 +msgid "Vulnerability Report Generate - Word" +msgstr "漏洞word报告生成" + +#: iast/views/project_report_export.py:93 +msgid "Vulnerability Report - {}. {}" +msgstr "漏洞报告-{}.{}" + +#: iast/views/project_report_list.py:17 +msgid "Project id" +msgstr "项目id" + +#: iast/views/project_report_list.py:28 +msgid "Vulnerability Report List" +msgstr "漏洞报告列表" + +#: iast/views/project_report_list.py:33 +msgid "Projects Report List" +msgstr "项目报告列表" + +#: iast/views/project_report_list.py:35 +msgid "" +"According to the conditions, list the report of the specified project or the " +"project of the specified vulnerability." 
+msgstr "基于以下条件,导出指定版本的报告或是特定漏洞的报告" + +#: iast/views/project_report_sync_add.py:18 +msgid "The vulnerability id of the project" +msgstr "项目的漏洞id" + +#: iast/views/project_report_sync_add.py:23 +msgid "The type of the vulnerability report" +msgstr "漏洞报告类型" + +#: iast/views/project_report_sync_add.py:28 +#: iast/views/project_report_sync_add.py:33 +msgid "Vulnerability Report Async Export" +msgstr "漏洞报告生成" + +#: iast/views/project_report_sync_add.py:35 +msgid "" +"According to the conditions, export the report of the specified project or " +"the project of the specified vulnerability async." +msgstr "基于以下条件,导出指定版本的报告或是特定漏洞的报告" + +#: iast/views/project_report_sync_add.py:57 +msgid "Project not exist" +msgstr "项目不存在" + +#: iast/views/project_report_sync_add.py:59 +msgid "Report type error" +msgstr "报告类型错误" + +#: iast/views/project_search.py:16 +msgid "Project name, support fuzzy search." +msgstr "项目名,支持模糊搜索" + +#: iast/views/project_search.py:35 iast/views/project_summary.py:95 +msgid "Projects Search" +msgstr "项目搜索" + +#: iast/views/project_search.py:37 iast/views/project_summary.py:97 +msgid "" +"Get the id and name of the item according to the search keyword matching the " +"item name, in descending order of time." +msgstr "" + +#: iast/views/project_summary.py:30 +msgid "Name of vulnerability" +msgstr "漏洞名" + +#: iast/views/project_summary.py:32 +msgid "Count of thi vulnerablity type" +msgstr "漏洞类型的个数" + +#: iast/views/project_summary.py:34 iast/views/sca_sidebar_index.py:27 +#: iast/views/vul_sidebar_index.py:59 iast/views/vul_summary.py:58 +#: iast/views/vul_summary_project.py:53 iast/views/vul_summary_type.py:52 +#: iast/views/vuls.py:93 +msgid "Level of vulnerability" +msgstr "漏洞等级" + +#: iast/views/project_summary.py:38 +msgid "Timestamp, format %M-%d" +msgstr "时间戳,格式位 %M-%d" + +#: iast/views/project_summary.py:40 +msgid "The number of vulnerabilities corresponding to the time" +msgstr "获取时间对应的漏洞数量" + +#: iast/views/project_summary.py:45 iast/views/strategys_type.py:28 +msgid "Level name of vulnerability" +msgstr "漏洞等级名称" + +#: iast/views/project_summary.py:47 iast/views/strategys_type.py:26 +msgid "Level id of vulnerability" +msgstr "漏洞等级id" + +#: iast/views/project_summary.py:57 +msgid "The latest update time of the project" +msgstr "默认值为项目的当前版本。" + +#: iast/views/project_summary.py:62 +msgid "Statistics on the number of types of vulnerabilities" +msgstr "关于漏洞类型的漏洞数量统计" + +#: iast/views/project_summary.py:69 +msgid "Statistics on the number of danger levels of vulnerabilities" +msgstr "基于漏洞等级的漏洞数量统计" + +#: iast/views/project_summary.py:78 +msgid "Item details - Summary" +msgstr "查看项目详情-概括" + +#: iast/views/project_version_add.py:25 +msgid "New application version information" +msgstr "新增项目版本信息" + +#: iast/views/project_version_add.py:30 +msgid "Projects Version Add" +msgstr "项目版本添加" + +#: iast/views/project_version_add.py:32 +msgid "" +"Add project version information according to the given conditions;\n" +" if the version id is specified, the corresponding version " +"information is updated according to the given conditions." 
+msgstr "" +"基于给定条件添加指定项目版本,若指定版本号,将基于给定条件更新指定版本信息" + +#: iast/views/project_version_current.py:27 +#: iast/views/project_version_current.py:72 +msgid "Version setting failed" +msgstr "版本设置失败" + +#: iast/views/project_version_current.py:28 +#: iast/views/project_version_current.py:66 +msgid "Version setting success" +msgstr "版本设置成功" + +#: iast/views/project_version_current.py:34 +msgid "Set to the current application version" +msgstr "设置为当前项目版本" + +#: iast/views/project_version_current.py:39 +msgid "Projects Version Current" +msgstr "当前项目版本获取" + +#: iast/views/project_version_current.py:41 +msgid "" +"Specify the selected version as the current version of the project according " +"to the given conditions." +msgstr "基于给定条件获取特定项目的版本号" + +#: iast/views/project_version_delete.py:31 +msgid "Delete application version information" +msgstr "删除项目版本信息" + +#: iast/views/project_version_delete.py:36 +msgid "Projects Version Delete" +msgstr "项目版本删除" + +#: iast/views/project_version_list.py:36 +msgid "View application version list" +msgstr "查看项目版本列表" + +#: iast/views/project_version_list.py:40 +msgid "Projects Version List" +msgstr "项目版本列表" + +#: iast/views/project_version_list.py:41 +msgid "Get the version information list of the item corresponding to the id" +msgstr "获取用户对应的漏洞列表" + +#: iast/views/project_version_list.py:57 +msgid "Search successful" +msgstr "查询成功" + +#: iast/views/project_version_update.py:18 +#: iast/views/project_version_update.py:42 iast/views/version_update.py:49 +msgid "Update completed" +msgstr "更新成功" + +#: iast/views/project_version_update.py:24 +msgid "Update application version information" +msgstr "更新项目版本信息" + +#: iast/views/project_version_update.py:29 +msgid "Projects Version Update" +msgstr "项目版本更新" + +#: iast/views/project_version_update.py:31 +msgid "Update the version information of the corresponding version id." +msgstr "使用指定版本id获取对应的版本信息。" + +#: iast/views/projects.py:26 iast/views/scan_strategys.py:48 +#: iast/views/sensitive_info_rule.py:113 iast/views/strategys.py:73 +msgid "The name of the item to be searched, supports fuzzy search." +msgstr "项目名搜索,支持模糊匹配" + +#: iast/views/projects.py:34 +msgid "View item list" +msgstr "查看项目列表" + +#: iast/views/projects.py:39 +msgid "Projects List" +msgstr "项目列表" + +#: iast/views/sca_details.py:57 iast/views/sca_summary.py:99 +#: iast/views/scas.py:88 iast/views/vul_details.py:280 +#: iast/views/vul_status.py:51 iast/views/vul_summary.py:103 +#: iast/views/vul_summary_project.py:98 iast/views/vul_summary_type.py:97 +#: iast/views/vulnerability_status.py:41 iast/views/vuls.py:147 +msgid "Get data sample" +msgstr "获取数据样例" + +#: iast/views/sca_details.py:59 iast/views/sca_summary.py:101 +#: iast/views/scas.py:90 iast/views/vul_details.py:282 +#: iast/views/vul_status.py:53 iast/views/vul_summary.py:105 +#: iast/views/vul_summary_project.py:100 iast/views/vul_summary_type.py:99 +#: iast/views/vulnerability_status.py:43 iast/views/vuls.py:149 +msgid "" +"The aggregation results are programming language, risk level, vulnerability " +"type, project" +msgstr "聚合结果分别为编程语言,风险等级,漏洞类型,项目。" + +#: iast/views/sca_details.py:85 +msgid "Component Detail" +msgstr "组件详情" + +#: iast/views/sca_details.py:87 +msgid "Get the details of the corresponding component by specifying the id." 
+msgstr "通过指定id来获取对应组件的详情" + +#: iast/views/sca_details.py:100 +msgid "Components do not exist or no permission to access" +msgstr "组件不存在或无权限访问" + +#: iast/views/sca_details.py:137 +msgid "Current version stopped for maintenance or it is not a secure version" +msgstr "当前版本已停止维护或暂无安全版本" + +#: iast/views/sca_details.py:152 +msgid "Component information query failed" +msgstr "组件信息查询失败" + +#: iast/views/sca_sidebar_index.py:40 iast/views/sca_summary.py:91 +#: iast/views/scas.py:80 iast/views/vul_list_for_plugin.py:54 +#: iast/views/vul_sidebar_index.py:70 iast/views/vul_summary.py:96 +#: iast/views/vul_summary_project.py:91 iast/views/vul_summary_type.py:90 +#: iast/views/vuls.py:140 +msgid "Sorted index" +msgstr "排序指标" + +#: iast/views/sca_sidebar_index.py:46 +msgid "Component List" +msgstr "组件列表" + +#: iast/views/sca_sidebar_index.py:48 +msgid "" +"Use the specified project information to obtain the corresponding component." +msgstr "使用指定项目信息获取对应的组件。" + +#: iast/views/sca_summary.py:31 +msgid "Three-party components overview" +msgstr "三方组件概览" + +#: iast/views/sca_summary.py:58 iast/views/scas.py:47 +#: iast/views/vul_summary.py:50 iast/views/vul_summary_project.py:45 +#: iast/views/vul_summary_type.py:44 +msgid "Name of Project" +msgstr "项目名" + +#: iast/views/sca_summary.py:63 iast/views/scas.py:57 +#: iast/views/vul_summary.py:63 iast/views/vul_summary_project.py:58 +#: iast/views/vul_summary_type.py:57 iast/views/vuls.py:32 +#: iast/views/vuls.py:107 +msgid "Id of Project" +msgstr "项目ID" + +#: iast/views/sca_summary.py:68 +msgid "The id level of vulnerability" +msgstr "漏洞等级id" + +#: iast/views/sca_summary.py:76 iast/views/scas.py:65 +#: iast/views/vul_summary.py:71 iast/views/vul_summary_project.py:66 +#: iast/views/vul_summary_type.py:65 iast/views/vuls.py:115 +msgid "The default is the current version id of the project." +msgstr "默认值为项目的当前版本。" + +#: iast/views/sca_summary.py:82 iast/views/scas.py:71 +msgid "Fuzzy keyword search field for package_name." 
+msgstr "模糊搜索关键词,针对package_name字段" + +#: iast/views/sca_summary.py:147 +msgid "Component Summary (with project)" +msgstr "组件概况(项目相关)" + +#: iast/views/sca_summary.py:149 +msgid "" +"Use the specified project information to get the corresponding component " +"summary" +msgstr "使用指定项目信息获取对应的组件概况。" + +#: iast/views/scan_strategys.py:75 iast/views/scan_strategys.py:111 +#: iast/views/scan_strategys.py:140 iast/views/scan_strategys.py:167 +#: iast/views/scan_strategys.py:189 iast/views/scan_strategys.py:211 +#: iast/views/scan_strategys.py:225 iast/views/scan_strategys.py:242 +msgid "ScanStrategy" +msgstr "策略" + +#: iast/views/scan_strategys.py:76 +msgid "ScanStrategy Relation Projects" +msgstr "" + +#: iast/views/scan_strategys.py:78 +msgid "Get scan strategy relation projects" +msgstr "" + +#: iast/views/scan_strategys.py:112 +msgid "ScanStrategy List" +msgstr "策略列表" + +#: iast/views/scan_strategys.py:141 +msgid "ScanStrategy Create" +msgstr "策略删除" + +#: iast/views/scan_strategys.py:143 +msgid "Create ScanStrategy" +msgstr "" + +#: iast/views/scan_strategys.py:159 iast/views/sensitive_info_rule.py:190 +msgid "create success" +msgstr "创建成功" + +#: iast/views/scan_strategys.py:168 +msgid "ScanStrategy Update" +msgstr "策略更新时间" + +#: iast/views/scan_strategys.py:186 iast/views/scan_strategys.py:234 +#: iast/views/scan_strategys.py:249 iast/views/sensitive_info_rule.py:215 +#: iast/views/sensitive_info_rule.py:339 iast/views/sensitive_info_rule.py:355 +msgid "update success" +msgstr "更新成功" + +#: iast/views/scan_strategys.py:190 +msgid "ScanStrategy delete" +msgstr "策略删除" + +#: iast/views/scan_strategys.py:205 iast/views/sensitive_info_rule.py:228 +msgid "delete success" +msgstr "处理成功" + +#: iast/views/scan_strategys.py:212 +msgid "ScanStrategy get" +msgstr "策略删除" + +#: iast/views/scan_strategys.py:213 +msgid "Get the item with pk" +msgstr "根据key获取配置" + +#: iast/views/scan_strategys.py:226 +msgid "ScanStrategy batch status" +msgstr "" + +#: iast/views/scan_strategys.py:227 iast/views/sensitive_info_rule.py:333 +msgid "batch update status." +msgstr "" + +#: iast/views/scan_strategys.py:243 +msgid "ScanStrategy all status" +msgstr "策略列表" + +#: iast/views/scan_strategys.py:244 iast/views/sensitive_info_rule.py:349 +msgid "all update status." +msgstr "通过状态id更新" + +#: iast/views/scas.py:52 +msgid "The id of level of vulnerability" +msgstr "漏洞等级id" + +#: iast/views/scas.py:125 +msgid "Component List (with project)" +msgstr "组件列表(项目相关)" + +#: iast/views/scas.py:127 +msgid "" +"use the specified project information to obtain the corresponding component." 
+msgstr "使用指定项目信息获取对应的组件。" + +#: iast/views/sensitive_info_rule.py:116 +msgid "regex pattern" +msgstr "正则匹配" + +#: iast/views/sensitive_info_rule.py:117 +msgid "the data for test regex" +msgstr "用于测试策略的数据" + +#: iast/views/sensitive_info_rule.py:131 iast/views/sensitive_info_rule.py:161 +#: iast/views/sensitive_info_rule.py:195 iast/views/sensitive_info_rule.py:218 +#: iast/views/sensitive_info_rule.py:231 iast/views/sensitive_info_rule.py:249 +#: iast/views/sensitive_info_rule.py:263 iast/views/sensitive_info_rule.py:331 +#: iast/views/sensitive_info_rule.py:347 +msgid "SensitiveInfoRule" +msgstr "" + +#: iast/views/sensitive_info_rule.py:132 +msgid "SensitiveInfoRule List" +msgstr "" + +#: iast/views/sensitive_info_rule.py:162 +msgid "SensitiveInfoRule Create" +msgstr "" + +#: iast/views/sensitive_info_rule.py:196 +msgid "SensitiveInfoRule Update" +msgstr "" + +#: iast/views/sensitive_info_rule.py:219 +msgid "SensitiveInfoRule delete" +msgstr "" + +#: iast/views/sensitive_info_rule.py:232 +msgid "SensitiveInfoRule get" +msgstr "" + +#: iast/views/sensitive_info_rule.py:250 +msgid "SensitiveInfoRule Pattern Type List" +msgstr "" + +#: iast/views/sensitive_info_rule.py:252 +msgid "Get the item corresponding to the user." +msgstr "获取用户对应的漏洞列表" + +#: iast/views/sensitive_info_rule.py:264 +msgid "SensitiveInfoRule validated_data" +msgstr "" + +#: iast/views/sensitive_info_rule.py:332 +msgid "SensitiveInfoRule batch status" +msgstr "" + +#: iast/views/sensitive_info_rule.py:348 +msgid "SensitiveInfoRule all status" +msgstr "" + +#: iast/views/strategy_delete.py:30 iast/views/strategy_disable.py:24 +#: iast/views/strategy_enable.py:26 iast/views/strategy_modified.py:27 +#: iast/views/strategys.py:80 iast/views/strategys.py:104 +#: iast/views/strategys.py:177 iast/views/strategys_add.py:34 +#: iast/views/strategys_list.py:27 iast/views/strategys_type.py:40 +msgid "Strategy" +msgstr "策略" + +#: iast/views/strategy_delete.py:31 +msgid "Strategy Delete" +msgstr "策略删除" + +#: iast/views/strategy_delete.py:33 +msgid "Delete the corresponding strategy according to id" +msgstr "使用策略对应的id来删除策略" + +#: iast/views/strategy_delete.py:41 +msgid "This strategy does not exist" +msgstr "该策略不存在" + +#: iast/views/strategy_disable.py:17 iast/views/strategy_disable.py:44 +msgid "Strategy is disabled, total {} hook rules" +msgstr "策略禁用成功,共{}条hook规则" + +#: iast/views/strategy_disable.py:25 +msgid "Strategy Disable" +msgstr "策略停用" + +#: iast/views/strategy_disable.py:27 +msgid "Disable the corresponding strategy according to id" +msgstr "使用策略对应的id来停用策略" + +#: iast/views/strategy_enable.py:27 +msgid "Strategy Enbale" +msgstr "策略启用" + +#: iast/views/strategy_enable.py:29 +msgid "Enable the corresponding strategy according to id" +msgstr "使用策略对应的id来启用策略" + +#: iast/views/strategy_modified.py:13 iast/views/strategys.py:29 +#: iast/views/strategys.py:43 iast/views/strategys_type.py:19 +msgid "The name of the vulnerability type targeted by the strategy" +msgstr "策略对应的漏洞类型名称" + +#: iast/views/strategy_modified.py:14 iast/views/strategys.py:30 +#: iast/views/strategys.py:44 +msgid "Types of vulnerabilities targeted by the strategy" +msgstr "获取用户对应的漏洞列表" + +#: iast/views/strategy_modified.py:15 iast/views/strategys.py:31 +#: iast/views/strategys.py:45 +msgid "This field indicates whether the vulnerability is enabled, 1 or 0" +msgstr "表示漏洞是否启用,1启用,0停用" + +#: iast/views/strategy_modified.py:16 iast/views/strategys.py:32 +#: iast/views/strategys.py:46 +msgid "Description of the corresponding vulnerabilities of the strategy" +msgstr 
"使用策略对应的id来删除策略" + +#: iast/views/strategy_modified.py:18 iast/views/strategys.py:34 +#: iast/views/strategys.py:49 iast/views/strategys_type.py:21 +msgid "The strategy corresponds to the level of vulnerability" +msgstr "漏洞等级的对应策略" + +#: iast/views/strategy_modified.py:20 iast/views/strategys.py:38 +#: iast/views/strategys.py:53 +msgid "Suggestions for repairing vulnerabilities corresponding to the strategy" +msgstr "策略对应的漏洞修复提示" + +#: iast/views/strategy_modified.py:28 +msgid "Strategy modified" +msgstr "策略添加" + +#: iast/views/strategy_modified.py:30 iast/views/strategys.py:107 +#: iast/views/strategys_list.py:30 +msgid "Get a list of strategies." +msgstr "获取策略列表" + +#: iast/views/strategys.py:36 +msgid "Strategy update time" +msgstr "策略更新时间" + +#: iast/views/strategys.py:81 +msgid "Strategy retrieve" +msgstr "策略删除" + +#: iast/views/strategys.py:83 +msgid "Get a strategiey by id." +msgstr "根据id获取策略" + +#: iast/views/strategys.py:105 +msgid "Strategy List" +msgstr "策略列表" + +#: iast/views/strategys.py:174 +msgid "No strategy" +msgstr "暂无策略" + +#: iast/views/strategys.py:178 +msgid "Strategy Add" +msgstr "策略添加" + +#: iast/views/strategys_add.py:20 +msgid "The id corresponding to the strategys, use\",\" for segmentation." +msgstr "策略对应的id,使用\",\"进行切分。" + +#: iast/views/strategys_add.py:21 +msgid "The name of strategy" +msgstr "策略名" + +#: iast/views/strategys_add.py:35 +msgid "Sacn Strategy Add" +msgstr "扫描策略添加" + +#: iast/views/strategys_list.py:16 +msgid "The name of the strategy" +msgstr "策略名" + +#: iast/views/strategys_list.py:28 +msgid "Strategy List (with user)" +msgstr "策略列表(用户相关)" + +#: iast/views/strategys_type.py:41 +msgid "Strategy Type" +msgstr "策略类型" + +#: iast/views/strategys_type.py:43 +msgid "Get a list of strategy types." +msgstr "获取策略类型列表" + +#: iast/views/system_info.py:14 +msgid "API - System Information Page" +msgstr "api - 系统信息页面" + +#: iast/views/user_info.py:20 +msgid "User Info" +msgstr "用户信息" + +#: iast/views/user_login.py:20 +msgid "User login" +msgstr "用户登录" + +#: iast/views/user_login.py:35 +msgid "Captcha timed out" +msgstr "验证码超时" + +#: iast/views/user_login.py:43 +msgid "Login successful" +msgstr "登录成功" + +#: iast/views/user_login.py:48 iast/views/user_login.py:55 +msgid "Login failed" +msgstr "登录失败" + +#: iast/views/user_login.py:50 +msgid "Verification code error" +msgstr "验证码错误" + +#: iast/views/user_login.py:52 +msgid "verification code should not be empty" +msgstr "验证码不能为空" + +#: iast/views/user_logout.py:21 +msgid "Sign out" +msgstr "退出登录" + +#: iast/views/user_logout.py:28 +msgid "Sign out successfully" +msgstr "退出成功" + +#: iast/views/user_passwrd.py:18 +msgid "Change Password" +msgstr "修改密码" + +#: iast/views/user_passwrd.py:24 +msgid "Password should not be empty" +msgstr "密码不允许为空" + +#: iast/views/user_passwrd.py:32 +msgid "Password has been changed successfully" +msgstr "密码修改成功" + +#: iast/views/user_passwrd.py:34 +msgid "Incorrect old password" +msgstr "原始密码错误" + +#: iast/views/user_passwrd.py:37 +msgid "Incorrect" +msgstr "参数不正确" + +#: iast/views/user_passwrd_reset.py:19 +msgid "Reset Password" +msgstr "重置密码" + +#: iast/views/user_passwrd_reset.py:30 +msgid "User {} password reset success" +msgstr "用户{}密码重置成功" + +#: iast/views/user_passwrd_reset.py:33 +msgid "User does not exist" +msgstr "用户不存在" + +#: iast/views/user_passwrd_reset.py:37 iast/views/user_passwrd_reset.py:38 +msgid "UserID is empty" +msgstr "用户ID为空" + +#: iast/views/user_passwrd_reset.py:41 +msgid "UserID must be a numeric" +msgstr "userId必须为数字" + +#: iast/views/user_passwrd_reset.py:44 
+#, fuzzy, python-brace-format +#| msgid "Password reset failed, reasons: {E}" +msgid "Password reset failed, reasons: {E}" +msgstr "密码重置失败,原因:{e}" + +#: iast/views/user_register_batch.py:44 +msgid "Account has been created successfully" +msgstr "账号创建成功" + +#: iast/views/user_register_batch.py:70 +msgid "User {} already exists" +msgstr "用户{}已存在" + +#: iast/views/user_register_batch.py:75 +msgid "Failed to create user, error message: token is incorrect" +msgstr "用户创建失败,原因:token不正确" + +#: iast/views/user_register_batch.py:76 +msgid "Account registration successful" +msgstr "账号注册成功" + +#: iast/views/user_register_batch.py:91 +msgid "User account file read error" +msgstr "用户账号文件读取错误" + +#: iast/views/user_token.py:19 +msgid "Get OpenAPI token" +msgstr "获取OpenApi Token" + +#: iast/views/version_update.py:20 +msgid "Updated is currently not allowed" +msgstr "当前不允许更新" + +#: iast/views/vul_count_for_plugin.py:27 +msgid "Vulnerability Count (with agent name)" +msgstr "漏洞总数(Agent相关)" + +#: iast/views/vul_count_for_plugin.py:29 +msgid "Get the number of vulnerabilities corresponding to the Agent." +msgstr "获取用户对应的漏洞列表" + +#: iast/views/vul_count_for_plugin.py:35 iast/views/vul_list_for_plugin.py:70 +msgid "Please input agent name." +msgstr "please input agent name." + +#: iast/views/vul_count_for_plugin.py:41 iast/views/vul_list_for_plugin.py:76 +msgid "agent_name not found" +msgstr "Not found agent_name!" + +#: iast/views/vul_delete.py:25 +msgid "Delete vulnerability" +msgstr "删除漏洞" + +#: iast/views/vul_delete.py:28 +msgid "Vulnerability Delete" +msgstr "删除漏洞" + +#: iast/views/vul_delete.py:31 +msgid "Delete the corresponding vulnerability by specifying the id" +msgstr "使用漏洞对应的id来删除漏洞" + +#: iast/views/vul_delete.py:45 +msgid "Failed to delete, error message: Vulnerability does not exist" +msgstr "删除失败,原因:漏洞不存在" + +#: iast/views/vul_details.py:137 +msgid "Source method" +msgstr "污点来源方法" + +#: iast/views/vul_details.py:139 +msgid "Hazardous method" +msgstr "危险方法" + +#: iast/views/vul_details.py:141 +msgid "Propagation method" +msgstr "传播方法" + +#: iast/views/vul_details.py:158 +msgid "Analysis of errovence analysis of stain call diagram: {}" +msgstr "[{}] 污点调用图解析出错,原因:{}" + +#: iast/views/vul_details.py:168 +msgid "Error analysis of Header, error message: {}" +msgstr "header解析出错,错误原因:{}" + +#: iast/views/vul_details.py:207 iast/views/vul_details.py:361 +msgid "[{}] Vulnerability information parsing error, error message: {}" +msgstr "[{}] 漏洞信息解析出错,原因:{}" + +#: iast/views/vul_details.py:341 +msgid "" +"Use the corresponding id of the vulnerability to query the details of the " +"vulnerability" +msgstr "使用漏洞对应的id来获取漏洞详情信息" + +#: iast/views/vul_details.py:362 +msgid "Vulnerability data query error" +msgstr "漏洞数据查询出错" + +#: iast/views/vul_levels.py:27 +msgid "Vul level list" +msgstr "漏洞等级列表" + +#: iast/views/vul_levels.py:28 +msgid "Vul level List" +msgstr "漏洞等级列表" + +#: iast/views/vul_levels.py:29 +msgid "Get a list of vul level." 
+msgstr "获取策略等级列表" + +#: iast/views/vul_list_for_plugin.py:40 +msgid "Name of agent" +msgstr "Agent名称" + +#: iast/views/vul_list_for_plugin.py:45 iast/views/vul_sidebar_index.py:63 +#: iast/views/vul_summary.py:87 iast/views/vul_summary_project.py:82 +#: iast/views/vul_summary_type.py:81 iast/views/vuls.py:131 +msgid "The URL corresponding to the vulnerability" +msgstr "漏洞的对应url" + +#: iast/views/vul_list_for_plugin.py:61 +msgid "Vulnerability List (with agent name)" +msgstr "漏洞列表(Agent相关)" + +#: iast/views/vul_list_for_plugin.py:63 +msgid "Use the agent name to get the corresponding list of vulnerabilities" +msgstr "使用agent名获取对应的漏洞列表" + +#: iast/views/vul_recheck.py:30 +msgid "Whether the project does not exist agent" +msgstr "项目是否存在探针" + +#: iast/views/vul_recheck.py:32 +msgid "Waiting queue length for replay" +msgstr "重放的等待队列长度" + +#: iast/views/vul_recheck.py:34 +msgid "Success queue length for replay" +msgstr "重放的成功队列长度" + +#: iast/views/vul_recheck.py:36 +msgid "Checking queue length for replay" +msgstr "重放的检测队列长度" + +#: iast/views/vul_recheck.py:42 iast/views/vul_recheck.py:52 +#: iast/views/vul_recheck.py:164 iast/views/vul_recheck.py:242 +#: iast/views/vul_recheck_v2.py:32 iast/views/vul_recheck_v2.py:41 +#: iast/views/vul_recheck_v2.py:153 +msgid "Handle success" +msgstr "处理成功" + +#: iast/views/vul_recheck.py:43 iast/views/vul_recheck.py:224 +#: iast/views/vul_recheck_v2.py:33 iast/views/vul_recheck_v2.py:213 +msgid "Item ID should not be empty" +msgstr "项目ID不能为空" + +#: iast/views/vul_recheck.py:44 iast/views/vul_recheck.py:243 +#: iast/views/vul_recheck_v2.py:34 iast/views/vul_recheck_v2.py:232 +msgid "Incorrect format parameter" +msgstr "参数格式不正确" + +#: iast/views/vul_recheck.py:45 iast/views/vul_recheck.py:192 +#: iast/views/vul_recheck.py:247 iast/views/vul_recheck_v2.py:35 +#: iast/views/vul_recheck_v2.py:181 iast/views/vul_recheck_v2.py:236 +msgid "Batch playback error" +msgstr "批量重放出错" + +#: iast/views/vul_recheck.py:46 iast/views/vul_recheck.py:186 +#: iast/views/vul_recheck_v2.py:36 iast/views/vul_recheck_v2.py:175 +msgid "" +"Current application has not been associated with probes and cannot be " +"reproduced." +msgstr "当前项目尚未关联探针,无法进行漏洞重放" + +#: iast/views/vul_recheck.py:53 iast/views/vul_recheck.py:146 +#: iast/views/vul_recheck_v2.py:42 iast/views/vul_recheck_v2.py:135 +msgid "IDS should not be empty" +msgstr "ids不能为空" + +#: iast/views/vul_recheck.py:54 iast/views/vul_recheck.py:150 +#: iast/views/vul_recheck_v2.py:43 iast/views/vul_recheck_v2.py:139 +msgid "IDS must be: Vulnerability ID, Vulnerability ID Format" +msgstr "ids必须为:漏洞ID,漏洞ID 格式" + +#: iast/views/vul_recheck.py:55 iast/views/vul_recheck.py:168 +#: iast/views/vul_recheck_v2.py:44 iast/views/vul_recheck_v2.py:157 +#: iast/views/vul_request_replay.py:238 +msgid "Vulnerability replay error" +msgstr "漏洞重放出错" + +#: iast/views/vul_recheck.py:121 iast/views/vul_recheck.py:201 +#: iast/views/vul_recheck_v2.py:110 iast/views/vul_recheck_v2.py:190 +msgid "" +"available options are (\"all\",\"project\").\n" +" Corresponding to all or specific project respectively." +msgstr "可选项有('all','project')。对应全部漏洞和指定项目的漏洞。" + +#: iast/views/vul_recheck.py:129 iast/views/vul_recheck.py:209 +#: iast/views/vul_recheck_v2.py:118 iast/views/vul_recheck_v2.py:198 +msgid "" +"The corresponding id of the Project.\n" +" Only If the type is project, the projectId here will be used." 
+msgstr "项目的对应id.只有在type参数为project时,该参数才会被使用" + +#: iast/views/vul_recheck.py:133 iast/views/vul_recheck.py:213 +#: iast/views/vul_recheck_v2.py:122 iast/views/vul_recheck_v2.py:202 +msgid "Vulnerability verification" +msgstr "漏洞验证" + +#: iast/views/vul_recheck.py:134 iast/views/vul_recheck.py:214 +#: iast/views/vul_recheck_v2.py:123 iast/views/vul_recheck_v2.py:203 +msgid "" +"Verify the user's corresponding vulnerabilities.\n" +" Need to specify the type" +msgstr "验证用户对应的漏洞.需要指定验证行为的类型" + +#: iast/views/vul_recheck_v2.py:222 iast/views/vul_recheck_v2.py:231 +msgid "Verification in progress" +msgstr "验证中" + +#: iast/views/vul_request_replay.py:77 +msgid "HTTP request parsing error, error message: {}" +msgstr "HTTP请求解析出错,原因:{}" + +#: iast/views/vul_request_replay.py:200 +msgid "Stain pool data does not exist or no permission to access" +msgstr "污点池数据不存在或无权操作" + +#: iast/views/vul_request_replay.py:214 +msgid "" +"The probe has been destroyed or suspended, please check the probe status" +msgstr "探针已销毁或暂停运行,请选检查探针状态" + +#: iast/views/vul_request_replay.py:220 +msgid "Replay request is illegal" +msgstr "重放请求不合法" + +#: iast/views/vul_request_replay.py:233 +msgid "Relay request success" +msgstr "请求重返成功" + +#: iast/views/vul_request_replay.py:252 +msgid "Response header analysis error, error message: {}" +msgstr "Response Header解析出错,错误原因:{}" + +#: iast/views/vul_request_replay.py:283 iast/views/vul_request_replay.py:301 +msgid "Replay request does not exist or no permission to access" +msgstr "重放请求不存在或无操作权限" + +#: iast/views/vul_request_replay.py:304 +msgid "Replay request processing" +msgstr "重放请求处理中" + +#: iast/views/vul_request_replay.py:318 +msgid "Replay failed" +msgstr "重放失败" + +#: iast/views/vul_sidebar_index.py:48 iast/views/vul_summary.py:44 +#: iast/views/vul_summary_project.py:39 iast/views/vul_summary_type.py:38 +#: iast/views/vuls.py:77 +msgid "Type of vulnerability" +msgstr "漏洞类型" + +#: iast/views/vul_sidebar_index.py:52 +msgid "ID of the vulnerability type" +msgstr "漏洞类型的对应id" + +#: iast/views/vul_sidebar_index.py:73 +msgid "Vulnerability List" +msgstr "漏洞列表" + +#: iast/views/vul_sidebar_index.py:75 +msgid "Get the list of vulnerabilities corresponding to the user." +msgstr "获取用户对应的漏洞列表" + +#: iast/views/vul_status.py:19 iast/views/vul_status.py:91 +msgid "Vulnerability status is modified to {}" +msgstr "漏洞状态修改为{}" + +#: iast/views/vul_status.py:25 +msgid "Modify the vulnerability status" +msgstr "修改漏洞状态" + +#: iast/views/vul_status.py:31 +msgid "Update with status_id" +msgstr "通过状态id更新" + +#: iast/views/vul_status.py:33 +msgid "Update vulnerability status with status id." +msgstr "通过状态id修改漏洞状态" + +#: iast/views/vul_status.py:40 +msgid "Update with status name(Not recommended)" +msgstr "通过状态名更新(不推荐)" + +#: iast/views/vul_status.py:42 +msgid "Update vulnerability status with status name." +msgstr "通过漏洞状态名更新漏洞状态" + +#: iast/views/vul_status.py:61 +msgid "Vulnerability Status Modify" +msgstr "漏洞状态修改" + +#: iast/views/vul_status.py:62 +msgid "" +"Modify the vulnerability status of the specified id. \n" +" The status is specified by the following two parameters. \n" +" Status corresponds to the status noun and status_id corresponds to " +"the status id. \n" +" Both can be obtained from the vulnerability status list API, and " +"status_id is preferred." 
+msgstr "" +"修改指定id的漏洞状态,状态由以下两个参数指定,status对应状态名词,status_id对" +"应状态的id,均可由漏洞状态列表API获得,优先使用status_id" + +#: iast/views/vul_summary.py:31 iast/views/vul_summary_project.py:27 +#: iast/views/vul_summary_type.py:26 +msgid "Applied vulnerability overview" +msgstr "应用漏洞概览" + +#: iast/views/vul_summary.py:77 iast/views/vul_summary_project.py:72 +#: iast/views/vul_summary_type.py:71 iast/views/vuls.py:121 +msgid "Name of status" +msgstr "状态名" + +#: iast/views/vul_summary.py:82 iast/views/vul_summary_project.py:77 +#: iast/views/vul_summary_type.py:76 iast/views/vuls.py:126 +msgid "Id of status" +msgstr "状态ID" + +#: iast/views/vul_summary.py:152 iast/views/vul_summary_project.py:127 +#: iast/views/vul_summary_type.py:129 +msgid "Vulnerability Summary" +msgstr "漏洞概览" + +#: iast/views/vul_summary.py:154 iast/views/vul_summary_project.py:129 +#: iast/views/vul_summary_type.py:131 +msgid "" +"Use the following conditions to view the statistics of the number of " +"vulnerabilities in the project." +msgstr "使用下列条件来查看项目的漏洞数量统计" + +#: iast/views/vulnerability_status.py:79 +msgid "Vulnerability Status List" +msgstr "漏洞状态列表" + +#: iast/views/vulnerability_status.py:81 +msgid "" +"Vulnerability status list, which contains the optional status of " +"vulnerabilities. \n" +" When calling the vulnerability status modification API, please " +"obtain the vulnerability status data from this API first." +msgstr "" +"漏洞状态列表,里面包含了漏洞的可选状态,调用漏洞状态修改api时请先从此API获取" +"漏洞状态数据。" + +#: iast/views/vuls.py:30 iast/views/vuls.py:83 +msgid "name of project" +msgstr "项目名" + +#: iast/views/vuls.py:101 +msgid "The id Level of vulnerability" +msgstr "漏洞等级的id" + +#: iast/views/vuls.py:188 +msgid "Vulnerability List (with project)" +msgstr "漏洞列表(项目相关)" + +#: iast/views/vuls.py:191 +msgid "Get the list of vulnerabilities corresponding to the project" +msgstr "获取项目对应的漏洞列表" + +#: scaupload/views.py:73 scaupload/views.py:94 scaupload/views.py:130 +msgid "Get sca db bulk" +msgstr "" + +#: scaupload/views.py:74 scaupload/views.py:95 scaupload/views.py:131 +#: scaupload/views.py:156 scaupload/views.py:165 scaupload/views.py:181 +#: scaupload/views.py:190 scaupload/views.py:232 scaupload/views.py:238 +#: scaupload/views.py:249 +msgid "Get sca list" +msgstr "获取组件列表" + +#: scaupload/views.py:75 scaupload/views.py:96 scaupload/views.py:132 +#: scaupload/views.py:157 scaupload/views.py:166 scaupload/views.py:182 +#: scaupload/views.py:191 scaupload/views.py:233 scaupload/views.py:239 +#: scaupload/views.py:250 +msgid "SCA DB" +msgstr "组件库" + +#: scaupload/views.py:155 scaupload/views.py:164 scaupload/views.py:180 +#: scaupload/views.py:189 +msgid "Get sca db" +msgstr "获取组件库" + +#: scaupload/views.py:231 scaupload/views.py:237 +msgid "Get sca license list" +msgstr "获取组件license列表" + +#: scaupload/views.py:248 +msgid "Get sca stat " +msgstr "获取组件库数据统计" + +#: webapi/settings.py:383 +#, python-brace-format +msgid "" +"Here is the API documentation in webapi. The corresponding management part " +"API can be found through the relevant tag.\n" +"\n" +"There are two authentication methods. You can obtain csrf_token and " +"sessionid through the login process, or access the corresponding API through " +"the user's corresponding Token.\n" +"\n" +"The Token method is recommended here, and users can find it in the Agent " +"installation interface such as -H\n" +" 'Authorization: Token {token}', here is the token corresponding to the " +"user, the token method also requires a token like this on the request header." 
+msgstr "" +"这里是webapi中的API文档。可通过相关的tag来找到对应的管理部分API。\n" +"\n" +"这里有两个鉴权方式,通过登录流程获取csrf_token和sessionid,或者通过用户对应的" +"Token来访问对应的API。\n" +"\n" +"推荐使用Token方式,用户可在安装Agent界面找到如 -H 'Authorization: Token " +"{token}' ,此处为用户对应的token,token方式同样需要在请求头上带上一个这样的" +"token。 " + +#~ msgid "Default department" +#~ msgstr "默认部门" + +#~ msgid "version_name need" +#~ msgstr "需要版本名" + +#~ msgid "The id corresponding to the agent, use; for segmentation." +#~ msgstr "漏洞的对应url" + +#~ msgid "Vulnerability details query " +#~ msgstr "漏洞详情查询" + +#~ msgid "Confirmed" +#~ msgstr "已确认" + +#~ msgid "" +#~ "Deletion failed, please forcely delete all the users belong to the " +#~ "department{}" +#~ msgstr "删除失败,请先删除部门{}下所有的用户" + +#~ msgid "Running" +#~ msgstr "运行中" + +#~ msgid "Stopped" +#~ msgstr "未运行" diff --git a/static/reports/.gitignore b/static/reports/.gitignore new file mode 100644 index 000000000..c96a04f00 --- /dev/null +++ b/static/reports/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore \ No newline at end of file diff --git a/static/templates/footer.html b/static/templates/footer.html new file mode 100644 index 000000000..fdce3b32f --- /dev/null +++ b/static/templates/footer.html @@ -0,0 +1,7 @@ + + + +

iast.io

+
+ + \ No newline at end of file diff --git a/static/templates/header.html b/static/templates/header.html new file mode 100644 index 000000000..8a4805fee --- /dev/null +++ b/static/templates/header.html @@ -0,0 +1,6 @@ + + + +

 

+ + \ No newline at end of file diff --git a/static/templates/pdf.html b/static/templates/pdf.html new file mode 100644 index 000000000..d6f850191 --- /dev/null +++ b/static/templates/pdf.html @@ -0,0 +1,528 @@ + + + + + + + +
+ +
+

{{ project.name }}

+ +
+ +

{{ time_str }}

+

{{ i18n.security_testing_report }}

+
+

{{ i18n.first_project_information }}

+ + + + + + + + + + + + + + + + + + + +
{{ i18n.application_name }}{{ project.name }}
{{ i18n.author }}{{user.username}}
{{ i18n.number_of_vulnerability }}{{project.vul_count}}
{{ i18n.number_of_agent }}{{project.agent_count}}
+ +

{{ i18n.second_the_result_analysis }}

+

2.1 {{ i18n.vulnerability_severity_levels_distribution }}

+

+ {{ levelCountStr }} +

+

2.2 {{ i18n.distribution_of_vulnerability }}

+ + + + + + + + + {% for vulTypeTableBodyRow in vulTypeTableBodyRows %} + + + + + + {% endfor %} + +
{{ i18n.severity_levels }}{{ i18n.vulnerability_type_name }}{{ i18n.number }}
{{ vulTypeTableBodyRow.type_level }}{{ vulTypeTableBodyRow.type_name }}{{ vulTypeTableBodyRow.type_count }}
+ +

2.3 {{ i18n.vulnerability_details }}

+ + {% for vulTypeDetail in vulTypeDetailArray %} +

{{ vulTypeDetail.title }}

+ + {% for oneVul in vulTypeDetail.vuls %} +

{{ oneVul.title }}

+
+

{{ oneVul.summary }}

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + +
{{ oneVul.severity_level }}{{ oneVul.level_id }}
{{ oneVul.first_scan_time }}{{ oneVul.first_time }}
{{ oneVul.last_scan_time }}{{ oneVul.latest_time }}
{{ oneVul.development_language }}{{ oneVul.language }}
{{oneVul.vulnerability_url}}{{oneVul.url}}
+
+

{{oneVul.description}}

+
+

+ {{oneVul.detail_data1}} +

+

+ {{oneVul.detail_data2}} +

+

+ {{oneVul.detail_data3}} +

+
+ {% endfor %} + + {% endfor %} +
+
+ + + + \ No newline at end of file diff --git a/test/__init__.py b/test/__init__.py new file mode 100644 index 000000000..5c5964117 --- /dev/null +++ b/test/__init__.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/13 下午10:21 +# project: dongtai-engine +import os +import unittest + +import django + + +class DongTaiTestCase(unittest.TestCase): + def __init__(self, methodName='runTest'): + super().__init__(methodName) + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dongtai_conf.settings") + os.environ.setdefault("debug", "true") + django.setup() + + +from django.test.runner import DiscoverRunner + +class NoDbTestRunner(DiscoverRunner): + + def setup_databases(self, **kwargs): + pass + + def teardown_databases(self, old_config, **kwargs): + pass diff --git a/test/apiserver/__init__.py b/test/apiserver/__init__.py new file mode 100644 index 000000000..98f457e1e --- /dev/null +++ b/test/apiserver/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/28 下午4:09 +# project: dongtai-openapi diff --git a/test/apiserver/test_agent_apiroute.py b/test/apiserver/test_agent_apiroute.py new file mode 100644 index 000000000..0a36e2daa --- /dev/null +++ b/test/apiserver/test_agent_apiroute.py @@ -0,0 +1,1610 @@ +from test.apiserver.test_agent_base import AgentTestCase + + + +class ApiRouteTestCase(AgentTestCase): + + def test_agent_api_upload(self): + data = { + "detail": { + "agentId": + 57, + "apiData": [{ + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/tagorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryvirtualnumber", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/confirmextrafeebydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryorderforrecent24hoursbypassenger", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryorderbydemandno", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/pickup", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/getorderfeedetail", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/matchsucceed", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/arrive", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/startservice", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/recordsharechannel", + "class": "", + 
"parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/arriveviapoint", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/drivercompleteorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/driverremindpayment", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/passengeruseinvitecode", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/updatedestinfo", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/updatepoibyorderno", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryreserveinfobydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryeventchangebydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryreservedispatchagaintask", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryorderdetail", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querylastevent", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/redispatchorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/redispatchfailed", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/cancelredispatch", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querylastredispatchstatus", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryredispatchhistory", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/arriveandsettlement", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/setcancelreasonbypassenger", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": 
"/orderbase/unlockandcancelorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/ordereventnotifybymerchant", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryredispatchorders", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/applyredispatchorders", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryhistorycancelrule", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryflightchangehis", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/share", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/changedestination", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/modifyorderprice", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/changeflight", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/paybyplatformnotify", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querycancelfeebymis", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querycanceldisplay", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/arriveboardingpoint", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/cancelorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryorders", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryredispatchingorderbydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querydriverredispatchcount", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/deleteorderbydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", 
+ "uri": "/orderbase/deleteorderbypassenger", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/cancelorderbymis", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryorderchainlist", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryvehicleinfobypassenger", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querylatestorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/confirmordersettlement", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/premodifyorderprice", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryreservestartordersbydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/paynotify", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/createpayorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/checkchangedestinationpayadvance", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/cancelpayorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/checkchangedestination", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/canceldriverservice", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/prerefund", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/refund", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/unifiedrefundnotify", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/prededuct", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/deduct", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": 
"/orderbase/queryconflictorderlist", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querycacheorderinfo", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querylastreservebydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/refuseorderbydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryuserguidecancelrule", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querycancelledorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryunstartreservelistbydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/refunddetail", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/autopaybyplatformnotify", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/confirmextrafeebypassenger", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/enterviapoint", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/exitviapoint", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/arriveddestpartition", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/refunddetaillist", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/deductdetaillist", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/savecachedata", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/getcachedata", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/getviapointsbyordernos", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/getpaymentsbyordernos", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + 
"description": "", + "uri": "/orderbase/querypaymentinfobyorderno", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querymodifypriceinfobyorderno", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/getpartitionsbyorderno", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/receivealarm", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querypayadvanceinfo", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryorderbypartner", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/cancelorderbytest", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querysettlementdetailbycharter", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querymodifypriceandrefundamount", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querysettlementdetailbycharterv2", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/confirmordersettlementbycharter", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/cancelorderbydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryorderfeedetail", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/uploadnavigationchange", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryexceptionorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querynavigationchangelog", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/addequity", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/preredispatchfailed", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/statordersdailydriver", + "class": "", + "parameters": "", + "returnType": 
"" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/stopwork", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/deleteorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querytaskinfo", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryselfunfinishorderlist", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/pushmerchantorderstatus", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/resetorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/updatestatus", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/matchsucceedv2", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/getdemandorsupplyorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/cancelorderforextsupply", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/operateordertag", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querycancelordertag", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querypassengerstat", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/freeorderbyplatform", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryorderbymisback", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/receivealarmforzebra", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/inserttaskinfo", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryunfinishedordersbydriver", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/cancelreleasehandle", + "class": "", + "parameters": "", + "returnType": "" + }, { 
+ "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querycancelrelease", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/redispatchorderbymis", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/createpayorderandpushpassenger", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/sendorderurl", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/unifiedtaskexecution", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/commitrefund", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/checkfirstsettlement", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryrelationorders", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/urgentsettlement", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/savefeerelease", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/getfeerelease", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/closeparentorder", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/refundrollback", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryorderlistbydevice", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/countfeereleasebyuid", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querydriverappealunreadcount", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/editdriverappealunread", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querydriverappeallist", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querydriverappealdetail", + "class": "", + "parameters": "", + 
"returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/driverappealcheck", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/driverappealsave", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querydrivercashreward", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/senddrivercashreward", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/paymentconfirm", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/updatepassengersurveyflag", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/synclocaldata", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/changedestinationconfirm", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/saveordertransinner", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/changeorigin", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/changeroute", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/prepayrefundnotify", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/paychannelchangenotify", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querynextorderlist", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/uploaddrivingsituation", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/updateordertag", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/changecarusetime", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/changecarusetimeconfirm", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/markexception", + "class": "", + "parameters": "", + 
"returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/changeroutecheck", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/updateaftersalesmodel", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/insuranceresultnotify", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/removelocalcachedata", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/querycanceltask", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/getordernobymockcode", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/getuserreport", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/updatedrivertag", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/gettripremarkform", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/gettripremarkresult", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/updatetripremarkresult", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/gettripremarklist", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/getcontactchangelist", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/changecontact", + "class": "", + "parameters": "", + "returnType": "" + }, { + "controller": "", + "file": "", + "method": ["GET", "POST"], + "description": "", + "uri": "/orderbase/queryorderstatus4price", + "class": "", + "parameters": "", + "returnType": "" + }] + }, + "type": 97 + } + data['detail']['agentId'] = self.agent_id + res = self.agent_report(data, agentId=self.agent_id) diff --git a/test/apiserver/test_agent_base.py b/test/apiserver/test_agent_base.py new file mode 100644 index 000000000..9c6e218c8 --- /dev/null +++ b/test/apiserver/test_agent_base.py @@ -0,0 +1,459 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : test_agent_register +# @created : 星期四 12月 09, 2021 19:25:14 CST +# +# @description : +###################################################################### + + +from rest_framework.test import APITestCase +from dongtai_common.models.server import IastServer +from 
dongtai_common.models.user import User +import gzip +import json +from rest_framework.test import RequestsClient +from rest_framework.authtoken.models import Token +from dongtai_common.models.agent import IastAgent +from dongtai_protocol.decrypter import parse_data + +REGISTER_JSON = { + "serverPath": "/Users/erzhuangniu/workspace/vul/SecExample", + "containerVersion": "9.0.46.0", + "pid": "1416", + "language": "JAVA", + "serverPort": "0", + "version": "v1.1.3", + "network": "{\"name\":\"en0\",\"ip\":\"192.168.2.143\"}", + "serverEnv": + "{java.runtime.name=OpenJDK Runtime Environment, spring.output.ansi.enabled=always, project.name=SpringSec, sun.boot.library.path=/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib, java.vm.version=25.312-b07, gopherProxySet=false, java.vm.vendor=Amazon.com Inc., java.vendor.url=https://aws.amazon.com/corretto/, path.separator=:, project.version=120901, java.vm.name=OpenJDK 64-Bit Server VM, file.encoding.pkg=sun.io, user.country=CN, sun.java.launcher=SUN_STANDARD, sun.os.patch.level=unknown, java.vm.specification.name=Java Virtual Machine Specification, user.dir=/Users/erzhuangniu/workspace/vul/SecExample, java.runtime.version=1.8.0_312-b07, java.awt.graphicsenv=sun.awt.CGraphicsEnvironment, java.endorsed.dirs=/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/endorsed, os.arch=x86_64, java.io.tmpdir=/var/folders/xy/xyx56h3s29z6376gvk32621h0000gn/T/, line.separator=
, java.vm.specification.vendor=Oracle Corporation, os.name=Mac OS X, sun.jnu.encoding=UTF-8, java.library.path=/Users/erzhuangniu/Library/Java/Extensions:/Library/Java/Extensions:/Network/Library/Java/Extensions:/System/Library/Java/Extensions:/usr/lib/java:., spring.jmx.enabled=true, java.specification.name=Java Platform API Specification, java.class.version=52.0, sun.management.compiler=HotSpot 64-Bit Tiered Compilers, spring.liveBeansView.mbeanDomain=, os.version=11.4, user.home=/Users/erzhuangniu, sun.net.http.allowRestrictedHeaders=true, user.timezone=, java.awt.printerjob=sun.lwawt.macosx.CPrinterJob, file.encoding=UTF-8, java.specification.version=1.8, java.class.path=/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/charsets.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext/cldrdata.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext/dnsns.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext/jaccess.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext/jfxrt.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext/localedata.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext/nashorn.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext/sunec.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext/sunjce_provider.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext/sunpkcs11.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext/zipfs.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/jce.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/jfr.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/jfxswt.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/jsse.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/management-agent.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/resources.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/rt.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/lib/ant-javafx.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/lib/dt.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/lib/javafx-mx.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/lib/jconsole.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/lib/packager.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/lib/sa-jdi.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/lib/tools.jar:/Users/erzhuangniu/workspace/vul/SecExample/target/classes:/Users/erzhuangniu/.m2/repository/org/springframework/boot/spring-boot-starter-thymeleaf/2.5.0/spring-boot-starter-thymeleaf-2.5.0.jar:/Users/erzhuangniu/.m2/repository/org/sp
ringframework/boot/spring-boot-starter/2.5.0/spring-boot-starter-2.5.0.jar:/Users/erzhuangniu/.m2/repository/org/springframework/boot/spring-boot-starter-logging/2.5.0/spring-boot-starter-logging-2.5.0.jar:/Users/erzhuangniu/.m2/repository/ch/qos/logback/logback-classic/1.2.3/logback-classic-1.2.3.jar:/Users/erzhuangniu/.m2/repository/ch/qos/logback/logback-core/1.2.3/logback-core-1.2.3.jar:/Users/erzhuangniu/.m2/repository/org/apache/logging/log4j/log4j-to-slf4j/2.14.1/log4j-to-slf4j-2.14.1.jar:/Users/erzhuangniu/.m2/repository/org/apache/logging/log4j/log4j-api/2.14.1/log4j-api-2.14.1.jar:/Users/erzhuangniu/.m2/repository/org/slf4j/jul-to-slf4j/1.7.30/jul-to-slf4j-1.7.30.jar:/Users/erzhuangniu/.m2/repository/jakarta/annotation/jakarta.annotation-api/1.3.5/jakarta.annotation-api-1.3.5.jar:/Users/erzhuangniu/.m2/repository/org/yaml/snakeyaml/1.28/snakeyaml-1.28.jar:/Users/erzhuangniu/.m2/repository/org/thymeleaf/thymeleaf-spring5/3.0.12.RELEASE/thymeleaf-spring5-3.0.12.RELEASE.jar:/Users/erzhuangniu/.m2/repository/org/thymeleaf/thymeleaf/3.0.12.RELEASE/thymeleaf-3.0.12.RELEASE.jar:/Users/erzhuangniu/.m2/repository/org/attoparser/attoparser/2.0.5.RELEASE/attoparser-2.0.5.RELEASE.jar:/Users/erzhuangniu/.m2/repository/org/unbescape/unbescape/1.1.6.RELEASE/unbescape-1.1.6.RELEASE.jar:/Users/erzhuangniu/.m2/repository/org/slf4j/slf4j-api/1.7.30/slf4j-api-1.7.30.jar:/Users/erzhuangniu/.m2/repository/org/thymeleaf/extras/thymeleaf-extras-java8time/3.0.4.RELEASE/thymeleaf-extras-java8time-3.0.4.RELEASE.jar:/Users/erzhuangniu/.m2/repository/org/springframework/boot/spring-boot-starter-web/2.5.0/spring-boot-starter-web-2.5.0.jar:/Users/erzhuangniu/.m2/repository/org/springframework/boot/spring-boot-starter-json/2.5.0/spring-boot-starter-json-2.5.0.jar:/Users/erzhuangniu/.m2/repository/com/fasterxml/jackson/core/jackson-databind/2.12.3/jackson-databind-2.12.3.jar:/Users/erzhuangniu/.m2/repository/com/fasterxml/jackson/core/jackson-annotations/2.12.3/jackson-annotations-2.12.3.jar:/Users/erzhuangniu/.m2/repository/com/fasterxml/jackson/core/jackson-core/2.12.3/jackson-core-2.12.3.jar:/Users/erzhuangniu/.m2/repository/com/fasterxml/jackson/datatype/jackson-datatype-jdk8/2.12.3/jackson-datatype-jdk8-2.12.3.jar:/Users/erzhuangniu/.m2/repository/com/fasterxml/jackson/datatype/jackson-datatype-jsr310/2.12.3/jackson-datatype-jsr310-2.12.3.jar:/Users/erzhuangniu/.m2/repository/com/fasterxml/jackson/module/jackson-module-parameter-names/2.12.3/jackson-module-parameter-names-2.12.3.jar:/Users/erzhuangniu/.m2/repository/org/springframework/boot/spring-boot-starter-tomcat/2.5.0/spring-boot-starter-tomcat-2.5.0.jar:/Users/erzhuangniu/.m2/repository/org/apache/tomcat/embed/tomcat-embed-core/9.0.46/tomcat-embed-core-9.0.46.jar:/Users/erzhuangniu/.m2/repository/org/apache/tomcat/embed/tomcat-embed-el/9.0.46/tomcat-embed-el-9.0.46.jar:/Users/erzhuangniu/.m2/repository/org/apache/tomcat/embed/tomcat-embed-websocket/9.0.46/tomcat-embed-websocket-9.0.46.jar:/Users/erzhuangniu/.m2/repository/org/springframework/spring-web/5.3.7/spring-web-5.3.7.jar:/Users/erzhuangniu/.m2/repository/org/springframework/spring-beans/5.3.7/spring-beans-5.3.7.jar:/Users/erzhuangniu/.m2/repository/org/springframework/spring-webmvc/5.3.7/spring-webmvc-5.3.7.jar:/Users/erzhuangniu/.m2/repository/org/springframework/spring-aop/5.3.7/spring-aop-5.3.7.jar:/Users/erzhuangniu/.m2/repository/org/springframework/spring-context/5.3.7/spring-context-5.3.7.jar:/Users/erzhuangniu/.m2/repository/org/springframework/spring-expression/5.3.7/spring-expression-
5.3.7.jar:/Users/erzhuangniu/.m2/repository/org/springframework/boot/spring-boot-devtools/2.5.0/spring-boot-devtools-2.5.0.jar:/Users/erzhuangniu/.m2/repository/org/springframework/boot/spring-boot/2.5.0/spring-boot-2.5.0.jar:/Users/erzhuangniu/.m2/repository/org/springframework/boot/spring-boot-autoconfigure/2.5.0/spring-boot-autoconfigure-2.5.0.jar:/Users/erzhuangniu/.m2/repository/org/mybatis/spring/boot/mybatis-spring-boot-starter/1.3.0/mybatis-spring-boot-starter-1.3.0.jar:/Users/erzhuangniu/.m2/repository/org/springframework/boot/spring-boot-starter-jdbc/2.5.0/spring-boot-starter-jdbc-2.5.0.jar:/Users/erzhuangniu/.m2/repository/com/zaxxer/HikariCP/4.0.3/HikariCP-4.0.3.jar:/Users/erzhuangniu/.m2/repository/org/springframework/spring-jdbc/5.3.7/spring-jdbc-5.3.7.jar:/Users/erzhuangniu/.m2/repository/org/springframework/spring-tx/5.3.7/spring-tx-5.3.7.jar:/Users/erzhuangniu/.m2/repository/org/mybatis/spring/boot/mybatis-spring-boot-autoconfigure/1.3.0/mybatis-spring-boot-autoconfigure-1.3.0.jar:/Users/erzhuangniu/.m2/repository/org/mybatis/mybatis/3.4.4/mybatis-3.4.4.jar:/Users/erzhuangniu/.m2/repository/org/mybatis/mybatis-spring/1.3.1/mybatis-spring-1.3.1.jar:/Users/erzhuangniu/.m2/repository/mysql/mysql-connector-java/5.1.31/mysql-connector-java-5.1.31.jar:/Users/erzhuangniu/.m2/repository/org/projectlombok/lombok/1.18.20/lombok-1.18.20.jar:/Users/erzhuangniu/.m2/repository/com/github/pagehelper/pagehelper-spring-boot-starter/1.3.0/pagehelper-spring-boot-starter-1.3.0.jar:/Users/erzhuangniu/.m2/repository/com/github/pagehelper/pagehelper-spring-boot-autoconfigure/1.3.0/pagehelper-spring-boot-autoconfigure-1.3.0.jar:/Users/erzhuangniu/.m2/repository/com/github/pagehelper/pagehelper/5.2.0/pagehelper-5.2.0.jar:/Users/erzhuangniu/.m2/repository/com/github/jsqlparser/jsqlparser/3.2/jsqlparser-3.2.jar:/Users/erzhuangniu/.m2/repository/com/alibaba/fastjson/1.2.24/fastjson-1.2.24.jar:/Users/erzhuangniu/.m2/repository/org/springframework/spring-core/5.3.7/spring-core-5.3.7.jar:/Users/erzhuangniu/.m2/repository/org/springframework/spring-jcl/5.3.7/spring-jcl-5.3.7.jar:/Users/erzhuangniu/workspace/nagent/DongTai-agent-java/release/iast-agent.jar:/Applications/IntelliJ IDEA.app/Contents/lib/idea_rt.jar, user.name=erzhuangniu, com.sun.management.jmxremote=, java.vm.specification.version=1.8, sun.java.command=com.suyu.secexample.SecexampleApplication, java.home=/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre, sun.arch.data.model=64, user.language=zh, java.specification.vendor=Oracle Corporation, awt.toolkit=sun.lwawt.macosx.LWCToolkit, java.vm.info=mixed mode, java.version=1.8.0_312, java.ext.dirs=/Users/erzhuangniu/Library/Java/Extensions:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/ext:/Library/Java/Extensions:/Network/Library/Java/Extensions:/System/Library/Java/Extensions:/usr/lib/java, 
sun.boot.class.path=/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/resources.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/rt.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/sunrsasign.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/jsse.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/jce.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/charsets.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/lib/jfr.jar:/Users/erzhuangniu/Library/Java/JavaVirtualMachines/corretto-1.8.0_312/Contents/Home/jre/classes, java.vendor=Amazon.com Inc., spring.application.admin.enabled=true, file.separator=/, java.vendor.url.bug=https://github.com/corretto/corretto-8/issues/, iast.server.mode=local, sun.io.unicode.encoding=UnicodeBig, sun.cpu.endian=little, sun.cpu.isalist=}", + "hostname": "localhost", + "serverAddr": "", + "containerName": "Apache Tomcat/9.0.46", + "name": "Mac OS X-localhost-v1.1.3-java.action.github.com12313", + "projectName": "SpringSec", + "projectVersion": "120901", + "autoCreateProject": 0, +} + +METHODPOOL_JSON = { + "detail": { + "reqHeader": + "c2VjLWZldGNoLW1vZGU6bmF2aWdhdGUKcmVmZXJlcjpodHRwOi8vbG9jYWxob3N0OjgwODAvCnNlYy1mZXRjaC1zaXRlOnNhbWUtb3JpZ2luCmFjY2VwdC1sYW5ndWFnZTp6aC1DTix6aDtxPTAuOQpjb29raWU6SWRlYS1mMmYwZDM1Nj1iN2I3MWM3Yi1kNWIzLTQzZWEtYWRiZC00OTJhMWM0ODE5ODc7IElkZWEtZjJmMGQzNTc9ZjY3NzQyYzEtYzZhNC00YTY2LTkwYmYtY2E3NGU5YzY4OThiCmR0LXRyYWNlaWQ6NzFhODRlYzlmNDhkNDJjYjk3NDkwMWVjOWNhNGI4MDAuMzQyMy4zNjg2LjAKc2VjLWZldGNoLXVzZXI6PzEKYWNjZXB0OnRleHQvaHRtbCxhcHBsaWNhdGlvbi94aHRtbCt4bWwsYXBwbGljYXRpb24veG1sO3E9MC45LGltYWdlL2F2aWYsaW1hZ2Uvd2VicCxpbWFnZS9hcG5nLCovKjtxPTAuOCxhcHBsaWNhdGlvbi9zaWduZWQtZXhjaGFuZ2U7dj1iMztxPTAuOQpzZWMtY2gtdWE6IiBOb3QgQTtCcmFuZCI7dj0iOTkiLCAiQ2hyb21pdW0iO3Y9Ijk2IiwgIkdvb2dsZSBDaHJvbWUiO3Y9Ijk2IgpzZWMtY2gtdWEtbW9iaWxlOj8wCnNlYy1jaC11YS1wbGF0Zm9ybToibWFjT1MiCmhvc3Q6bG9jYWxob3N0OjgwODAKdXBncmFkZS1pbnNlY3VyZS1yZXF1ZXN0czoxCmNvbm5lY3Rpb246a2VlcC1hbGl2ZQphY2NlcHQtZW5jb2Rpbmc6Z3ppcCwgZGVmbGF0ZSwgYnIKdXNlci1hZ2VudDpNb3ppbGxhLzUuMCAoTWFjaW50b3NoOyBJbnRlbCBNYWMgT1MgWCAxMF8xNV83KSBBcHBsZVdlYktpdC81MzcuMzYgKEtIVE1MLCBsaWtlIEdlY2tvKSBDaHJvbWUvOTYuMC40NjY0LjkzIFNhZmFyaS81MzcuMzYKc2VjLWZldGNoLWRlc3Q6ZG9jdW1lbnQK", + "agentId": + 3423, + "scheme": + "http", + "method": + "GET", + "contextPath": + "", + "pool": [{ + "invokeId": 1174, + "interfaces": [], + "targetHash": [490291580], + "targetValues": + "org.apache.tomcat.util.http.ValuesEnumerator@1d39417c", + "signature": "org.apache.catalina.connector.Request.getHeaders", + "originClassName": "org.apache.catalina.connector.Request", + "sourceValues": "accept-language", + "methodName": "getHeaders", + "className": "javax.servlet.http.HttpServletRequest", + "source": True, + "callerLineNumber": 3424, + "callerClass": "org.apache.catalina.connector.Request", + "args": "", + "callerMethod": "parseLocales", + "sourceHash": [], + "retClassName": "" + }, { + "invokeId": 1175, + "interfaces": [], + "targetHash": [419874307], + "targetValues": "zh-CN,zh;q=0.9", + "signature": + "org.apache.tomcat.util.http.ValuesEnumerator.nextElement", + "originClassName": "org.apache.tomcat.util.http.ValuesEnumerator", + "sourceValues": + "org.apache.tomcat.util.http.ValuesEnumerator@1d39417c", + "methodName": 
"nextElement", + "className": "java.util.Enumeration", + "source": False, + "callerLineNumber": 3427, + "callerClass": "org.apache.catalina.connector.Request", + "args": "", + "callerMethod": "parseLocales", + "sourceHash": [490291580], + "retClassName": "" + }, { + "invokeId": 1176, + "interfaces": [], + "targetHash": [1543785287], + "targetValues": "java.io.StringReader@5c044b47", + "signature": "java.io.StringReader.", + "originClassName": "java.io.StringReader", + "sourceValues": "zh-CN,zh;q=0.9", + "methodName": "", + "className": "java.io.StringReader", + "source": False, + "callerLineNumber": 3451, + "callerClass": "org.apache.catalina.connector.Request", + "args": "", + "callerMethod": "parseLocalesHeader", + "sourceHash": [419874307], + "retClassName": "" + }, { + "invokeId": 1177, + "interfaces": [], + "targetHash": [1628901220], + "targetValues": "cn.huoxian.iast.api.RequestWrapper@61170f64", + "signature": + "org.springframework.web.method.support.HandlerMethodArgumentResolverComposite.resolveArgument", + "originClassName": + "org.springframework.web.method.support.HandlerMethodArgumentResolverComposite", + "sourceValues": + "method 'vuln1' parameter 0 ModelAndViewContainer: reference to view with name 'cors/cors'; default model {name={\"敏感信息账号\": \"suyu\", \"敏感信息手机\": \"13888888888\",\"敏感信息qq\": \"10010\", \"敏感信息身份证\": \"321222222222222222\", \"敏感信息地址\": \"网商路699号阿里巴巴园区\"}} ServletWebRequest: uri=/cors1;client=0:0:0:0:0:0:0:1 org.springframework.web.servlet.mvc.method.annotation.ServletRequestDataBinderFactory@582953e7", + "methodName": "resolveArgument", + "className": + "org.springframework.web.method.support.HandlerMethodArgumentResolver", + "source": True, + "callerLineNumber": 170, + "callerClass": + "org.springframework.web.method.support.InvocableHandlerMethod", + "args": "", + "callerMethod": "getMethodArgumentValues", + "sourceHash": [], + "retClassName": "" + }, { + "invokeId": 1178, + "interfaces": [], + "targetHash": [438096027], + "targetValues": "cn.huoxian.iast.api.ResponseWrapper@1a1cd09b", + "signature": + "org.springframework.web.method.support.HandlerMethodArgumentResolverComposite.resolveArgument", + "originClassName": + "org.springframework.web.method.support.HandlerMethodArgumentResolverComposite", + "sourceValues": + "method 'vuln1' parameter 1 ModelAndViewContainer: reference to view with name 'cors/cors'; default model {name={\"敏感信息账号\": \"suyu\", \"敏感信息手机\": \"13888888888\",\"敏感信息qq\": \"10010\", \"敏感信息身份证\": \"321222222222222222\", \"敏感信息地址\": \"网商路699号阿里巴巴园区\"}} ServletWebRequest: uri=/cors1;client=0:0:0:0:0:0:0:1 org.springframework.web.servlet.mvc.method.annotation.ServletRequestDataBinderFactory@582953e7", + "methodName": "resolveArgument", + "className": + "org.springframework.web.method.support.HandlerMethodArgumentResolver", + "source": True, + "callerLineNumber": 170, + "callerClass": + "org.springframework.web.method.support.InvocableHandlerMethod", + "args": "", + "callerMethod": "getMethodArgumentValues", + "sourceHash": [], + "retClassName": "" + }, { + "invokeId": 1179, + "interfaces": [], + "targetHash": [1192543238], + "targetValues": + "org.apache.tomcat.util.http.ValuesEnumerator@4714c406", + "signature": + "javax.servlet.http.HttpServletRequestWrapper.getHeaders", + "originClassName": "javax.servlet.http.HttpServletRequestWrapper", + "sourceValues": "Accept", + "methodName": "getHeaders", + "className": "javax.servlet.http.HttpServletRequest", + "source": True, + "callerLineNumber": 135, + "callerClass": + 
"org.springframework.web.context.request.ServletWebRequest", + "args": "", + "callerMethod": "getHeaderValues", + "sourceHash": [], + "retClassName": "" + }, { + "invokeId": 1180, + "interfaces": [], + "targetHash": [929690313], + "targetValues": + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "signature": + "org.apache.tomcat.util.http.ValuesEnumerator.nextElement", + "originClassName": "org.apache.tomcat.util.http.ValuesEnumerator", + "sourceValues": + "org.apache.tomcat.util.http.ValuesEnumerator@4714c406", + "methodName": "nextElement", + "className": "java.util.Enumeration", + "source": False, + "callerLineNumber": 5294, + "callerClass": "java.util.Collections", + "args": "", + "callerMethod": "list", + "sourceHash": [1192543238], + "retClassName": "" + }, { + "invokeId": 1181, + "interfaces": [], + "targetHash": [1207614116], + "targetValues": + "[text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9]", + "signature": "java.util.Arrays.asList", + "originClassName": "java.util.Arrays", + "sourceValues": + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "methodName": "asList", + "className": "java.util.Arrays", + "source": False, + "callerLineNumber": 51, + "callerClass": + "org.springframework.web.accept.HeaderContentNegotiationStrategy", + "args": "", + "callerMethod": "resolveMediaTypes", + "sourceHash": [929690313, 2144890988, 929690313], + "retClassName": "" + }, { + "invokeId": 1182, + "interfaces": [], + "targetHash": [2065964968], + "targetValues": "text/html", + "signature": "java.lang.String.substring", + "originClassName": "java.lang.String", + "sourceValues": + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "methodName": "substring", + "className": "java.lang.String", + "source": False, + "callerLineNumber": 304, + "callerClass": "org.springframework.util.MimeTypeUtils", + "args": "", + "callerMethod": "tokenize", + "sourceHash": [929690313], + "retClassName": "" + }, { + "invokeId": 1183, + "interfaces": [], + "targetHash": [61035967], + "targetValues": "application/xhtml+xml", + "signature": "java.lang.String.substring", + "originClassName": "java.lang.String", + "sourceValues": + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "methodName": "substring", + "className": "java.lang.String", + "source": False, + "callerLineNumber": 304, + "callerClass": "org.springframework.util.MimeTypeUtils", + "args": "", + "callerMethod": "tokenize", + "sourceHash": [929690313], + "retClassName": "" + }, { + "invokeId": 1184, + "interfaces": [], + "targetHash": [1069817527], + "targetValues": "application/xml;q=0.9", + "signature": "java.lang.String.substring", + "originClassName": "java.lang.String", + "sourceValues": + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "methodName": "substring", + "className": "java.lang.String", + "source": False, + "callerLineNumber": 304, + "callerClass": "org.springframework.util.MimeTypeUtils", + "args": "", + "callerMethod": "tokenize", + "sourceHash": [929690313], + "retClassName": "" + }, { + "invokeId": 
1185, + "interfaces": [], + "targetHash": [734131239], + "targetValues": "image/avif", + "signature": "java.lang.String.substring", + "originClassName": "java.lang.String", + "sourceValues": + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "methodName": "substring", + "className": "java.lang.String", + "source": False, + "callerLineNumber": 304, + "callerClass": "org.springframework.util.MimeTypeUtils", + "args": "", + "callerMethod": "tokenize", + "sourceHash": [929690313], + "retClassName": "" + }, { + "invokeId": 1186, + "interfaces": [], + "targetHash": [1883465640], + "targetValues": "image/webp", + "signature": "java.lang.String.substring", + "originClassName": "java.lang.String", + "sourceValues": + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "methodName": "substring", + "className": "java.lang.String", + "source": False, + "callerLineNumber": 304, + "callerClass": "org.springframework.util.MimeTypeUtils", + "args": "", + "callerMethod": "tokenize", + "sourceHash": [929690313], + "retClassName": "" + }, { + "invokeId": 1187, + "interfaces": [], + "targetHash": [1767168690], + "targetValues": "image/apng", + "signature": "java.lang.String.substring", + "originClassName": "java.lang.String", + "sourceValues": + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "methodName": "substring", + "className": "java.lang.String", + "source": False, + "callerLineNumber": 304, + "callerClass": "org.springframework.util.MimeTypeUtils", + "args": "", + "callerMethod": "tokenize", + "sourceHash": [929690313], + "retClassName": "" + }, { + "invokeId": 1188, + "interfaces": [], + "targetHash": [1987727497], + "targetValues": "*/*;q=0.8", + "signature": "java.lang.String.substring", + "originClassName": "java.lang.String", + "sourceValues": + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "methodName": "substring", + "className": "java.lang.String", + "source": False, + "callerLineNumber": 304, + "callerClass": "org.springframework.util.MimeTypeUtils", + "args": "", + "callerMethod": "tokenize", + "sourceHash": [929690313], + "retClassName": "" + }, { + "invokeId": 1189, + "interfaces": [], + "targetHash": [1388241581], + "targetValues": "application/signed-exchange;v=b3;q=0.9", + "signature": "java.lang.String.substring", + "originClassName": "java.lang.String", + "sourceValues": + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "methodName": "substring", + "className": "java.lang.String", + "source": False, + "callerLineNumber": 314, + "callerClass": "org.springframework.util.MimeTypeUtils", + "args": "", + "callerMethod": "tokenize", + "sourceHash": [929690313], + "retClassName": "" + }], + "secure": + False, + "uri": + "/cors1", + "url": + "http://localhost:8080/cors1", + "protocol": + "HTTP/1.1", + "replayRequest": + False, + "resBody": + "\n\n\n \n Java漏洞靶场<\\/title>\n <link rel=\"stylesheet\" href=\"https://cdn.jsdelivr.net/npm/semantic-ui@2.4.2/dist/semantic.min.css\">\n<\\/head>\n<body>\n\n<div style=\"padding: 40px;\n text-align: center;\n background: #1abc9c;\n color: white;\">\n <h1>Java漏洞演示平台<\\/h1>\n <button class=\"ui inverted 
secondary basic button\"><a style=\"color: white\" href=\"home\">回到首页<\\/a><\\/button>\n<\\/div>\n\n<div style=\"text-align: center;margin: 0px auto;\n margin-top: 50px;\">\n 您获取的结果为:<p>{"敏感信息账号": "suyu", "敏感信息手机": "13888888888","敏感信息qq": "10010", "敏感信息身份证": "321222222222222222", "敏感信息地址": "网商路699号阿里巴巴园区"}<\\/p>\n<\\/div>\n\n<div>\n <form action=\"/core3\" method=\"post\">\n <input type=\"text\" name=\"message\" placeholder=\"请输入敏感信息\">\n <input type=\"submit\" value=\"提交\">\n <\\/form>\n <p><\\/p>\n <!-- <p>提示<\\/p>-->\n <!-- <p>\"txf\" and \"1\"=\"1\"<\\/p>-->\n <!-- <p>\"txf\" and \"1\"=\"2\"<\\/p>-->\n<\\/div>\n\n\n\n<\\/body>\n<\\/html>\n", + "clientIp": + "127.0.0.1", + "reqBody": + "", + "resHeader": + "SFRUUC8xLjEgMjAwCkRvbmdUYWk6djEuMS4zClZhcnk6T3JpZ2luClZhcnk6T3JpZ2luClZhcnk6T3JpZ2luCg==" + }, + "type": 36 +} + + +class AgentTestCase(APITestCase): + def setUp(self): + self.user = User.objects.filter(pk=1).first() + self.client.force_authenticate(user=self.user) + data =self.register_agent(name='test') + self.agent_id = data['id'] + + def raw_register(self, **kwargs): + data = gzipdata(REGISTER_JSON) + token, success = Token.objects.get_or_create(user=self.user) + response = self.client.post( + 'http://testserver/api/v1/agent/register', + data=data, + HTTP_CONTENT_ENCODING='gzip', + content_type='application/json', + ) + return response + + def agent_heartbeat(self, **kwargs): + heartbeatdata = { + "detail": { + "agentId": self.agent_id, + "disk": "{}", + "memory": + "{\"total\":\"2GB\",\"rate\":0,\"use\":\"12421312312.115MB\"}", + "returnQueue": 0, + "cpu": "{\"rate\":5323123}" + }, + "type": 1 + } + heartbeatdata.update(kwargs) + data = gzipdata(heartbeatdata) + response = self.client.post( + 'http://testserver/api/v1/report/upload', + data=heartbeatdata, + HTTP_CONTENT_ENCODING='gzip', + content_type='application/json', + ) + return response + def agent_method_pool(self, **kwargs): + method_pool_data = METHODPOOL_JSON + method_pool_data['detail']['agentId'] = self.agent_id + method_pool_data['detail'].update(kwargs) + data = gzipdata(method_pool_data) + response = self.client.post( + 'http://testserver/api/v1/report/upload', + data=data, + HTTP_CONTENT_ENCODING='gzip', + content_type='application/json', + ) + return response + + def agent_report(self, json, **kwargs): + reportjson1 = json + reportjson1['detail']['agentId'] = self.agent_id + reportjson1['detail'].update(kwargs) + data = gzipdata(reportjson1) + response = self.client.post( + 'http://testserver/api/v1/report/upload', + data=data, + HTTP_CONTENT_ENCODING='gzip', + content_type='application/json', + ) + return response + + def register_agent(self, **kwargs): + register_data = REGISTER_JSON + register_data.update(kwargs) + data = gzipdata(REGISTER_JSON) + response = self.client.post('http://testserver/api/v1/agent/register', + data=data, + HTTP_CONTENT_ENCODING='gzip', + content_type='application/json', + ) + return json.loads(response.content)['data'] + + def tearDown(self): + pass + # IastAgent.objects.filter(pk=self.agent_id).delete() + + +def gzipdata(data): + return gzip.compress(json.dumps(data).encode('utf-8')) diff --git a/test/apiserver/test_agent_hardencode_vuln.py b/test/apiserver/test_agent_hardencode_vuln.py new file mode 100644 index 000000000..10bbf8618 --- /dev/null +++ b/test/apiserver/test_agent_hardencode_vuln.py @@ -0,0 +1,72 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : test_agent_hardencode_vuln +# @created : 星期六 
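+#
+# Note: the test cases in this file reuse AgentTestCase from
+# test/apiserver/test_agent_base.py; its agent_report() helper gzips the JSON
+# report and POSTs it to the agent report endpoint, roughly (a sketch with a
+# placeholder "detail" payload):
+#
+#     payload = gzipdata({"type": 37, "detail": {...}})
+#     self.client.post('http://testserver/api/v1/report/upload',
+#                      data=payload,
+#                      HTTP_CONTENT_ENCODING='gzip',
+#                      content_type='application/json')
+#
+# One apparent inconsistency in that base class: agent_heartbeat() builds a
+# gzipped payload but posts the raw dict (data=heartbeatdata); if gzip encoding
+# is intended there, posting the compressed bytes (data=data) would match the
+# other report helpers.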
12月 18, 2021 11:40:31 CST +# +# @description : +###################################################################### + + + +from test.apiserver.test_agent_base import AgentTestCase +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_method_pool import MethodPool +from dongtai_common.models.strategy import IastStrategyModel +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +import json + +class AgentHardencodeTestCase(AgentTestCase): + + + def test_agent_hardencode_vuln(self): + json_ = { + "detail": { + "file": "SpringsecApplication.java", + "field": "PASSWORD", + "isJdk": False, + "class": "org.iast.springsec.SpringsecApplication", + "value": "1111" + }, + "type": 37 + } + + res = self.agent_report(json_) + assert res.status_code == 200 + strategy = IastStrategyModel.objects.filter(user_id=1, + vul_type='硬编码').first() + if strategy and strategy.state == 'enable': + assert IastVulnerabilityModel.objects.filter( + strategy=strategy).exists() + + def test_agent_hardencode_vuln_other(self): + jsondata = { + "detail": { + "agentId": self.agent_id, + "file": "DriverDataSource.java", + "field": "PASSWORD", + "isJdk": False, + "class": "com.zaxxer.hikari.util.DriverDataSource", + "value": "password" + }, + "type": 37 + } + res = self.agent_report(jsondata) + assert res.status_code == 200 + strategy = IastStrategyModel.objects.filter(user_id=1, + vul_type='硬编码').first() + if strategy and strategy.state == 'enable': + assert IastVulnerabilityModel.objects.filter( + strategy=strategy, + full_stack=json.dumps(jsondata['detail'])).exists() + vul = IastVulnerabilityModel.objects.filter( + strategy=strategy, + full_stack=json.dumps(jsondata['detail'])).first() + assert vul is not None + if vul: + assert vul.uri == jsondata['detail']['file'] + assert vul.url == jsondata['detail']['class'] + assert vul.taint_position == jsondata['detail']['field'] + assert vul.top_stack == "字段:{}".format( + jsondata['detail']['field']) + assert vul.bottom_stack == "硬编码值:{}".format( + jsondata['detail']['value']) diff --git a/test/apiserver/test_agent_method_pool.py b/test/apiserver/test_agent_method_pool.py new file mode 100644 index 000000000..cb24fda63 --- /dev/null +++ b/test/apiserver/test_agent_method_pool.py @@ -0,0 +1,1097 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : test_agent_method_pool +# @created : 星期一 12月 13, 2021 17:21:33 CST +# +# @description : +###################################################################### + + +from test.apiserver.test_agent_base import AgentTestCase,gzipdata +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.agent_method_pool import MethodPool +import gzip +import base64 + +class AgentMethodPoolTestCase(AgentTestCase): + + def test_agent_method_pool_upload(self): + method = { + "detail": { + "agentId": + 3490, + "clientIp": + "172.19.0.3", + "language": + "PHP", + "method": + "POST", + "pool": [{ + "args": "", + "callerClass": "", + "callerLineNumber": 0, + "callerMethod": "", + "className": "", + "interfaces": "[]", + "invokeId": 21, + "methodName": "$_POST", + "originClassName": "", + "retClassName": "", + "signature": ".$_POST", + "source": True, + "sourceHash": "3506402", + "sourceValues": "root,", + "targetHash": "3506402", + "targetValues": "root" + }, { + "args": "", + "callerClass": "", + "callerLineNumber": 0, + "callerMethod": "", + "className": "", + "interfaces": "[]", + "invokeId": 22, + "methodName": 
"$_POST", + "originClassName": "", + "retClassName": "", + "signature": ".$_POST", + "source": True, + "sourceHash": "2430751009", + "sourceValues": "12312334,", + "targetHash": "2430751009", + "targetValues": "12312334" + }, { + "args": "", + "callerClass": "", + "callerLineNumber": 0, + "callerMethod": "", + "className": "", + "interfaces": "[]", + "invokeId": 23, + "methodName": "$_POST", + "originClassName": "", + "retClassName": "", + "signature": ".$_POST", + "source": True, + "sourceHash": "2487299128", + "sourceValues": "Submit,", + "targetHash": "2487299128", + "targetValues": "Submit" + }, { + "args": "root", + "callerClass": "", + "callerLineNumber": 0, + "callerMethod": "", + "className": "", + "interfaces": "[]", + "invokeId": 24, + "methodName": "", + "originClassName": "", + "retClassName": "String", + "signature": "", + "source": False, + "sourceHash": "3506402", + "sourceValues": "root,", + "targetHash": "3264164444,3264164444", + "targetValues": "User Name:root,User Name:root" + }, { + "args": "User Name:root,\n", + "callerClass": "", + "callerLineNumber": 0, + "callerMethod": "", + "className": "", + "interfaces": "[]", + "invokeId": 25, + "methodName": "", + "originClassName": "", + "retClassName": "String", + "signature": "", + "source": False, + "sourceHash": "3264164444,10", + "sourceValues": "User Name:root,\n,", + "targetHash": "2404849966,2404849966", + "targetValues": "User Name:root\n,User Name:root\n" + }, { + "args": "User Name:root\n", + "callerClass": "", + "callerLineNumber": 53, + "callerMethod": "main", + "className": "", + "interfaces": "[]", + "invokeId": 26, + "methodName": "fwrite", + "originClassName": "", + "retClassName": "", + "signature": ".fwrite", + "source": False, + "sourceHash": "2404849966", + "sourceValues": "User Name:root\n,", + "targetHash": "", + "targetValues": "" + }, { + "args": "12312334", + "callerClass": "", + "callerLineNumber": 0, + "callerMethod": "", + "className": "", + "interfaces": "[]", + "invokeId": 27, + "methodName": "", + "originClassName": "", + "retClassName": "String", + "signature": "", + "source": False, + "sourceHash": "2430751009", + "sourceValues": "12312334,", + "targetHash": "2573646592,2573646592", + "targetValues": "Password:12312334,Password:12312334" + }, { + "args": + "Password:12312334,\n", + "callerClass": + "", + "callerLineNumber": + 0, + "callerMethod": + "", + "className": + "", + "interfaces": + "[]", + "invokeId": + 28, + "methodName": + "", + "originClassName": + "", + "retClassName": + "String", + "signature": + "", + "source": + False, + "sourceHash": + "2573646592,10", + "sourceValues": + "Password:12312334,\n,", + "targetHash": + "2473633034,2473633034", + "targetValues": + "Password:12312334\n,Password:12312334\n" + }, { + "args": "Password:12312334\n", + "callerClass": "", + "callerLineNumber": 54, + "callerMethod": "main", + "className": "", + "interfaces": "[]", + "invokeId": 29, + "methodName": "fwrite", + "originClassName": "", + "retClassName": "", + "signature": ".fwrite", + "source": False, + "sourceHash": "2473633034", + "sourceValues": "Password:12312334\n,", + "targetHash": "", + "targetValues": "" + }, { + "args": "root", + "callerClass": "", + "callerLineNumber": 0, + "callerMethod": "", + "className": "", + "interfaces": "[]", + "invokeId": 30, + "methodName": "", + "originClassName": "", + "retClassName": "String", + "signature": "", + "source": False, + "sourceHash": "3506402", + "sourceValues": "root,", + "targetHash": "34906116,34906116", + "targetValues": "\"root,\"root" 
+ }, { + "args": "\"root,\"", + "callerClass": "", + "callerLineNumber": 0, + "callerMethod": "", + "className": "", + "interfaces": "[]", + "invokeId": 31, + "methodName": "", + "originClassName": "", + "retClassName": "String", + "signature": "", + "source": False, + "sourceHash": "34906116,34", + "sourceValues": "\"root,\",", + "targetHash": "1082089630,1082089630", + "targetValues": "\"root\",\"root\"" + }, { + "args": "12312334", + "callerClass": "", + "callerLineNumber": 0, + "callerMethod": "", + "className": "", + "interfaces": "[]", + "invokeId": 32, + "methodName": "", + "originClassName": "", + "retClassName": "String", + "signature": "", + "source": False, + "sourceHash": "2430751009", + "sourceValues": "12312334,", + "targetHash": "1106841411,1106841411", + "targetValues": "\"12312334,\"12312334" + }, { + "args": "\"12312334,\"", + "callerClass": "", + "callerLineNumber": 0, + "callerMethod": "", + "className": "", + "interfaces": "[]", + "invokeId": 33, + "methodName": "", + "originClassName": "", + "retClassName": "String", + "signature": "", + "source": False, + "sourceHash": "1106841411,34", + "sourceValues": "\"12312334,\",", + "targetHash": "4247312703,4247312703", + "targetValues": "\"12312334\",\"12312334\"" + }, { + "args": + "", + "callerClass": + "", + "callerLineNumber": + 61, + "callerMethod": + "", + "className": + "", + "interfaces": + "[]", + "invokeId": + 34, + "methodName": + "ZEND_ROPE_END", + "originClassName": + "", + "retClassName": + "String", + "signature": + ".ZEND_ROPE_END", + "source": + False, + "sourceHash": + "3935959953,1082089630,1478523430,4247312703,1876301945", + "sourceValues": + "SELECT username, password FROM users WHERE username=(,\"root\",) and password=(,\"12312334\",) LIMIT 0,1,", + "targetHash": + "3553945957", + "targetValues": + "SELECT username, password FROM users WHERE username=(\"root\") and password=(\"12312334\") LIMIT 0,1" + }, { + "args": + "SELECT username, password FROM users WHERE username=(\"root\") and password=(\"12312334\") LIMIT 0,1,\"12312334", + "callerClass": "", + "callerLineNumber": 62, + "callerMethod": "main", + "className": "", + "interfaces": "[]", + "invokeId": 35, + "methodName": "mysqli_query", + "originClassName": "", + "retClassName": "", + "signature": ".mysqli_query", + "source": False, + "sourceHash": "3553945957,1106841411", + "sourceValues": + "SELECT username, password FROM users WHERE username=(\"root\") and password=(\"12312334\") LIMIT 0,1,\"12312334,", + "targetHash": "", + "targetValues": "" + }], + "protocol": + "HTTP/1.1", + "replayRequest": + False, + "reqBody": + "", + "reqHeader": + "", + "resBody": + "", + "resHeader": + "", + "scheme": + "http", + "secure": + False, + "uri": + "/Less-12/", + "url": + "http://127.0.0.1:8008" + }, + "type": 36 + } + method['detail']['agentId'] = self.agent_id + data = gzipdata(method) + response = self.client.post('http://testserver/api/v1/report/upload', + data=data, + HTTP_CONTENT_ENCODING='gzip', + content_type='application/json', + ) + assert response.status_code == 200 + res = MethodPool.objects.filter(agent_id=self.agent_id).all() + assert len(res) == 1 + + + def test_agent_method_pool_from_go_agent(self): + data = { + "type": 36, + "detail": { + "agentId": + 4025, + "disk": + "", + "memory": + "", + "cpu": + "", + "methodQueue": + 0, + "replayQueue": + 0, + "reqCount": + 0, + "reportQueue": + 0, + "packagePath": + "", + "packageSignature": + "", + "packageName": + "", + "packageAlgorithm": + "", + "uri": + "/sqli1", + "url": + 
"http://localhost:9999/sqli123132123313132321123231", + "protocol": + "HTTP/1.1", + "contextPath": + "", + "pool": [{ + "invokeId": + 40252101640145388, + "interfaces": [], + "targetHash": + ["824634910755", "824634910761", "0", "0", "0", "0"], + "targetValues": + "Level low ", + "signature": + "go-agent/core/httpRequestCookie.Cookie(0xc00014e100, {0x8420f8, 0x5})\n", + "originClassName": + "http.(*Request)", + "sourceValues": + "Level ", + "methodName": + "Cookie", + "className": + "http.(*Request)", + "source": + True, + "callerLineNumber": + 49, + "callerClass": + "github.com/govwa/util", + "args": + "[\"Level\"]", + "callerMethod": + "GetCookie(0xc00014e100, {0x8420f8, 0x5})\n", + "sourceHash": ["8659192"], + "retClassName": + "*http.Cookie " + }, { + "invokeId": + 40252101640145389, + "interfaces": [], + "targetHash": [ + "824634288360", "824634288368", "824634288378", + "824634288384", "824634288396", "824634288400", + "824634288416", "0" + ], + "targetValues": + "root Aa@6447985 govwa localhost 3306 http://localhost 9999 ", + "signature": + "go-agent/core/jsonUnmarshal.Unmarshal({0xc000324200, 0xd9, 0x200}, {0x79e520, 0xc0001da580})\n", + "originClassName": + "fmt", + "sourceValues": + "", + "methodName": + "Sprintf", + "className": + "fmt", + "source": + True, + "callerLineNumber": + 29, + "callerClass": + "github.com/govwa/util/config", + "args": + "[\"ewogICAgInVzZXIiOiAicm9vdCIsCiAgICAicGFzc3dvcmQiOiAiQWFANjQ0Nzk4NSIsCiAgICAiZGJuYW1lIjogImdvdndhIiwKICAgICJzcWxob3N0IjogImxvY2FsaG9zdCIsCiAgICAic3FscG9ydCI6ICIzMzA2IiwKICAgICJ3ZWJzZXJ2ZXIiOiAiaHR0cDovL2xvY2FsaG9zdCIsCiAgICAid2VicG9ydCI6ICI5OTk5IiwKCiAgICAic2Vzc2lvbmtleToiOiAiRzBWdzQ0NCIKfQ==\"]", + "callerMethod": + "LoadConfig()\n", + "sourceHash": + None, + "retClassName": + "*config.Config " + }, { + "invokeId": + 40252101640145390, + "interfaces": [], + "targetHash": ["824636572896"], + "targetValues": + "root:Aa@6447985@tcp(localhost:3306)/ ", + "signature": + "go-agent/core/fmtSprintf.Sprintf({0x84afe4, 0x11}, {0xc00032c4b8, 0x4, 0x4})\n", + "originClassName": + "fmt", + "sourceValues": + "%s:%s@tcp(%s:%s)/ root Aa@6447985 localhost 3306 ", + "methodName": + "Sprintf", + "className": + "fmt", + "source": + False, + "callerLineNumber": + 18, + "callerClass": + "github.com/govwa/util/database", + "args": + "[\"%s:%s@tcp(%s:%s)/\",[\"root\",\"Aa@6447985\",\"localhost\",\"3306\"]]", + "callerMethod": + "Connect()\n", + "sourceHash": [ + "8695780", "824634288360", "824634288368", + "824634288384", "824634288396" + ], + "retClassName": + "string " + }, { + "invokeId": + 40252101640145391, + "interfaces": [], + "targetHash": ["824636573472"], + "targetValues": + "root:Aa@6447985@tcp(localhost:3306)/govwa ", + "signature": + "go-agent/core/fmtSprintf.Sprintf({0x84c9df, 0x13}, {0xc00032c4f8, 0x5, 0x5})\n", + "originClassName": + "fmt", + "sourceValues": + "%s:%s@tcp(%s:%s)/%s root Aa@6447985 localhost 3306 govwa ", + "methodName": + "Sprintf", + "className": + "fmt", + "source": + False, + "callerLineNumber": + 30, + "callerClass": + "github.com/govwa/util/database", + "args": + "[\"%s:%s@tcp(%s:%s)/%s\",[\"root\",\"Aa@6447985\",\"localhost\",\"3306\",\"govwa\"]]", + "callerMethod": + "Connect()\n", + "sourceHash": [ + "8702431", "824634288360", "824634288368", + "824634288384", "824634288396", "824634288378" + ], + "retClassName": + "string " + }, { + "invokeId": + 40252101640145390, + "interfaces": [], + "targetHash": + ["824634910484", "824634910490", "0", "0", "0", "0"], + "targetValues": + "govwa 
MTY0MDE0NDg3NHxEdi1CQkFFQ180SUFBUkFCRUFBQVh2LUNBQU1HYzNSeWFXNW5EQThBRFdkdmRuZGhYM05sYzNOcGIyNEVZbTl2YkFJQ0FBRUdjM1J5YVc1bkRBY0FCWFZ1WVcxbEJuTjBjbWx1Wnd3SEFBVmhaRzFwYmdaemRISnBibWNNQkFBQ2FXUUdjM1J5YVc1bkRBTUFBVEU9fPfvm5eU0A5drQKDLDOgC_ffWcZue0sMf7EbJ7H5XzIj ", + "signature": + "go-agent/core/httpRequestCookie.Cookie(0xc00014e100, {0x8424b8, 0x5})\n", + "originClassName": + "http.(*Request)", + "sourceValues": + "govwa ", + "methodName": + "Cookie", + "className": + "http.(*Request)", + "source": + True, + "callerLineNumber": + 91, + "callerClass": + "github.com/gorilla/sessions.(*CookieStore)", + "args": + "[\"govwa\"]", + "callerMethod": + "New(0xc0000b6ce0, 0xc00014e100, {0x8424b8, 0x5})\n", + "sourceHash": ["8660152"], + "retClassName": + "*http.Cookie " + }, { + "invokeId": + 40252101640145391, + "interfaces": [], + "targetHash": + ["824634910748", "824634910752", "0", "0", "0", "0"], + "targetValues": + "Uid 1 ", + "signature": + "go-agent/core/httpRequestCookie.Cookie(0xc00014e100, {0x8413f6, 0x3})\n", + "originClassName": + "http.(*Request)", + "sourceValues": + "Uid ", + "methodName": + "Cookie", + "className": + "http.(*Request)", + "source": + True, + "callerLineNumber": + 49, + "callerClass": + "github.com/govwa/util", + "args": + "[\"Uid\"]", + "callerMethod": + "GetCookie(0xc00014e100, {0x8413f6, 0x3})\n", + "sourceHash": ["8655862"], + "retClassName": + "*http.Cookie " + }, { + "invokeId": 40252101640145392, + "interfaces": [], + "targetHash": ["824635081280"], + "targetValues": + "SELECT p.user_id, p.full_name, p.city, p.phone_number \n\t\t\t\t\t\t\t\tFROM Profile as p,Users as u \n\t\t\t\t\t\t\t\twhere p.user_id = u.id \n\t\t\t\t\t\t\t\tand u.id=1 ", + "signature": + "go-agent/core/fmtSprintf.Sprintf({0x86883b, 0x90}, {0xc00032c6c0, 0x1, 0x1})\n", + "originClassName": "fmt", + "sourceValues": + "SELECT p.user_id, p.full_name, p.city, p.phone_number \n\t\t\t\t\t\t\t\tFROM Profile as p,Users as u \n\t\t\t\t\t\t\t\twhere p.user_id = u.id \n\t\t\t\t\t\t\t\tand u.id=%s 1 ", + "methodName": "Sprintf", + "className": "fmt", + "source": False, + "callerLineNumber": 38, + "callerClass": + "github.com/govwa/vulnerability/sqli.(*Profile)", + "args": + "[\"SELECT p.user_id, p.full_name, p.city, p.phone_number \\n\\t\\t\\t\\t\\t\\t\\t\\tFROM Profile as p,Users as u \\n\\t\\t\\t\\t\\t\\t\\t\\twhere p.user_id = u.id \\n\\t\\t\\t\\t\\t\\t\\t\\tand u.id=%s\",[\"1\"]]", + "callerMethod": + "UnsafeQueryGetData(0xc0002925c0, {0xc000122820, 0x1})\n", + "sourceHash": ["8816699", "824634910752"], + "retClassName": "string " + }, { + "invokeId": 40252101640145393, + "interfaces": [], + "targetHash": None, + "targetValues": "", + "signature": + "go-agent/core/sqlDBQuery.Query(0xc0001c0a90, {0xc00014c240, 0x8f}, {0x0, 0x0, 0x0})\n", + "originClassName": "sql.(*DB)", + "sourceValues": + "SELECT p.user_id, p.full_name, p.city, p.phone_number \n\t\t\t\t\t\t\t\tFROM Profile as p,Users as u \n\t\t\t\t\t\t\t\twhere p.user_id = u.id \n\t\t\t\t\t\t\t\tand u.id=1 ", + "methodName": "Query", + "className": "sql.(*DB)", + "source": False, + "callerLineNumber": 42, + "callerClass": + "github.com/govwa/vulnerability/sqli.(*Profile)", + "args": + "[\"SELECT p.user_id, p.full_name, p.city, p.phone_number \\n\\t\\t\\t\\t\\t\\t\\t\\tFROM Profile as p,Users as u \\n\\t\\t\\t\\t\\t\\t\\t\\twhere p.user_id = u.id \\n\\t\\t\\t\\t\\t\\t\\t\\tand u.id=1\",None]", + "callerMethod": + "UnsafeQueryGetData(0xc0002925c0, {0xc000122820, 0x1})\n", + "sourceHash": ["824635081280"], + "retClassName": "*sql.Rows *errors.errorString 
" + }], + "language": + "GO", + "clientIp": + "[::1]:53457", + "secure": + False, + "queryString": + "", + "replayRequest": + False, + "method": + "GET", + "reqHeader": + "eyJBY2NlcHQiOlsidGV4dC9odG1sLGFwcGxpY2F0aW9uL3hodG1sK3htbCxhcHBsaWNhdGlvbi94bWw7cT0wLjksaW1hZ2Uvd2VicCxpbWFnZS9hcG5nLCovKjtxPTAuOCxhcHBsaWNhdGlvbi9zaWduZWQtZXhjaGFuZ2U7dj1iMztxPTAuOSJdLCJBY2NlcHQtRW5jb2RpbmciOlsiZ3ppcCwgZGVmbGF0ZSwgYnIiXSwiQWNjZXB0LUxhbmd1YWdlIjpbInpoLUNOLHpoO3E9MC45LGVuLUdCO3E9MC44LGVuO3E9MC43LGVuLVVTO3E9MC42Il0sIkNvbm5lY3Rpb24iOlsia2VlcC1hbGl2ZSJdLCJDb29raWUiOlsiSG1fbHZ0XzY5YmUxNTIzNTFlNDc5YjhiNjRmNzdhOTM0NzAzYTU1PTE2Mzk3MjcwNjA7IGdvdndhPU1UWTBNREUwTkRnM05IeEVkaTFDUWtGRlExODBTVUZCVWtGQ1JVRkJRVmgyTFVOQlFVMUhZek5TZVdGWE5XNUVRVGhCUkZka2RtUnVaR2hZTTA1c1l6Tk9jR0l5TkVWWmJUbDJZa0ZKUTBGQlJVZGpNMUo1WVZjMWJrUkJZMEZDV0ZaMVdWY3hiRUp1VGpCamJXeDFXbmQzU0VGQlZtaGFSekZ3WW1kYWVtUklTbkJpYldOTlFrRkJRMkZYVVVkak0xSjVZVmMxYmtSQlRVRkJWRVU5ZlBmdm01ZVUwQTVkclFLRExET2dDX2ZmV2NadWUwc01mN0ViSjdINVh6SWo7IFVpZD0xOyBMZXZlbD1sb3ciXSwiUmVmZXJlciI6WyJodHRwOi8vbG9jYWxob3N0Ojk5OTkvc3FsaTEiXSwiU2VjLUNoLVVhIjpbIlwiIE5vdCBBO0JyYW5kXCI7dj1cIjk5XCIsIFwiQ2hyb21pdW1cIjt2PVwiOTZcIiwgXCJNaWNyb3NvZnQgRWRnZVwiO3Y9XCI5NlwiIl0sIlNlYy1DaC1VYS1Nb2JpbGUiOlsiPzAiXSwiU2VjLUNoLVVhLVBsYXRmb3JtIjpbIlwiV2luZG93c1wiIl0sIlNlYy1GZXRjaC1EZXN0IjpbImRvY3VtZW50Il0sIlNlYy1GZXRjaC1Nb2RlIjpbIm5hdmlnYXRlIl0sIlNlYy1GZXRjaC1TaXRlIjpbInNhbWUtb3JpZ2luIl0sIlNlYy1GZXRjaC1Vc2VyIjpbIj8xIl0sIlVwZ3JhZGUtSW5zZWN1cmUtUmVxdWVzdHMiOlsiMSJdLCJVc2VyLUFnZW50IjpbIk1vemlsbGEvNS4wIChXaW5kb3dzIE5UIDEwLjA7IFdpbjY0OyB4NjQpIEFwcGxlV2ViS2l0LzUzNy4zNiAoS0hUTUwsIGxpa2UgR2Vja28pIENocm9tZS85Ni4wLjQ2NjQuMTEwIFNhZmFyaS81MzcuMzYgRWRnLzk2LjAuMTA1NC42MiJdfQ==", + "reqBody": + "", + "resBody": + " \u003cp\u003eYour Profile :\u003c/p\u003e\n sql: converting argument $1 type: unsupported type []interface {}, a slice of interface \n\u003cpre\u003e\nUid : 1\nName : \nCity : \nNumber : \n\u003c/pre\u003e\n \u003cdiv class=\"more-info\"\u003e\n \u003cspan\u003eMore Info :\u003c/span\u003e\n \u003ca target=\"_blank\" href=\"http://www.sqlinjection.net/union/\"\u003ehttp://www.sqlinjection.net/union/\u003c/a\u003e\n \u003ca target=\"_blank\" href=\"https://www.owasp.org/index.php/SQL_Injection\"\u003ehttps://www.owasp.org/index.php/SQL_Injection\u003c/a\u003e\n \u003c/div\u003e\n \u003c/div\u003e\n \u003c/div\u003e\n \u003c/div\u003e\n\u003c/div\u003e\n\n\u003c/div\u003e\n\n\n \u003cfooter class=\"footer\"\u003e\n \u003cdiv class=\"container\"\u003e\n \u003cspan\u003e\u003ci class=\"fa fa-copyright\"\u003e\u003c/i\u003eNemosecurity\u003c/span\u003e\n \u003c/div\u003e\n \u003c/footer\u003e\n\u003c/div\u003e\n\n\u003c/body\u003e\n\n\u003c/html\u003e\n \u003cli\u003e\u003ca href=\"idor1\"\u003eIDOR 1\u003c/a\u003e\u003c/li\u003e\n \u003cli\u003e\u003ca href=\"idor2\"\u003eIDOR 2\u003c/a\u003e\u003c/li\u003e\n \u003c/ul\u003e\n\n \n \u003cli\u003e\n \u003ca href=\"csa\"\u003e\n \u003ci class=\"fa fa-bug fa-lg\"\u003e\u003c/i\u003e Client Side Auth\n \u003c/a\u003e\n \u003c/li\u003e\n \u003cli style=\"height:35px\"\u003e\n \u003c/li\u003e\n \u003cli\u003e\n \u003ca href=\"setting\"\u003e\n \u003ci class=\"glyphicon glyphicon-cog fa-lg\"\u003e\u003c/i\u003e Setting\n \u003c/a\u003e\n \u003c/li\u003e\n \u003cli\u003e\n \u003ca href=\"logout\"\u003e\n \u003ci class=\"fa fa-sign-out fa-lg\"\u003e\u003c/i\u003e Logout\n \u003c/a\u003e\n \u003c/li\u003e\n \n \u003c/ul\u003e\n \u003c/div\u003e\n \u003c/div\u003e\n \u003c/div\u003e\n \n\u003cdiv class=\"col-md-9\"\u003e\n \u003cdiv class=\"panel panel-primary\"\u003e\n 
\u003cdiv class=\"panel-heading\"\u003eSQL Injection Vulnerability\u003c/div\u003e\n \u003cdiv class=\"panel-body\"\u003e\n \u003cdiv class=\"pnl\"\u003e\n \n \u003cp\u003eThis should be safe\u003c/p\u003e\n ", + "scheme": + "", + "resHeader": + "e30=", + "invokeId": + 0, + "interfaces": + None, + "targetHash": + None, + "targetValues": + "", + "signature": + "", + "originClassName": + "", + "sourceValues": + "", + "methodName": + "", + "className": + "", + "source": + False, + "callerLineNumber": + 0, + "callerClass": + "", + "args": + "", + "callerMethod": + "", + "sourceHash": + None, + "retClassName": + "", + "log": + "", + "apiData": + None + }, + "invoke_id": 40252101640145387 + } + data['detail']['agentId'] = self.agent_id + data = gzipdata(data) + response = self.client.post('http://testserver/api/v1/report/upload', + data=data, + HTTP_CONTENT_ENCODING='gzip', + content_type='application/json', + ) + assert response.status_code == 200 + assert MethodPool.objects.filter( + url="http://localhost:9999/sqli123132123313132321123231", + agent_id=self.agent_id).exists() + def test_agent_method_pool_gzip_test(self): + data = { + "type": 36, + "detail": { + "agentId": + 4025, + "disk": + "", + "memory": + "", + "cpu": + "", + "methodQueue": + 0, + "replayQueue": + 0, + "reqCount": + 0, + "reportQueue": + 0, + "packagePath": + "", + "packageSignature": + "", + "packageName": + "", + "packageAlgorithm": + "", + "uri": + "/sqli1", + "url": + "http://localhost:9999/sqli123132123313132321123231test", + "protocol": + "HTTP/1.1", + "contextPath": + "", + "pool": [{ + "invokeId": + 40252101640145388, + "interfaces": [], + "targetHash": + ["824634910755", "824634910761", "0", "0", "0", "0"], + "targetValues": + "Level low ", + "signature": + "go-agent/core/httpRequestCookie.Cookie(0xc00014e100, {0x8420f8, 0x5})\n", + "originClassName": + "http.(*Request)", + "sourceValues": + "Level ", + "methodName": + "Cookie", + "className": + "http.(*Request)", + "source": + True, + "callerLineNumber": + 49, + "callerClass": + "github.com/govwa/util", + "args": + "[\"Level\"]", + "callerMethod": + "GetCookie(0xc00014e100, {0x8420f8, 0x5})\n", + "sourceHash": ["8659192"], + "retClassName": + "*http.Cookie " + }, { + "invokeId": + 40252101640145389, + "interfaces": [], + "targetHash": [ + "824634288360", "824634288368", "824634288378", + "824634288384", "824634288396", "824634288400", + "824634288416", "0" + ], + "targetValues": + "root Aa@6447985 govwa localhost 3306 http://localhost 9999 ", + "signature": + "go-agent/core/jsonUnmarshal.Unmarshal({0xc000324200, 0xd9, 0x200}, {0x79e520, 0xc0001da580})\n", + "originClassName": + "fmt", + "sourceValues": + "", + "methodName": + "Sprintf", + "className": + "fmt", + "source": + True, + "callerLineNumber": + 29, + "callerClass": + "github.com/govwa/util/config", + "args": + "[\"ewogICAgInVzZXIiOiAicm9vdCIsCiAgICAicGFzc3dvcmQiOiAiQWFANjQ0Nzk4NSIsCiAgICAiZGJuYW1lIjogImdvdndhIiwKICAgICJzcWxob3N0IjogImxvY2FsaG9zdCIsCiAgICAic3FscG9ydCI6ICIzMzA2IiwKICAgICJ3ZWJzZXJ2ZXIiOiAiaHR0cDovL2xvY2FsaG9zdCIsCiAgICAid2VicG9ydCI6ICI5OTk5IiwKCiAgICAic2Vzc2lvbmtleToiOiAiRzBWdzQ0NCIKfQ==\"]", + "callerMethod": + "LoadConfig()\n", + "sourceHash": + None, + "retClassName": + "*config.Config " + }, { + "invokeId": + 40252101640145390, + "interfaces": [], + "targetHash": ["824636572896"], + "targetValues": + "root:Aa@6447985@tcp(localhost:3306)/ ", + "signature": + "go-agent/core/fmtSprintf.Sprintf({0x84afe4, 0x11}, {0xc00032c4b8, 0x4, 0x4})\n", + "originClassName": + "fmt", + "sourceValues": + 
"%s:%s@tcp(%s:%s)/ root Aa@6447985 localhost 3306 ", + "methodName": + "Sprintf", + "className": + "fmt", + "source": + False, + "callerLineNumber": + 18, + "callerClass": + "github.com/govwa/util/database", + "args": + "[\"%s:%s@tcp(%s:%s)/\",[\"root\",\"Aa@6447985\",\"localhost\",\"3306\"]]", + "callerMethod": + "Connect()\n", + "sourceHash": [ + "8695780", "824634288360", "824634288368", + "824634288384", "824634288396" + ], + "retClassName": + "string " + }, { + "invokeId": + 40252101640145391, + "interfaces": [], + "targetHash": ["824636573472"], + "targetValues": + "root:Aa@6447985@tcp(localhost:3306)/govwa ", + "signature": + "go-agent/core/fmtSprintf.Sprintf({0x84c9df, 0x13}, {0xc00032c4f8, 0x5, 0x5})\n", + "originClassName": + "fmt", + "sourceValues": + "%s:%s@tcp(%s:%s)/%s root Aa@6447985 localhost 3306 govwa ", + "methodName": + "Sprintf", + "className": + "fmt", + "source": + False, + "callerLineNumber": + 30, + "callerClass": + "github.com/govwa/util/database", + "args": + "[\"%s:%s@tcp(%s:%s)/%s\",[\"root\",\"Aa@6447985\",\"localhost\",\"3306\",\"govwa\"]]", + "callerMethod": + "Connect()\n", + "sourceHash": [ + "8702431", "824634288360", "824634288368", + "824634288384", "824634288396", "824634288378" + ], + "retClassName": + "string " + }, { + "invokeId": + 40252101640145390, + "interfaces": [], + "targetHash": + ["824634910484", "824634910490", "0", "0", "0", "0"], + "targetValues": + "govwa MTY0MDE0NDg3NHxEdi1CQkFFQ180SUFBUkFCRUFBQVh2LUNBQU1HYzNSeWFXNW5EQThBRFdkdmRuZGhYM05sYzNOcGIyNEVZbTl2YkFJQ0FBRUdjM1J5YVc1bkRBY0FCWFZ1WVcxbEJuTjBjbWx1Wnd3SEFBVmhaRzFwYmdaemRISnBibWNNQkFBQ2FXUUdjM1J5YVc1bkRBTUFBVEU9fPfvm5eU0A5drQKDLDOgC_ffWcZue0sMf7EbJ7H5XzIj ", + "signature": + "go-agent/core/httpRequestCookie.Cookie(0xc00014e100, {0x8424b8, 0x5})\n", + "originClassName": + "http.(*Request)", + "sourceValues": + "govwa ", + "methodName": + "Cookie", + "className": + "http.(*Request)", + "source": + True, + "callerLineNumber": + 91, + "callerClass": + "github.com/gorilla/sessions.(*CookieStore)", + "args": + "[\"govwa\"]", + "callerMethod": + "New(0xc0000b6ce0, 0xc00014e100, {0x8424b8, 0x5})\n", + "sourceHash": ["8660152"], + "retClassName": + "*http.Cookie " + }, { + "invokeId": + 40252101640145391, + "interfaces": [], + "targetHash": + ["824634910748", "824634910752", "0", "0", "0", "0"], + "targetValues": + "Uid 1 ", + "signature": + "go-agent/core/httpRequestCookie.Cookie(0xc00014e100, {0x8413f6, 0x3})\n", + "originClassName": + "http.(*Request)", + "sourceValues": + "Uid ", + "methodName": + "Cookie", + "className": + "http.(*Request)", + "source": + True, + "callerLineNumber": + 49, + "callerClass": + "github.com/govwa/util", + "args": + "[\"Uid\"]", + "callerMethod": + "GetCookie(0xc00014e100, {0x8413f6, 0x3})\n", + "sourceHash": ["8655862"], + "retClassName": + "*http.Cookie " + }, { + "invokeId": 40252101640145392, + "interfaces": [], + "targetHash": ["824635081280"], + "targetValues": + "SELECT p.user_id, p.full_name, p.city, p.phone_number \n\t\t\t\t\t\t\t\tFROM Profile as p,Users as u \n\t\t\t\t\t\t\t\twhere p.user_id = u.id \n\t\t\t\t\t\t\t\tand u.id=1 ", + "signature": + "go-agent/core/fmtSprintf.Sprintf({0x86883b, 0x90}, {0xc00032c6c0, 0x1, 0x1})\n", + "originClassName": "fmt", + "sourceValues": + "SELECT p.user_id, p.full_name, p.city, p.phone_number \n\t\t\t\t\t\t\t\tFROM Profile as p,Users as u \n\t\t\t\t\t\t\t\twhere p.user_id = u.id \n\t\t\t\t\t\t\t\tand u.id=%s 1 ", + "methodName": "Sprintf", + "className": "fmt", + "source": False, + "callerLineNumber": 38, + 
"callerClass": + "github.com/govwa/vulnerability/sqli.(*Profile)", + "args": + "[\"SELECT p.user_id, p.full_name, p.city, p.phone_number \\n\\t\\t\\t\\t\\t\\t\\t\\tFROM Profile as p,Users as u \\n\\t\\t\\t\\t\\t\\t\\t\\twhere p.user_id = u.id \\n\\t\\t\\t\\t\\t\\t\\t\\tand u.id=%s\",[\"1\"]]", + "callerMethod": + "UnsafeQueryGetData(0xc0002925c0, {0xc000122820, 0x1})\n", + "sourceHash": ["8816699", "824634910752"], + "retClassName": "string " + }, { + "invokeId": 40252101640145393, + "interfaces": [], + "targetHash": None, + "targetValues": "", + "signature": + "go-agent/core/sqlDBQuery.Query(0xc0001c0a90, {0xc00014c240, 0x8f}, {0x0, 0x0, 0x0})\n", + "originClassName": "sql.(*DB)", + "sourceValues": + "SELECT p.user_id, p.full_name, p.city, p.phone_number \n\t\t\t\t\t\t\t\tFROM Profile as p,Users as u \n\t\t\t\t\t\t\t\twhere p.user_id = u.id \n\t\t\t\t\t\t\t\tand u.id=1 ", + "methodName": "Query", + "className": "sql.(*DB)", + "source": False, + "callerLineNumber": 42, + "callerClass": + "github.com/govwa/vulnerability/sqli.(*Profile)", + "args": + "[\"SELECT p.user_id, p.full_name, p.city, p.phone_number \\n\\t\\t\\t\\t\\t\\t\\t\\tFROM Profile as p,Users as u \\n\\t\\t\\t\\t\\t\\t\\t\\twhere p.user_id = u.id \\n\\t\\t\\t\\t\\t\\t\\t\\tand u.id=1\",None]", + "callerMethod": + "UnsafeQueryGetData(0xc0002925c0, {0xc000122820, 0x1})\n", + "sourceHash": ["824635081280"], + "retClassName": "*sql.Rows *errors.errorString " + }], + "language": + "GO", + "clientIp": + "[::1]:53457", + "secure": + False, + "queryString": + "", + "replayRequest": + False, + "method": + "GET", + "reqHeader": + "eyJBY2NlcHQiOlsidGV4dC9odG1sLGFwcGxpY2F0aW9uL3hodG1sK3htbCxhcHBsaWNhdGlvbi94bWw7cT0wLjksaW1hZ2Uvd2VicCxpbWFnZS9hcG5nLCovKjtxPTAuOCxhcHBsaWNhdGlvbi9zaWduZWQtZXhjaGFuZ2U7dj1iMztxPTAuOSJdLCJBY2NlcHQtRW5jb2RpbmciOlsiZ3ppcCwgZGVmbGF0ZSwgYnIiXSwiQWNjZXB0LUxhbmd1YWdlIjpbInpoLUNOLHpoO3E9MC45LGVuLUdCO3E9MC44LGVuO3E9MC43LGVuLVVTO3E9MC42Il0sIkNvbm5lY3Rpb24iOlsia2VlcC1hbGl2ZSJdLCJDb29raWUiOlsiSG1fbHZ0XzY5YmUxNTIzNTFlNDc5YjhiNjRmNzdhOTM0NzAzYTU1PTE2Mzk3MjcwNjA7IGdvdndhPU1UWTBNREUwTkRnM05IeEVkaTFDUWtGRlExODBTVUZCVWtGQ1JVRkJRVmgyTFVOQlFVMUhZek5TZVdGWE5XNUVRVGhCUkZka2RtUnVaR2hZTTA1c1l6Tk9jR0l5TkVWWmJUbDJZa0ZKUTBGQlJVZGpNMUo1WVZjMWJrUkJZMEZDV0ZaMVdWY3hiRUp1VGpCamJXeDFXbmQzU0VGQlZtaGFSekZ3WW1kYWVtUklTbkJpYldOTlFrRkJRMkZYVVVkak0xSjVZVmMxYmtSQlRVRkJWRVU5ZlBmdm01ZVUwQTVkclFLRExET2dDX2ZmV2NadWUwc01mN0ViSjdINVh6SWo7IFVpZD0xOyBMZXZlbD1sb3ciXSwiUmVmZXJlciI6WyJodHRwOi8vbG9jYWxob3N0Ojk5OTkvc3FsaTEiXSwiU2VjLUNoLVVhIjpbIlwiIE5vdCBBO0JyYW5kXCI7dj1cIjk5XCIsIFwiQ2hyb21pdW1cIjt2PVwiOTZcIiwgXCJNaWNyb3NvZnQgRWRnZVwiO3Y9XCI5NlwiIl0sIlNlYy1DaC1VYS1Nb2JpbGUiOlsiPzAiXSwiU2VjLUNoLVVhLVBsYXRmb3JtIjpbIlwiV2luZG93c1wiIl0sIlNlYy1GZXRjaC1EZXN0IjpbImRvY3VtZW50Il0sIlNlYy1GZXRjaC1Nb2RlIjpbIm5hdmlnYXRlIl0sIlNlYy1GZXRjaC1TaXRlIjpbInNhbWUtb3JpZ2luIl0sIlNlYy1GZXRjaC1Vc2VyIjpbIj8xIl0sIlVwZ3JhZGUtSW5zZWN1cmUtUmVxdWVzdHMiOlsiMSJdLCJVc2VyLUFnZW50IjpbIk1vemlsbGEvNS4wIChXaW5kb3dzIE5UIDEwLjA7IFdpbjY0OyB4NjQpIEFwcGxlV2ViS2l0LzUzNy4zNiAoS0hUTUwsIGxpa2UgR2Vja28pIENocm9tZS85Ni4wLjQ2NjQuMTEwIFNhZmFyaS81MzcuMzYgRWRnLzk2LjAuMTA1NC42MiJdfQ==", + "reqBody": + "", + "resBody": + " \u003cp\u003eYour Profile :\u003c/p\u003e\n sql: converting argument $1 type: unsupported type []interface {}, a slice of interface \n\u003cpre\u003e\nUid : 1\nName : \nCity : \nNumber : \n\u003c/pre\u003e\n \u003cdiv class=\"more-info\"\u003e\n \u003cspan\u003eMore Info :\u003c/span\u003e\n \u003ca target=\"_blank\" 
href=\"http://www.sqlinjection.net/union/\"\u003ehttp://www.sqlinjection.net/union/\u003c/a\u003e\n \u003ca target=\"_blank\" href=\"https://www.owasp.org/index.php/SQL_Injection\"\u003ehttps://www.owasp.org/index.php/SQL_Injection\u003c/a\u003e\n \u003c/div\u003e\n \u003c/div\u003e\n \u003c/div\u003e\n \u003c/div\u003e\n\u003c/div\u003e\n\n\u003c/div\u003e\n\n\n \u003cfooter class=\"footer\"\u003e\n \u003cdiv class=\"container\"\u003e\n \u003cspan\u003e\u003ci class=\"fa fa-copyright\"\u003e\u003c/i\u003eNemosecurity\u003c/span\u003e\n \u003c/div\u003e\n \u003c/footer\u003e\n\u003c/div\u003e\n\n\u003c/body\u003e\n\n\u003c/html\u003e\n \u003cli\u003e\u003ca href=\"idor1\"\u003eIDOR 1\u003c/a\u003e\u003c/li\u003e\n \u003cli\u003e\u003ca href=\"idor2\"\u003eIDOR 2\u003c/a\u003e\u003c/li\u003e\n \u003c/ul\u003e\n\n \n \u003cli\u003e\n \u003ca href=\"csa\"\u003e\n \u003ci class=\"fa fa-bug fa-lg\"\u003e\u003c/i\u003e Client Side Auth\n \u003c/a\u003e\n \u003c/li\u003e\n \u003cli style=\"height:35px\"\u003e\n \u003c/li\u003e\n \u003cli\u003e\n \u003ca href=\"setting\"\u003e\n \u003ci class=\"glyphicon glyphicon-cog fa-lg\"\u003e\u003c/i\u003e Setting\n \u003c/a\u003e\n \u003c/li\u003e\n \u003cli\u003e\n \u003ca href=\"logout\"\u003e\n \u003ci class=\"fa fa-sign-out fa-lg\"\u003e\u003c/i\u003e Logout\n \u003c/a\u003e\n \u003c/li\u003e\n \n \u003c/ul\u003e\n \u003c/div\u003e\n \u003c/div\u003e\n \u003c/div\u003e\n \n\u003cdiv class=\"col-md-9\"\u003e\n \u003cdiv class=\"panel panel-primary\"\u003e\n \u003cdiv class=\"panel-heading\"\u003eSQL Injection Vulnerability\u003c/div\u003e\n \u003cdiv class=\"panel-body\"\u003e\n \u003cdiv class=\"pnl\"\u003e\n \n \u003cp\u003eThis should be safe\u003c/p\u003e\n ", + "scheme": + "", + "resHeader": + "e30=", + "invokeId": + 0, + "interfaces": + None, + "targetHash": + None, + "targetValues": + "", + "signature": + "", + "originClassName": + "", + "sourceValues": + "", + "methodName": + "", + "className": + "", + "source": + False, + "callerLineNumber": + 0, + "callerClass": + "", + "args": + "", + "callerMethod": + "", + "sourceHash": + None, + "retClassName": + "", + "log": + "", + "apiData": + None + }, + "invoke_id": 40252101640145387 + } + data['detail']['agentId'] = self.agent_id + testdata = '11231231321331232131231312233hwqeqqwe' + data['detail'][ + 'resHeader'] = "Q29udGVudC1UeXBlOmFwcGxpY2F0aW9uL2pzb24KWC1GcmFtZS1PcHRpb25zOkRFTlkKQ29udGVudC1MZW5ndGg6NjYKQ29udGVudC1lbmNvZGluZzpnemlwClgtQ29udGVudC1UeXBlLU9wdGlvbnM6bm9zbmlmZgpSZWZlcnJlci1Qb2xpY3k6c2FtZS1vcmlnaW4=" + data['version'] = 'v2' + data['detail']['resBody'] = gzip_test_data = base64.b64encode( + gzip.compress(bytes( + testdata, encoding='utf-8'))).decode('raw_unicode_escape') + data = gzipdata(data) + response = self.client.post( + 'http://testserver/api/v1/report/upload', + data=data, + HTTP_CONTENT_ENCODING='gzip', + content_type='application/json', + ) + assert response.status_code == 200 + assert MethodPool.objects.filter( + url="http://localhost:9999/sqli123132123313132321123231test", + agent_id=self.agent_id).exists() + assert not MethodPool.objects.filter( + url="http://localhost:9999/sqli123132123313132321123231test", + agent_id=self.agent_id, + res_body=gzip_test_data).exists() + + assert MethodPool.objects.filter( + url="http://localhost:9999/sqli123132123313132321123231test", + agent_id=self.agent_id, + res_body=testdata).exists() + + + + + def test_agent_method_pool(self): + data = { + "detail": { + "reqHeader": + 
"QWNjZXB0OnRleHQvaHRtbCxhcHBsaWNhdGlvbi94aHRtbCt4bWwsYXBwbGljYXRpb24veG1sO3EKVXBncmFkZS1JbnNlY3VyZS1SZXF1ZXN0czoxClVzZXItQWdlbnQ6TW96aWxsYS81LjAgKFdpbmRvd3MgTlQgMTAuMDsgV2luNjQ7IHg2NDsgcnY6OTguMCkgR2Vja28vMjAxMDAxMDEgRmlyZWZveC85OC4wCkNvbm5lY3Rpb246Y2xvc2UKSG9zdDoxMC4xODguMTM2LjE4Njo4OTYyCkFjY2VwdC1MYW5ndWFnZTp6aC1DTix6aDtxCkFjY2VwdC1FbmNvZGluZzpnemlwLCBkZWZsYXRlCmR0LXRyYWNlaWQ6Mzc1YWQyNTk1ZjFkNGQ4M2FhNzUwNmVkZjFmNWI4YTUtNTc5NjQwYWMxNzZhNGFmODgyNWNhNjg5MjM4OTM4MDUuNTguOTEuMApDb250ZW50LUxlbmd0aDo3NApDb250ZW50LVR5cGU6YXBwbGljYXRpb24vanNvbgo=", + "agentId": 58, + "scheme": "", + "method": "POST", + "contextPath": "/orderbase/queryOrderDetail", + "pool": [], + "secure": "", + "uri": "/orderbase/queryOrderDetail", + "url": "10.188.136.186:8962/orderbase/queryOrderDetail", + "protocol": "HTTP/1.1", + "resBody": + "SFRUUC8xLjEgMjAwCngtdHJhY2UtaWQ6dHJpcG9yZGVyYmFzZS0wYWJjODhiYS00NTc5MDQtMjc1\nOTc1MDg1CmNvbnRlbnQtdHlwZTphcHBsaWNhdGlvbi9qc29uOyBjaGFyc2V0Cgp7InJldENvZGUi\nOjAsIm9yZGVySW5mbyI6eyJvcmRlck5vIjoiVDRpejE2bjl3MjdjM2U3MDE0ZmNkMjQ0NzA5N2Nj\nZGJhMjJkYjU0ODY4IiwidHJpcE5vIjoiVDRpejE2bjl3MjEyMTYyZjMzMDFjODE0NDgwOWExMGM5\nNzJjOWJhMDkwM2ExIiwiZGVtYW5kTm8iOiJUNGl6MTZuOXcyN2MzZTcwMTRmY2QyNDQ3MDk3Y2Nk\nYmEyMmRiNTQ4NjgiLCJjaXR5Q29kZSI6IjMxMDEwMCIsInN0YXR1cyI6ODAsInN1YlN0YXR1cyI6\nMCwicHJvZHVjdElkIjoiMSIsIm9yZGVyVHlwZSI6MiwiY2FyTGV2ZWwiOiIxMTAiLCJwYXNzZW5n\nZXJJZCI6IlAyZWEyYjg3ZGYwZTU0MGM5OTBhMGY3ZjAwYWNiZTE2OCIsImRyaXZlcklkIjoiRGM1\nZTIwZDIwYTVkZTQxODViZGQ0YjBjNTEzNDE2OTEyIiwib3JkZXJUaW1lIjoiMjAyMS0xMi0xMyAw\nMzoyNjo0MyIsIm9yaWdpbkNpdHlDb2RlIjoiMzEwMTAwIiwib3JpZ2luTG9jYXRpb24iOiIxMjEu\nODAyOTk5LDMxLjE0OTQ4NyIsIm9yaWdpbkFkZHJlc3MiOiLmnLrlnLrplYfnuqzkuIDot68xMDDl\nj7ciLCJvcmlnaW5Qb2kiOiLkuIrmtbfmtabkuJzlm73pmYXmnLrlnLox5Y+36Iiq56uZ5qW8Iiwi\nb3JpZ2luUG9pSWQiOiJCMDAxNTZUSjhPIiwiZGVzdGluYXRpb25DaXR5Q29kZSI6IjMxMDEwMCIs\nImRlc3RpbmF0aW9uTG9jYXRpb24iOiIxMjEuNDc1MTY0LDMxLjIyODgxNiIsImRlc3RpbmF0aW9u\nQWRkcmVzcyI6IuS6uuawkeWkp+mBkzIwMeWPtyIsImRlc3RpbmF0aW9uUG9pIjoi5LiK5rW35Y2a\n54mp6aaGIiwiZGVzdGluYXRpb25Qb2lJZCI6IkIwMDE1MzVCQkMiLCJwYXNzZW5nZXJUYWdzIjoi\nMSwwLDAsMiwwLDAsMCwwLDAsMCwwLDAsMCwxLDAsMCwwLDEsMCwxLDAiLCJkcml2ZXJUYWdzIjoi\nMCwwLDAsMCwwLDAsMCwwLDAsMCwwLDAiLCJkZWxldGVUYWciOjAsImNoYW5uZWxDb2RlIjoic2Fp\nYyIsImVzdGltYXRlSWQiOiIyMTEyMTM4YTY3MDcwZWE5N2U0YmZlODFlZDA0M2Q4NDg0NThjNjAy\nIiwiZXhwZW5JZCI6IjIxMTIxMzczZWFhMWU4N2VmNDQzYjQ4ZWI1OGVjYjc0ZmIzYzEzIiwiZGVt\nYW5kQXBwSWQiOiJzYWljX2NhciIsInN1cHBseUFwcElkIjoic2FpY19jYXJkIiwicGFzc2VuZ2Vy\nRXN0aW1hdGVGZWUiOjI1Mzg5LCJwYXNzZW5nZXJQYXlhYmxlRmVlIjoxODAwLCJwYXNzZW5nZXJQ\nYXltZW50RmVlIjoxODAwLCJkcml2ZXJSZWNlaXZlZEZlZSI6MTI1MCwidXBkYXRlVGltZSI6IjIw\nMjEtMTItMTMgMDM6Mjc6MDMiLCJjcmVhdGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNjo0NyIsImNh\nclVzZVRpbWUiOiIyMDIxLTEyLTEzIDA0OjMxOjQyIiwiaXNSZWYiOjAsInBheUJ5UGxhdGZvcm1G\nbGFnIjowLCJyYXRlRmxhZyI6MCwiaXNDYW5jZWwiOjAsImNhclVzZVR5cGUiOjEsImNhclVzZUR1\ncmF0aW9uIjowLCJjYXJVc2VTY2VuZSI6MCwicGFydGl0aW9uQ291bnQiOjAsInBhcnRpdGlvbklu\nZGV4IjowLCJkaXNwYXRjaE1vZGUiOjIsImVzdGltYXRlRHVyYXRpb24iOjAsImRhdGFTb3VyY2Ui\nOjAsImNhclNlcnZpY2UiOjEsInNpbXVsUmluZ0NhckxldmVsIjoiMTEwIiwidHJpcE1vZGUiOjAs\nInBheU9yZGVySWQiOiIyMDIxMTIxMzAzMjY1MzM2MDVhZTljZTJmMjQwM2E4MzAxYmNmMjEyMDQ5\nZDkxIiwib3JkZXJDcm93ZCI6MCwib3JkZXJMYXllciI6MCwic2NoZWR1bGVUeXBlIjowLCJleHRy\nYUludDEiOjAsImV4dHJhSW50MiI6MCwiZXh0cmFJbnQzIjowLCJleHRyYUludDQiOjAsImV4dHJh\nSW50NSI6MCwiZXN0aW1hdGVEaXN0YW5jZSI6MCwiZW50VHlwZSI6MCwicGF5bWVudENvbmZpcm1G\nbGFnIjowLCJwYXNzZW5nZXJTdXJ2ZXlGbGFnIjowLCJjaGFuZ2VUcmlwRmxhZyI6MCwib3JpZ2lu\nU3RhdGlvbkNvZGUiOiIwIiwiZGVzdGluYXRpb25TdGF0aW9uQ29kZSI6IjAiLCJwbGF0Zm9ybU1l\nc3NhZ
2VGZWUiOjE5OSwiZGVzdGluYXRpb25UeXBlIjowLCJyZUNhbGxGbGFnIjowfSwiZHJpdmVy\nSW5mbyI6eyJvcmRlck5vIjoiVDRpejE2bjl3MjdjM2U3MDE0ZmNkMjQ0NzA5N2NjZGJhMjJkYjU0\nODY4IiwidHJpcE5vIjoiVDRpejE2bjl3MjEyMTYyZjMzMDFjODE0NDgwOWExMGM5NzJjOWJhMDkw\nM2ExIiwiZHJpdmVySWQiOiJEYzVlMjBkMjBhNWRlNDE4NWJkZDRiMGM1MTM0MTY5MTIiLCJkcml2\nZXJHcm91cHMiOiI0MiwzOTIiLCJkcml2ZXJOYW1lIjoi6YK55biI5YKFIiwiZHJpdmVyQXZhdGFy\nIjoiaHR0cHM6Ly9pb3QtdGVzdC1wZXJmb3JtYW5jZS5vc3MtY24taGFuZ3pob3UuYWxpeXVuY3Mu\nY29tL2ltYWdlcy9fYWxpXzRmYjQxNDJiMGQxZjQxNDM5MGUzZjk4ZWJhMDBiZTkzP0V4cGlyZXM9\nMzI1MDE5MjMyMDAmT1NTQWNjZXNzS2V5SWQ9TFRBSTRHMTVFSHRyUGI0YVZ1RVF4NVN1JlNpZ25h\ndHVyZT1LaGpQcGFoNDhpR2tOUGpjUzlwYlolMkZDOGlHRSUzRCIsImRyaXZlckFwcFZlcnNpb24i\nOiIzLjAuMCIsImRyaXZlclBsYXRmb3JtIjoiYW5kcm9pZCIsImRyaXZlclJhdGVTY29yZSI6NDQ5\nLCJkcml2ZXJEaXNwbGF5U2NvcmUiOjQ1MCwiZHJpdmVyVG90YWxEaXNwYXRjaE9yZGVyQ291bnQi\nOjE4MjIsInZlaGljbGVWaW4iOiJWSU40MTIyNDk2MzE4NjE2MSIsInZlaGljbGVObyI6Iua1izIx\nMDI1NTgiLCJ2ZWhpY2xlQ29sb3IiOiLnmb3oibIiLCJ2ZWhpY2xlQnJhbmQiOiLliKvlhYsiLCJ2\nZWhpY2xlTW9kZWwiOiJHTDYiLCJieXdheUFkZHJlc3MiOiLmuqfpmLPot682MTHlj7cxOTMz6ICB\n5Zy65Z2KMuWPt+alvCIsImFjdHVhbENhckxldmVsIjoiMTEwIiwidXBkYXRlVGltZSI6IjIwMjEt\nMTItMTMgMDM6Mjc6MDMiLCJjcmVhdGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNjo0NyIsInJld2Fy\nZEFtb3VudCI6MCwicHVuaXNoQW1vdW50IjowLCJwaWNrdXBFdGEiOjAsInBpY2t1cEVkYSI6MCwi\naXNEaXNwYXRjaGVkIjowLCJpc0ZyZWVEaXNwYXRjaGVkIjowLCJkcml2ZXJNb2JpbGUiOiIxMTIz\nMTAxMDgzNyIsImRyaXZlclNldHRsZW1lbnRNb2RlIjoxLCJkcml2ZXJDb21wbGV0ZU9yZGVyQ291\nbnQiOjg4NSwibGFzdFJlbGF5RXRhIjowLCJsYXN0UmVsYXlFZGEiOjAsImRyaXZlckFwcENhcGFj\naXR5IjoiMSwxLDEiLCJkcml2ZXJUcmFuc3BvcnRUeXBlIjoiMSIsImRyaXZlckFjdGl2aXR5UmV3\nYXJkIjowLCJkcml2ZXJMZXZlbElkIjoxLCJkcml2ZXJMZXZlbE5hbWUiOiLpnZLpk5wiLCJkcml2\nZXJFc3RpbWF0ZUZlZSI6MCwiZHJpdmVyQ29tcGxldGVPcmRlclNob3dDb3VudCI6MCwiZHJpdmVy\nQ29tcGxldGVEYWlseVNob3dDb3VudCI6MCwidmVoaWNsZVNlYXROdW0iOjB9LCJwYXNzZW5nZXJJ\nbmZvIjp7Im9yZGVyTm8iOiJUNGl6MTZuOXcyN2MzZTcwMTRmY2QyNDQ3MDk3Y2NkYmEyMmRiNTQ4\nNjgiLCJ0cmlwTm8iOiJUNGl6MTZuOXcyMTIxNjJmMzMwMWM4MTQ0ODA5YTEwYzk3MmM5YmEwOTAz\nYTEiLCJwYXNzZW5nZXJOYW1lIjoiMTEzKioqKjA4MzciLCJwYXNzZW5nZXJSYXRlU2NvcmUiOjQ2\nNiwicGFzc2VuZ2VyRGlzcGxheVNjb3JlIjo1LCJwYXNzZW5nZXJUb3RhbERpc3BhdGNoT3JkZXJD\nb3VudCI6NCwicGFzc2VuZ2VyQXBwVmVyc2lvbiI6IjMuMC4wIiwicGFzc2VuZ2VyUGxhdGZvcm0i\nOiJhbmRyb2lkIiwiZW50ZXJwcmlzZUlkIjowLCJlbnRlcnByaXNlUnVsZUlkIjowLCJhdXRvU2Vy\ndmljZUNvZGUiOiIwIiwiZW50ZXJwcmlzZU5lZWRSZW1hcmsiOjAsImVudGVycHJpc2VFbXBsb3ll\nZUlkIjoiMCIsImVudGVycHJpc2VBbW91bnQiOjAsInVwZGF0ZVRpbWUiOiIyMDIxLTEyLTEzIDAz\nOjI3OjAzIiwiY3JlYXRlVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NDciLCJwYXNzZW5nZXJJZCI6\nIlAyZWEyYjg3ZGYwZTU0MGM5OTBhMGY3ZjAwYWNiZTE2OCIsInBhc3Nlbmdlck1vYmlsZSI6IjEx\nMzMxMDEwODM3IiwiZmxpZ2h0Tm8iOiJLTjU5NTUiLCJmbGlnaHREZXAiOiJQS1giLCJmbGlnaHRB\ncnIiOiJTSEEiLCJmbGlnaHREYXRlIjoiMjAyMS0xMi0xMyAwMDowMDowMCIsImV4cFRpbWUiOjMw\nLCJvcmlnaW5hbEFwcG9pbnRtZW50VGltZSI6IjIwMjEtMTItMTMgMDQ6MzE6NDIiLCJpc0Rpc3Bh\ndGNoZWQiOiIwIiwiZ3Vlc3RDb3VudCI6MSwiZW50ZXJwcmlzZURpc2NvdW50IjoiMCIsImVudGVy\ncHJpc2VDb3N0Y2VudGVyIjowLCJlbnRlcnByaXNlQ29zdGNlbnRlck5hbWUiOiIwIiwiZW50ZXJw\ncmlzZUZyb3plblJhdGUiOiIwIiwicGF5TW9kZSI6MCwicGFzc2VuZ2VyU2V0dGxlbWVudE1vZGUi\nOjAsImZvbnRQbGFjYXJkIjoiIzRmNzBiZCIsInJ1bGVTbmFwU2hvdElkIjowLCJlbnRlcnByaXNl\nRGVwdElkIjowLCJhY3R1YWxQYXlNb2RlIjoxLCJwYXNzZW5nZXJDYXRlZ29yeSI6MCwib3BlcmF0\nb3JCeSI6MiwiZWdnRGlzY291bnQiOjAsImVnZ01heEFtb3VudCI6MCwiZWdnRmxhZyI6MCwidGF4\naURpc3BhdGNoRmVlIjowLCJlbnRlcnByaXNlQ2F0ZWdvcnkiOjAsInBhc3NlbmdlckFwcENhcGFj\naXR5IjoiMSwxLDEiLCJwYXNzZW5nZXJWYWx1YXRpb25Nb2RlIjoyLCJiZWxvbmdJZCI6MCwiYmVs\nb25nVHlwZSI6MCwidGVtcFJpc2VSYXRlIjowLCJ0ZW1wUmlzZU
xpbWl0IjowLCJ0ZW1wUmlzZUZl\nZSI6MCwidGVtcFJpc2VUeXBlIjowLCJkaXNwYXRjaFJpc2VGZWUiOjAsImhvbGlkYXlSaXNlRmVl\nIjowLCJwYXNzZW5nZXJHZW5kZXIiOjB9LCJjYW5jZWxJbmZvIjp7ImNhbmNlbEJ5IjowLCJjYW5j\nZWxQYXllciI6MCwiY2FuY2VsRHV0eSI6MCwib3BlcmF0b3JCeSI6MCwib3JkZXJDb25maXJtRmxh\nZyI6MCwiZHV0eVNjZW5lIjowLCJ3b3Jrc2hlZXRUeXBlIjowLCJleHRDYW5jZWxEdXR5IjowLCJl\neHRDYW5jZWxGZWUiOjAsImV4dENhbmNlbER1dHlGcmVlIjowfSwiZmVlSW5mbyI6W3sib3JkZXJO\nbyI6IlQ0aXoxNm45dzI3YzNlNzAxNGZjZDI0NDcwOTdjY2RiYTIyZGI1NDg2OCIsImJhc2VGZWUi\nOjE4MDAsImV4dHJhRmVlIjowLCJ0b3RhbEZlZSI6MTgwMCwiZml4ZWRQcmljZSI6MCwiZHVyYXRp\nb24iOjEsImR1cmF0aW9uRmVlIjowLCJkaXN0YW5jZSI6MCwiZGlzdGFuY2VGZWUiOjAsIm1pbkZl\nZSI6MTcwMCwicGFya0ZlZSI6MCwid2FpdEZlZSI6MCwiYnJpZGdlRmVlIjowLCJjbGVhckZlZSI6\nMCwibG9uZ0Rpc3RhbmNlRmVlIjowLCJsb25nRGlzdGFuY2UiOjAsImNyb3NzQ2l0eUZlZSI6MCwi\nZGlzcGF0Y2hGZWUiOjAsIm5pZ2h0RmVlIjowLCJvdmVydGltZUZlZSI6MCwib3ZlcnRpbWUiOjAs\nImNhbmNlbEZlZSI6MCwiZmVlVHlwZSI6MSwidXBkYXRlVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6\nNTMiLCJjcmVhdGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNjo1MyIsImFkanVzdEZlZSI6MCwiY291\ncG9uVHlwZSI6MCwiY291cG9uQW1vdW50IjowLCJwYXJ0aXRpb25JbmRleCI6MCwicGxhY2FyZEZl\nZSI6MCwib3RoZXJGZWUiOjAsInBsYXRmb3JtTWVzc2FnZUZlZSI6MCwicGFzc2VuZ2VyQ29uZmly\nbSI6MSwiYmFzZVRpbWUiOjEsImJhc2VNaWxlYWdlIjoxMDAwLCJleHRyYURpc3RhbmNlIjowLCJl\neHRyYUR1cmF0aW9uIjowLCJ2YWx1YXRpb25Nb2RlIjoyLCJsZXNzVGhhblN0YXJ0RmVlIjoxLCJ3\nYWl0VGltZSI6MCwiY291cG9uVmFsdWUiOjAsImNoZWFwQW1vdW50IjowLCJjaG9vc2VEcml2ZXJG\nZWUiOjAsImNvbW1pc3Npb25GZWUiOjAsImVudFNlcnZpY2VGZWUiOjAsInRlbXBSaXNlRmVlIjow\nLCJmZWVTdWJUeXBlIjowLCJwaWNrdXBFdGEiOjAsInBpY2t1cEVkYSI6MCwibG93U3BlZWREdXJh\ndGlvbiI6MCwibG93U3BlZWRGZWUiOjAsImRpc3RhbmNlQWxsb3dhbmNlRmVlIjowLCJ0YXhpRGlz\ncGF0Y2hGZWUiOjAsImZ1ZWxGZWUiOjAsInByaWNlSW5mbyI6IntcInRpbWVSYW5nZUZlZUluZm9c\nIjogW3tcIml0ZW1zXCI6IFt7XCJmZWVcIjogMTcwMCwgXCJlbmRUaW1lXCI6IFwiMTA6MzU6MDBc\nIiwgXCJiZWdpblRpbWVcIjogXCIwMDowMDowMFwiLCBcImZyZWVDb3VudFwiOiAwLCBcInRvdGFs\nQ291bnRcIjogMH1dLCBcImZlZVR5cGVcIjogXCJzdGFydEZlZVwifSwge1wiaXRlbXNcIjogW3tc\nImZlZVwiOiAwLCBcImVuZFRpbWVcIjogXCIxMTowMDowMFwiLCBcImJlZ2luVGltZVwiOiBcIjAw\nOjAwOjAwXCIsIFwiZnJlZUNvdW50XCI6IDEsIFwidG90YWxDb3VudFwiOiAxfV0sIFwiZmVlVHlw\nZVwiOiBcImR1cmF0aW9uRmVlXCJ9LCB7XCJmZWVUeXBlXCI6IFwiZGlzdGFuY2VGZWVcIn0sIHtc\nImZlZVR5cGVcIjogXCJsb25nRGlzdGFuY2VGZWVcIn1dfSIsImRpc3BhdGNoUmlzZUZlZSI6MCwi\naG9saWRheVJpc2VGZWUiOjEwMCwidGF4aUZlZSI6MH0seyJvcmRlck5vIjoiVDRpejE2bjl3Mjdj\nM2U3MDE0ZmNkMjQ0NzA5N2NjZGJhMjJkYjU0ODY4IiwiYmFzZUZlZSI6MCwiZXh0cmFGZWUiOjAs\nInRvdGFsRmVlIjoxODAwLCJmaXhlZFByaWNlIjowLCJkdXJhdGlvbiI6MCwiZHVyYXRpb25GZWUi\nOjAsImRpc3RhbmNlIjowLCJkaXN0YW5jZUZlZSI6MCwibWluRmVlIjowLCJwYXJrRmVlIjowLCJ3\nYWl0RmVlIjowLCJicmlkZ2VGZWUiOjAsImNsZWFyRmVlIjowLCJsb25nRGlzdGFuY2VGZWUiOjAs\nImxvbmdEaXN0YW5jZSI6MCwiY3Jvc3NDaXR5RmVlIjowLCJkaXNwYXRjaEZlZSI6MCwibmlnaHRG\nZWUiOjAsIm92ZXJ0aW1lRmVlIjowLCJvdmVydGltZSI6MCwiY2FuY2VsRmVlIjowLCJmZWVUeXBl\nIjoyLCJ1cGRhdGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNzowMyIsImNyZWF0ZVRpbWUiOiIyMDIx\nLTEyLTEzIDAzOjI3OjAzIiwiYWRqdXN0RmVlIjowLCJjb3Vwb25UeXBlIjowLCJjb3Vwb25BbW91\nbnQiOjAsInBhcnRpdGlvbkluZGV4IjowLCJwbGFjYXJkRmVlIjowLCJvdGhlckZlZSI6MCwicGF5\nbWVudFJlcXVlc3RJZCI6IjIxMTIxMzhhNjcwNzBlYTk3ZTRiZmU4MWVkMDQzZDg0ODQ1OGM2MDIi\nLCJwbGF0Zm9ybU1lc3NhZ2VGZWUiOjAsInBhc3NlbmdlckNvbmZpcm0iOjAsImJhc2VUaW1lIjow\nLCJiYXNlTWlsZWFnZSI6MCwiZXh0cmFEaXN0YW5jZSI6MCwiZXh0cmFEdXJhdGlvbiI6MCwidmFs\ndWF0aW9uTW9kZSI6MSwibGVzc1RoYW5TdGFydEZlZSI6MCwid2FpdFRpbWUiOjAsImNvdXBvblZh\nbHVlIjowLCJjaGVhcEFtb3VudCI6MCwiY2hvb3NlRHJpdmVyRmVlIjowLCJjb21taXNzaW9uRmVl\nIjowLCJlbnRTZXJ2aWNlRmVlIjowLCJ0ZW1wUmlzZUZlZSI6MCwiZmVlU3ViVHlwZSI6MCwicGlj\na3VwRXRhIjowLCJwa
WNrdXBFZGEiOjAsImxvd1NwZWVkRHVyYXRpb24iOjAsImxvd1NwZWVkRmVl\nIjowLCJkaXN0YW5jZUFsbG93YW5jZUZlZSI6MCwidGF4aURpc3BhdGNoRmVlIjowLCJmdWVsRmVl\nIjowLCJkaXNwYXRjaFJpc2VGZWUiOjAsImhvbGlkYXlSaXNlRmVlIjowLCJ0YXhpRmVlIjowfSx7\nIm9yZGVyTm8iOiJUNGl6MTZuOXcyN2MzZTcwMTRmY2QyNDQ3MDk3Y2NkYmEyMmRiNTQ4NjgiLCJi\nYXNlRmVlIjoxMjUwLCJleHRyYUZlZSI6MCwidG90YWxGZWUiOjEyNTAsImZpeGVkUHJpY2UiOjAs\nImR1cmF0aW9uIjoxLCJkdXJhdGlvbkZlZSI6MCwiZGlzdGFuY2UiOjAsImRpc3RhbmNlRmVlIjow\nLCJtaW5GZWUiOjEyMDAsInBhcmtGZWUiOjAsIndhaXRGZWUiOjAsImJyaWRnZUZlZSI6MCwiY2xl\nYXJGZWUiOjAsImxvbmdEaXN0YW5jZUZlZSI6MCwibG9uZ0Rpc3RhbmNlIjowLCJjcm9zc0NpdHlG\nZWUiOjAsImRpc3BhdGNoRmVlIjowLCJuaWdodEZlZSI6MCwib3ZlcnRpbWVGZWUiOjAsIm92ZXJ0\naW1lIjowLCJjYW5jZWxGZWUiOjAsImZlZVR5cGUiOjMsInVwZGF0ZVRpbWUiOiIyMDIxLTEyLTEz\nIDAzOjI2OjUzIiwiY3JlYXRlVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NTMiLCJhZGp1c3RGZWUi\nOjAsImNvdXBvblR5cGUiOjAsImNvdXBvbkFtb3VudCI6MCwicGFydGl0aW9uSW5kZXgiOjAsInBs\nYWNhcmRGZWUiOjAsIm90aGVyRmVlIjowLCJwbGF0Zm9ybU1lc3NhZ2VGZWUiOjUwLCJwYXNzZW5n\nZXJDb25maXJtIjowLCJiYXNlVGltZSI6MTAsImJhc2VNaWxlYWdlIjozMDAwLCJleHRyYURpc3Rh\nbmNlIjowLCJleHRyYUR1cmF0aW9uIjowLCJ2YWx1YXRpb25Nb2RlIjoyLCJsZXNzVGhhblN0YXJ0\nRmVlIjoxLCJ3YWl0VGltZSI6MCwiY291cG9uVmFsdWUiOjAsImNoZWFwQW1vdW50IjowLCJjaG9v\nc2VEcml2ZXJGZWUiOjAsImNvbW1pc3Npb25GZWUiOjAsImVudFNlcnZpY2VGZWUiOjAsInRlbXBS\naXNlRmVlIjowLCJmZWVTdWJUeXBlIjowLCJwaWNrdXBFdGEiOjAsInBpY2t1cEVkYSI6MCwibG93\nU3BlZWREdXJhdGlvbiI6MCwibG93U3BlZWRGZWUiOjAsImRpc3RhbmNlQWxsb3dhbmNlRmVlIjow\nLCJ0YXhpRGlzcGF0Y2hGZWUiOjAsImZ1ZWxGZWUiOjAsInByaWNlSW5mbyI6IntcInRpbWVSYW5n\nZUZlZUluZm9cIjogW3tcIml0ZW1zXCI6IFt7XCJmZWVcIjogMTIwMCwgXCJlbmRUaW1lXCI6IFwi\nMTY6MDA6MDBcIiwgXCJiZWdpblRpbWVcIjogXCIwMDowMDowMFwiLCBcImZyZWVDb3VudFwiOiAw\nLCBcInRvdGFsQ291bnRcIjogMH1dLCBcImZlZVR5cGVcIjogXCJzdGFydEZlZVwifSwge1wiaXRl\nbXNcIjogW3tcImZlZVwiOiAwLCBcImVuZFRpbWVcIjogXCIxNjowMDowMFwiLCBcImJlZ2luVGlt\nZVwiOiBcIjAwOjAwOjAwXCIsIFwiZnJlZUNvdW50XCI6IDEsIFwidG90YWxDb3VudFwiOiAxfV0s\nIFwiZmVlVHlwZVwiOiBcImR1cmF0aW9uRmVlXCJ9LCB7XCJmZWVUeXBlXCI6IFwiZGlzdGFuY2VG\nZWVcIn0sIHtcImZlZVR5cGVcIjogXCJsb25nRGlzdGFuY2VGZWVcIn1dfSIsImRpc3BhdGNoUmlz\nZUZlZSI6MCwiaG9saWRheVJpc2VGZWUiOjEwMCwidGF4aUZlZSI6MH0seyJvcmRlck5vIjoiVDRp\nejE2bjl3MjdjM2U3MDE0ZmNkMjQ0NzA5N2NjZGJhMjJkYjU0ODY4IiwiYmFzZUZlZSI6MjUzODks\nImV4dHJhRmVlIjowLCJ0b3RhbEZlZSI6MjUzODksImZpeGVkUHJpY2UiOjAsImR1cmF0aW9uIjoy\nNjc5LCJkdXJhdGlvbkZlZSI6MzcxMCwiZGlzdGFuY2UiOjQ0NjUwLCJkaXN0YW5jZUZlZSI6MTU3\nMTQsIm1pbkZlZSI6MTcwMCwicGFya0ZlZSI6MCwid2FpdEZlZSI6MCwiYnJpZGdlRmVlIjowLCJj\nbGVhckZlZSI6MCwibG9uZ0Rpc3RhbmNlRmVlIjo0MTY1LCJsb25nRGlzdGFuY2UiOjQxNjUwLCJj\ncm9zc0NpdHlGZWUiOjAsImRpc3BhdGNoRmVlIjowLCJuaWdodEZlZSI6MCwib3ZlcnRpbWVGZWUi\nOjAsIm92ZXJ0aW1lIjowLCJjYW5jZWxGZWUiOjAsImZlZVR5cGUiOjcsInVwZGF0ZVRpbWUiOiIy\nMDIxLTEyLTEzIDAzOjI2OjQ3IiwiY3JlYXRlVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NDciLCJh\nZGp1c3RGZWUiOjAsImNvdXBvblR5cGUiOjAsImNvdXBvbkFtb3VudCI6MCwicGFydGl0aW9uSW5k\nZXgiOjAsInBsYWNhcmRGZWUiOjAsIm90aGVyRmVlIjowLCJwbGF0Zm9ybU1lc3NhZ2VGZWUiOjAs\nInBhc3NlbmdlckNvbmZpcm0iOjAsImJhc2VUaW1lIjoxLCJiYXNlTWlsZWFnZSI6MTAwMCwiZXh0\ncmFEaXN0YW5jZSI6NDM2NTAsImV4dHJhRHVyYXRpb24iOjI2MTksInZhbHVhdGlvbk1vZGUiOjIs\nImxlc3NUaGFuU3RhcnRGZWUiOjIsIndhaXRUaW1lIjowLCJjb3Vwb25WYWx1ZSI6MCwiY2hlYXBB\nbW91bnQiOjAsImNob29zZURyaXZlckZlZSI6MCwiY29tbWlzc2lvbkZlZSI6MCwiZW50U2Vydmlj\nZUZlZSI6MCwidGVtcFJpc2VGZWUiOjAsImZlZVN1YlR5cGUiOjEsInBpY2t1cEV0YSI6MCwicGlj\na3VwRWRhIjowLCJlc3RpbWF0ZUlkIjoiMjExMjEzOGE2NzA3MGVhOTdlNGJmZTgxZWQwNDNkODQ4\nNDU4YzYwMiIsImxvd1NwZWVkRHVyYXRpb24iOjAsImxvd1NwZWVkRmVlIjowLCJkaXN0YW5jZUFs\nbG93YW5jZUZlZSI6MCwidGF4aURpc3BhdGNoRmVlIjowLCJmdWVsRmVlIjowLC
JkaXNwYXRjaFJp\nc2VGZWUiOjAsImhvbGlkYXlSaXNlRmVlIjoxMDAsInRheGlGZWUiOjB9LHsib3JkZXJObyI6IlQ0\naXoxNm45dzI3YzNlNzAxNGZjZDI0NDcwOTdjY2RiYTIyZGI1NDg2OCIsImJhc2VGZWUiOjY5ODA2\nLCJleHRyYUZlZSI6MCwidG90YWxGZWUiOjcwMDA1LCJmaXhlZFByaWNlIjowLCJkdXJhdGlvbiI6\nMjY3OSwiZHVyYXRpb25GZWUiOjEwMzk1LCJkaXN0YW5jZSI6NDQ2NTAsImRpc3RhbmNlRmVlIjo4\nMzMwLCJtaW5GZWUiOjEyMDAsInBhcmtGZWUiOjAsIndhaXRGZWUiOjAsImJyaWRnZUZlZSI6MCwi\nY2xlYXJGZWUiOjAsImxvbmdEaXN0YW5jZUZlZSI6NDk5ODAsImxvbmdEaXN0YW5jZSI6NDE2NTAs\nImNyb3NzQ2l0eUZlZSI6MCwiZGlzcGF0Y2hGZWUiOjAsIm5pZ2h0RmVlIjowLCJvdmVydGltZUZl\nZSI6MCwib3ZlcnRpbWUiOjAsImNhbmNlbEZlZSI6MCwiZmVlVHlwZSI6OCwidXBkYXRlVGltZSI6\nIjIwMjEtMTItMTMgMDM6MjY6NDciLCJjcmVhdGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNjo0NyIs\nImFkanVzdEZlZSI6MCwiY291cG9uVHlwZSI6MCwiY291cG9uQW1vdW50IjowLCJwYXJ0aXRpb25J\nbmRleCI6MCwicGxhY2FyZEZlZSI6MCwib3RoZXJGZWUiOjAsInBsYXRmb3JtTWVzc2FnZUZlZSI6\nMTk5LCJwYXNzZW5nZXJDb25maXJtIjowLCJiYXNlVGltZSI6MTAsImJhc2VNaWxlYWdlIjozMDAw\nLCJleHRyYURpc3RhbmNlIjo0MTY1MCwiZXh0cmFEdXJhdGlvbiI6MjA3OSwidmFsdWF0aW9uTW9k\nZSI6MiwibGVzc1RoYW5TdGFydEZlZSI6MCwid2FpdFRpbWUiOjAsImNvdXBvblZhbHVlIjowLCJj\naGVhcEFtb3VudCI6MCwiY2hvb3NlRHJpdmVyRmVlIjowLCJjb21taXNzaW9uRmVlIjowLCJlbnRT\nZXJ2aWNlRmVlIjowLCJ0ZW1wUmlzZUZlZSI6MCwiZmVlU3ViVHlwZSI6MSwicGlja3VwRXRhIjow\nLCJwaWNrdXBFZGEiOjAsImVzdGltYXRlSWQiOiIyMTEyMTM4YTY3MDcwZWE5N2U0YmZlODFlZDA0\nM2Q4NDg0NThjNjAyIiwibG93U3BlZWREdXJhdGlvbiI6MCwibG93U3BlZWRGZWUiOjAsImRpc3Rh\nbmNlQWxsb3dhbmNlRmVlIjowLCJ0YXhpRGlzcGF0Y2hGZWUiOjAsImZ1ZWxGZWUiOjAsImRpc3Bh\ndGNoUmlzZUZlZSI6MCwiaG9saWRheVJpc2VGZWUiOjEwMCwidGF4aUZlZSI6MH1dLCJwYXltZW50\nSW5mbyI6W3sib3JkZXJObyI6IlQ0aXoxNm45dzI3YzNlNzAxNGZjZDI0NDcwOTdjY2RiYTIyZGI1\nNDg2OCIsInRyaXBObyI6IlQ0aXoxNm45dzIxMjE2MmYzMzAxYzgxNDQ4MDlhMTBjOTcyYzliYTA5\nMDNhMSIsInJlcXVlc3RJZCI6IjIxMTIxMzhhNjcwNzBlYTk3ZTRiZmU4MWVkMDQzZDg0ODQ1OGM2\nMDIiLCJwYXlPcmRlck5vIjoic2l0MjAyMTEyMTMwMzI3MDAwNDkyMTI0MDY5MlRFU1QiLCJwYXlC\naXpUeXBlIjoyMCwic3RhdHVzIjoxLCJ1cGRhdGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNzowMyIs\nImNyZWF0ZVRpbWUiOiIyMDIxLTEyLTEzIDAzOjI2OjUzIiwicGF5Q2hhbm5lbCI6IjEiLCJwYXll\nciI6MSwiYWN0dWFsSW5mbyI6IntcImFwcElkXCI6IFwic2FpY19jYXJcIiwgXCJwYXlUaW1lXCI6\nIFwiMjAyMS0xMi0xMyAwMzoyNzowMlwiLCBcImNvdXBvbklkXCI6IDAsIFwicGFpZEluZm9cIjog\nXCJbe1xcXCJhY3R1YWxBZ2dzY0NoYW5uZWxUeXBlXFxcIjpcXFwiXFxcIixcXFwiY2FwaXRhbEFt\nb3VudFxcXCI6MCxcXFwiY2hhbm5lbElkXFxcIjo0LFxcXCJjaGFubmVsT3JkZXJOb1xcXCI6XFxc\nIlQwMjExMTAzODkwNzM5XFxcIixcXFwiY2hhbm5lbFNlcU5vXFxcIjpcXFwiMjAyMTExMDMyMjAw\nMTQ0Nzc5MTQxMDc5Mjc3NlxcXCIsXFxcImVudElkXFxcIjowLFxcXCJlbnRQYXlNb2RlbFxcXCI6\nMyxcXFwiZXF1aXR5QW1vdW50XFxcIjowLFxcXCJtZXJnZUZsYWdcXFwiOjAsXFxcInBhaWRcXFwi\nOjE4MDAsXFxcInBheUNoYW5uZWxOYW1lXFxcIjpcXFwi5pSv5LuY5a6d5pSv5LuYXFxcIixcXFwi\ncGF5T3JkZXJOb1xcXCI6XFxcInNpdDIwMjExMjEzMDMyNzAwMDQ5MjEyNDA2OTJURVNUXFxcIixc\nXFwicGF5U2NlbmVcXFwiOjMsXFxcInBheVNjZW5lVGV4dFxcXCI6XFxcIumihOS7mFxcXCIsXFxc\nInBheWNoYW5uZWxcXFwiOjEsXFxcInRyYW5zYWN0aW9uTm9cXFwiOlxcXCJUZGQ3YmVkOGZlNWI5\nNGRjMWJiMTFiMWJjODQ0YzE3MmVcXFwiLFxcXCJ0cmFuc2FjdGlvblRpbWVcXFwiOlxcXCIyMDIx\nLTEyLTEzIDAzOjI3OjAyXFxcIn1dXCIsIFwiY291cG9uRm9ybVwiOiAwLCBcImNvdXBvblR5cGVc\nIjogMCwgXCJlbXBsb3llZUlkXCI6IFwiXCIsIFwiZW50cHJpc2VJZFwiOiBcIlwiLCBcIm1lcmNo\nYW50SWRcIjogXCJzYWljX2NhclwiLCBcInBheUNoYW5uZWxcIjogXCIxXCIsIFwiY2hlYXBBbW91\nbnRcIjogMCwgXCJjb21wYW55TmFtZVwiOiBcIlwiLCBcImNvdXBvblRpdGxlXCI6IFwiXCIsIFwi\nY291cG9uVmFsdWVcIjogMCwgXCJhY3R1YWxBbW91bnRcIjogMTgwMCwgXCJjb3Vwb25BbW91bnRc\nIjogMCwgXCJkcml2ZXJBbW91bnRcIjogMTI1MCwgXCJiYWxhbmNlQW1vdW50XCI6IDAsIFwicmV2\nZW51ZUFtb3VudFwiOiA1NTAsIFwicGF5Q2hhbm5lbE5hbWVcIjogXCLmlK/ku5jlrp3mlK/ku5hc\nIiwgXCJ0aGlyZFBhcnR5Q291cG9uQ
W1vdW50XCI6IDB9IiwicHJpY2VJbmZvIjoiXCJ7fVwiIn1d\nLCJ0YWdNSW5mbyI6eyJhaXJwbGFuZSI6MSwidXNlRWFzIjowLCJyZXBsYWNlIjowLCJmb3JlaWdu\nIjoyLCJrdW5zaGFuIjowLCJwYXlBZHZhbmNlIjowLCJyZWZ1bmQiOjAsImJlY2tvbiI6MCwicmVs\nYXkiOjAsImJ5V2F5IjowLCJyZWRpc3BhdGNoIjowLCJjcm9zc0NpdHkiOjAsImV4Y2VwdGlvbiI6\nMSwiYWxhcm1QIjowLCJhbGFybUQiOjAsInNoYXJlIjowLCJtb2RpZnlQcmljZSI6MCwicGF5QnlQ\nbGF0Zm9ybSI6MCwiZXhwaXJhdGlvblRpbWUiOiIyMDAwLTAxLTAxIDAxOjAxOjAxIn0sInRyYW5z\nSW5uZXJNSW5mbyI6eyJkaXNwYXRjaFRpbWUiOiIyMDIxLTEyLTEzIDAzOjI2OjQ4Iiwic2Vydmlj\nZVN0YXJ0VGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NTIiLCJhcnJpdmVCb2FyZGluZ1RpbWUiOiIy\nMDIxLTEyLTEzIDAzOjI2OjUyIiwicGlja1VwVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NTIiLCJh\ncnJpdmVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNjo1MyIsInBheVRpbWUiOiIyMDIxLTEyLTEzIDAz\nOjI3OjAzIn0sInRyYW5zT3V0ZXJJbmZvIjpbeyJvcmRlck5vIjoiVDRpejE2bjl3MjdjM2U3MDE0\nZmNkMjQ0NzA5N2NjZGJhMjJkYjU0ODY4IiwiZXZlbnRDb2RlIjoiNjAxMCIsImV2ZW50RGV0YWls\nIjoie1wiaXNDYW5jZWxcIjowfSIsInVzZXJJZCI6IlAyZWEyYjg3ZGYwZTU0MGM5OTBhMGY3ZjAw\nYWNiZTE2OCIsInVzZXJUeXBlIjoxLCJjcmVhdGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNjo0NyJ9\nLHsib3JkZXJObyI6IlQ0aXoxNm45dzI3YzNlNzAxNGZjZDI0NDcwOTdjY2RiYTIyZGI1NDg2OCIs\nImV2ZW50Q29kZSI6IjYwMTAiLCJldmVudERldGFpbCI6IntcImlzQ2FuY2VsXCI6MH0iLCJ1c2Vy\nSWQiOiJEYzVlMjBkMjBhNWRlNDE4NWJkZDRiMGM1MTM0MTY5MTIiLCJ1c2VyVHlwZSI6MiwiY3Jl\nYXRlVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NDcifV0sInRyYW5zSW5uZXJJbmZvIjpbeyJvcmRl\nck5vIjoiVDRpejE2bjl3MjdjM2U3MDE0ZmNkMjQ0NzA5N2NjZGJhMjJkYjU0ODY4IiwiZXZlbnRD\nb2RlIjoiYXJyaXZlQW5kU2V0dGxlbWVudCIsImV2ZW50RGV0YWlsIjoie1wibmFtZVwiOiBcIuWI\nsOi+vuW5tue7k+eul1wifSIsInVzZXJUeXBlIjowLCJ0cmFuc1RpbWUiOiIyMDIxLTEyLTEzIDAz\nOjI2OjUzIiwidXBkYXRlVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NTMiLCJjcmVhdGVUaW1lIjoi\nMjAyMS0xMi0xMyAwMzoyNjo1MyIsImZyb21TdGF0ZSI6MCwidG9TdGF0ZSI6MCwib3BlcmF0b3JU\neXBlIjoxLCJvcGVyYXRvciI6MCwiZHJpdmVyTG9jYXRpb24iOiIxMjEuNDc1MTY0LDMxLjIyODgx\nNiIsImRyaXZlclBvaUFkZHJlc3MiOiLkuIrmtbfkurrmsJHlub/lnLrkuIrmtbfljZrnianppoYi\nLCJkcml2ZXJEZXRhaWxBZGRyZXNzIjoi5LiK5rW35biC6buE5rWm5Yy65Lq65rCR5aSn6YGTMTIw\n5Y+3IiwicGFydGl0aW9uSW5kZXgiOjB9LHsib3JkZXJObyI6IlQ0aXoxNm45dzI3YzNlNzAxNGZj\nZDI0NDcwOTdjY2RiYTIyZGI1NDg2OCIsImV2ZW50Q29kZSI6ImFycml2ZUJvYXJkaW5nIiwiZXZl\nbnREZXRhaWwiOiJ7XCJhcnJpdmVCb2FyZGluZ0VkYVwiOiA0MTU0OSwgXCJhcnJpdmVCb2FyZGlu\nZ0V0YVwiOiAyMzE1fSIsInVzZXJUeXBlIjowLCJ0cmFuc1RpbWUiOiIyMDIxLTEyLTEzIDAzOjI2\nOjUyIiwidXBkYXRlVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NTIiLCJjcmVhdGVUaW1lIjoiMjAy\nMS0xMi0xMyAwMzoyNjo1MiIsImZyb21TdGF0ZSI6MCwidG9TdGF0ZSI6MCwib3BlcmF0b3JUeXBl\nIjoxLCJvcGVyYXRvciI6MCwiZHJpdmVyTG9jYXRpb24iOiIxMjEuNDc2NjEyLDMxLjIxMTI2NiIs\nImRyaXZlclBvaUFkZHJlc3MiOiLoibrov6rluIzoiJ7ouYjloZHlvaLkuK3lv4Mo5paw5aSp5Zyw\n5bqXKeS4rea1t+eOr+Wuh+iNnyIsImRyaXZlckRldGFpbEFkZHJlc3MiOiLkuIrmtbfluILpu4Tm\ntabljLrpqazlvZPot68zMeWPtyIsInBhcnRpdGlvbkluZGV4IjowfSx7Im9yZGVyTm8iOiJUNGl6\nMTZuOXcyN2MzZTcwMTRmY2QyNDQ3MDk3Y2NkYmEyMmRiNTQ4NjgiLCJldmVudENvZGUiOiJqdWRn\nZVF1aWNrVHJpcENvbXBsZXRlT3JkZXIiLCJldmVudERldGFpbCI6IntcIm5hbWVcIjogXCLlrozl\njZXliKTmlq3lv6vpgJ/ooYznqItcIiwgXCJqb2JTb3VyY2VcIjogXCJkb0Fycml2ZUFuZFNldHRs\nZW1lbnRcIn0iLCJ1c2VyVHlwZSI6MCwidHJhbnNUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNzo1NCIs\nInVwZGF0ZVRpbWUiOiIyMDIxLTEyLTEzIDAzOjI3OjU0IiwiY3JlYXRlVGltZSI6IjIwMjEtMTIt\nMTMgMDM6Mjc6NTQiLCJmcm9tU3RhdGUiOjAsInRvU3RhdGUiOjAsIm9wZXJhdG9yVHlwZSI6MzAs\nIm9wZXJhdG9yIjowLCJwYXJ0aXRpb25JbmRleCI6MH0seyJvcmRlck5vIjoiVDRpejE2bjl3Mjdj\nM2U3MDE0ZmNkMjQ0NzA5N2NjZGJhMjJkYjU0ODY4IiwiZXZlbnRDb2RlIjoibWF0Y2hTdWNjZWVk\nIiwiZXZlbnREZXRhaWwiOiJ7XCJuYW1lXCI6IFwi5rS+5Y2V5oiQ5YqfXCIsIFwiZXRhQnVmZmVy\nXCI6IDAsIFwiZHJpdmVyRXh0TGF0XCI6IFwiMC4wXCIsIFwiZHJpdmVyRXh0TG9uXCI6IFwiMC
4w\nXCJ9IiwidXNlclR5cGUiOjAsInRyYW5zVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NDgiLCJ1cGRh\ndGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNjo0NyIsImNyZWF0ZVRpbWUiOiIyMDIxLTEyLTEzIDAz\nOjI2OjQ3IiwiZnJvbVN0YXRlIjowLCJ0b1N0YXRlIjowLCJvcGVyYXRvclR5cGUiOjMwLCJvcGVy\nYXRvciI6MCwicGFzc2VuZ2VyTG9jYXRpb24iOiIxMjEuODAyOTk5LDMxLjE0OTQ4NyIsImRyaXZl\nckxvY2F0aW9uIjoiMTIxLjQ3NjYxMiwzMS4yMTEyNjYiLCJwYXNzZW5nZXJQb2lBZGRyZXNzIjoi\n5LiK5rW35rWm5Lic5Zu96ZmF5py65Zy6MeWPt+iIquermealvCIsInBhc3NlbmdlckRldGFpbEFk\nZHJlc3MiOiLmnLrlnLrplYfnuqzkuIDot68xMDDlj7ciLCJkcml2ZXJQb2lBZGRyZXNzIjoi6Im6\n6L+q5biM6Iie6LmI5aGR5b2i5Lit5b+DKOaWsOWkqeWcsOW6lynkuK3mtbfnjq/lrofojZ8iLCJk\ncml2ZXJEZXRhaWxBZGRyZXNzIjoi5LiK5rW35biC6buE5rWm5Yy66ams5b2T6LevMzHlj7ciLCJw\nYXJ0aXRpb25JbmRleCI6MH0seyJvcmRlck5vIjoiVDRpejE2bjl3MjdjM2U3MDE0ZmNkMjQ0NzA5\nN2NjZGJhMjJkYjU0ODY4IiwiZXZlbnRDb2RlIjoicGF5bm90aWZ5IiwidXNlclR5cGUiOjAsInRy\nYW5zVGltZSI6IjIwMjEtMTItMTMgMDM6Mjc6MDMiLCJ1cGRhdGVUaW1lIjoiMjAyMS0xMi0xMyAw\nMzoyNzowMyIsImNyZWF0ZVRpbWUiOiIyMDIxLTEyLTEzIDAzOjI3OjAzIiwiZnJvbVN0YXRlIjow\nLCJ0b1N0YXRlIjowLCJvcGVyYXRvclR5cGUiOjAsIm9wZXJhdG9yIjowLCJwYXJ0aXRpb25JbmRl\neCI6MH0seyJvcmRlck5vIjoiVDRpejE2bjl3MjdjM2U3MDE0ZmNkMjQ0NzA5N2NjZGJhMjJkYjU0\nODY4IiwiZXZlbnRDb2RlIjoicGlja3VwUGFzc2VuZ2VyIiwiZXZlbnREZXRhaWwiOiJ7XCJuYW1l\nXCI6IFwi5o6l5Yiw5LmY5a6iXCJ9IiwidXNlclR5cGUiOjAsInRyYW5zVGltZSI6IjIwMjEtMTIt\nMTMgMDM6MjY6NTIiLCJ1cGRhdGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNjo1MiIsImNyZWF0ZVRp\nbWUiOiIyMDIxLTEyLTEzIDAzOjI2OjUyIiwiZnJvbVN0YXRlIjowLCJ0b1N0YXRlIjowLCJvcGVy\nYXRvclR5cGUiOjAsIm9wZXJhdG9yIjowLCJkcml2ZXJMb2NhdGlvbiI6IjEyMS40NzY2MTIsMzEu\nMjExMjY2IiwiZHJpdmVyUG9pQWRkcmVzcyI6IuelneahpemVh+S4iua1t+a1puS4nOWbvemZheac\nuuWcujHlj7foiKrnq5nmpbwiLCJkcml2ZXJEZXRhaWxBZGRyZXNzIjoi5LiK5rW35biC5rWm5Lic\n5paw5Yy65ZCv6Iiq6LevOTAw5Y+3IiwicGFydGl0aW9uSW5kZXgiOjB9LHsib3JkZXJObyI6IlQ0\naXoxNm45dzI3YzNlNzAxNGZjZDI0NDcwOTdjY2RiYTIyZGI1NDg2OCIsImV2ZW50Q29kZSI6InN0\nYXJ0U2VydmljZSIsImV2ZW50RGV0YWlsIjoie1wibmFtZVwiOiBcIumihOe6puWNleWPuOacuuW8\ngOWni+acjeWKoVwifSIsInVzZXJUeXBlIjowLCJ0cmFuc1RpbWUiOiIyMDIxLTEyLTEzIDAzOjI2\nOjUyIiwidXBkYXRlVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NTIiLCJjcmVhdGVUaW1lIjoiMjAy\nMS0xMi0xMyAwMzoyNjo1MiIsImZyb21TdGF0ZSI6MCwidG9TdGF0ZSI6MCwib3BlcmF0b3JUeXBl\nIjowLCJvcGVyYXRvciI6MCwiZHJpdmVyTG9jYXRpb24iOiIxMjEuNDc2NjEyLDMxLjIxMTI2NiIs\nInBhcnRpdGlvbkluZGV4IjowfV0sImd1ZXN0SW5mbyI6W3siZ3Vlc3ROYW1lIjoiMTEzKioqKjA4\nMzciLCJndWVzdE1vYmlsZSI6IjExMzMxMDEwODM3IiwiaXNTZW5kTXNnIjowLCJpc1NlbmRFbWFp\nbCI6MCwiaXNEZWZhdWx0IjoxfV0sInJlZnVuZEFtb3VudCI6MCwidGFnSW5mbyI6W3sib3JkZXJO\nbyI6IlQ0aXoxNm45dzI3YzNlNzAxNGZjZDI0NDcwOTdjY2RiYTIyZGI1NDg2OCIsInRhZ1R5cGUi\nOjEsInRhZ1ZhbHVlIjoiVEcwMTAwMSIsInJlbWFyayI6IuWPuOacuuWIsOi+vuS4iui9pueCuei2\nhei/h+iMg+WbtOW8guW4uCzopoHmsYLojIPlm7Q6MTAx57GzIOWunumZheiMg+WbtDozMTgwMSDn\nsbMiLCJjcmVhdGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNjo1MiIsInVwZGF0ZVRpbWUiOiIyMDIx\nLTEyLTEzIDAzOjI2OjUyIn0seyJvcmRlck5vIjoiVDRpejE2bjl3MjdjM2U3MDE0ZmNkMjQ0NzA5\nN2NjZGJhMjJkYjU0ODY4IiwidGFnVHlwZSI6OCwidGFnVmFsdWUiOiJURzA2MDAyIiwicmVtYXJr\nIjoi5Y+45py65bey6K+EIiwiY3JlYXRlVGltZSI6IjIwMjEtMTItMTMgMDM6MjY6NTUiLCJ1cGRh\ndGVUaW1lIjoiMjAyMS0xMi0xMyAwMzoyNjo1NSJ9LHsib3JkZXJObyI6IlQ0aXoxNm45dzI3YzNl\nNzAxNGZjZDI0NDcwOTdjY2RiYTIyZGI1NDg2OCIsInRhZ1R5cGUiOjE5LCJ0YWdWYWx1ZSI6IlRH\nMDEzMDAwIiwicmVtYXJrIjoi6aOO5o6nLeWujOWNleato+W4uCIsImNyZWF0ZVRpbWUiOiIyMDIx\nLTEyLTEzIDAzOjI3OjU0IiwidXBkYXRlVGltZSI6IjIwMjEtMTItMTMgMDM6Mjc6NTQifV0sImRp\nc3BhdGNoQWdhaW5UYXNrSW5mbyI6eyJvcGVyYXRvclR5cGUiOjAsIm5lZWRDb3VudCI6MCwic3Rh\ndHVzIjowLCJyZXdhcmRBbW91bnQiOjAsInB1bmlzaEFtb3VudCI6MCwicmlnaHRGbGFnIjowLCJy\naWdodFNjb3JlIjowLCJkaXNwYXRjaE1vZGUiOjB9L
CJwcm9wZXJ0eUluZm8iOlt7Im9yZGVyTm8i\nOiJUNGl6MTZuOXcyN2MzZTcwMTRmY2QyNDQ3MDk3Y2NkYmEyMmRiNTQ4NjgiLCJ0cmlwTm8iOiJU\nNGl6MTZuOXcyMTIxNjJmMzMwMWM4MTQ0ODA5YTEwYzk3MmM5YmEwOTAzYTEiLCJwcm9wZXJ0eUlk\nIjoiRlVDIiwidXNlclByb3BlcnR5SWQiOiJGVUMiLCJ1c2VWYWx1ZSI6MCwic3RhdHVzIjo0LCJw\ncm9wZXJ0eVR5cGUiOjB9XSwiZHJpdmluZ1NpdHVhdGlvbkluZm8iOnsidGVtcGVyYXR1cmUiOjAs\nImRpc3RhbmNlIjowLCJkdXJhdGlvbiI6MCwiYXZnU3BlZWQiOjAsImF1dG9SYXRlIjowLCJ0cmFm\nZmljTW90b3JRdHkiOjAsInRyYWZmaWNOb25Nb3RvclF0eSI6MCwidHJhZmZpY1BlZGVzdHJpYW5R\ndHkiOjAsImFpTGV2ZWwiOjAsImFpTGV2ZWxQcm9ncmVzcyI6MH19", + "clientIp": "", + "reqBody": + "{productId=1, orderNo=T4iz16n9w27c3e7014fcd2447097ccdba22db54868}", + "resHeader": "" + }, + "type": 36, + "version": "v2" + } + data['detail']['agentId'] = self.agent_id + res = self.agent_report(data, agentId=self.agent_id) + diff --git a/test/apiserver/test_agent_register.py b/test/apiserver/test_agent_register.py new file mode 100644 index 000000000..5cfb9d37d --- /dev/null +++ b/test/apiserver/test_agent_register.py @@ -0,0 +1,33 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : test_agent_register +# @created : 星期五 12月 10, 2021 14:46:44 CST +# +# @description : +###################################################################### + + +from test.apiserver.test_agent_base import AgentTestCase +from dongtai_common.models.agent import IastAgent +import json + + +class AgentRegisterTestCase(AgentTestCase): + def setUp(self): + super().setUp() + self.test_agent_id = [] + + def test_rep_register(self): + data1 = self.raw_register(name='rep_data') + self.agent_heartbeat() + data2 = self.raw_register(name='rep_data') + assert data1.status_code == 200 and data2.status_code == 200 + self.test_agent_id += [ + json.loads(data2.content)['data']['id'], + json.loads(data1.content)['data']['id'] + ] + assert data1.content == data2.content + + def test_register(self): + assert not IastAgent.objects.filter(pk=self.agent_id, + project_version_id=0).exists() diff --git a/test/apiserver/test_agent_sca_upload.py b/test/apiserver/test_agent_sca_upload.py new file mode 100644 index 000000000..75495c245 --- /dev/null +++ b/test/apiserver/test_agent_sca_upload.py @@ -0,0 +1,49 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : test_agent_sca_upload +# @created : 星期一 12月 20, 2021 20:34:48 CST +# +# @description : +###################################################################### + + + +from test.apiserver.test_agent_base import AgentTestCase + + + +class ScaUploadTestCase(AgentTestCase): + + + def test_agent_sca_upload(self): + data = { + "detail": { + "packagePath": + "/Users/xxx/spring-boot/2.3.2.RELEASE/spring-boot-2.3.2.RELEASE.jar", + "agentId": self.agent_id, + "packageSignature": "efd5812bc736735e71447a51701becd14c2bede0", + "packageName": "spring-boot-2.3.2.RELEASE.jar", + "packageAlgorithm": "SHA-1" + }, + "type": 17 + } + res = self.agent_report(data) + assert res.status_code == 200 + def test_agent_sca_bulk_upload(self): + data = { + "detail": { + "agentId": + self.agent_id, + "packages": [{ + "packagePath": + "/Users/xxx/spring-boot/2.3.2.RELEASE/spring-boot-2.3.2.RELEASE.jar", + "packageSignature": + "efd5812bc736735e71447a51701becd14c2bede0", + "packageName": "spring-boot-2.3.2.RELEASE.jar", + "packageAlgorithm": "SHA-1" + }] + }, + "type": 18 + } + res = self.agent_report(data) + assert res.status_code == 200 diff --git 
a/test/apiserver/test_agent_startuptime.py b/test/apiserver/test_agent_startuptime.py new file mode 100644 index 000000000..9cd21c04b --- /dev/null +++ b/test/apiserver/test_agent_startuptime.py @@ -0,0 +1,31 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : test_agent_startuptime +# @created : 星期一 12月 20, 2021 18:48:30 CST +# +# @description : +###################################################################### + + + +from test.apiserver.test_agent_base import (AgentTestCase, gzipdata) + + +class AgentStartUptimeTestCase(AgentTestCase): + + + def test_start_up_time_compalince(self): + data = {'agentId': self.agent_id, 'startupTime': 448} + gzip_data = gzipdata(data) + response_no_gzip = self.client.post( + 'http://testserver/api/v1/agent/startuptime', + data=data, + ) + response_gzip = self.client.post( + 'http://testserver/api/v1/agent/gzipstartuptime', + data=gzip_data, + HTTP_CONTENT_ENCODING='gzip', + content_type='application/json', + ) + assert response_no_gzip.status_code == 200 + assert response_gzip.status_code == 200 diff --git a/test/apiserver/test_config.py b/test/apiserver/test_config.py new file mode 100644 index 000000000..5cbe4aeb9 --- /dev/null +++ b/test/apiserver/test_config.py @@ -0,0 +1,24 @@ +from rest_framework.test import APITestCase +from dongtai_protocol.views.agent_config import get_agent_config +from dongtai_protocol.views.agent_config import * +from dongtai_common.models.user import User + + +class VulDetailTestCase(APITestCase): + + def test_agent_config_generate(self): + print(get_agent_config(1)) + + def test_agent_detail_retrieve(self): + res = get_agent_filter_details(1) + print(res) + + def test_target_filter(self): + res = get_agent_config_by_scan(1, 2) + print(res) + + + def test_agent_config_request(self): + self.user = User.objects.filter(pk=1).first() + self.client.force_authenticate(user=self.user) + response = self.client.post('/api/v1/agent/thresholdv2') diff --git a/test/apiserver/views/__init__.py b/test/apiserver/views/__init__.py new file mode 100644 index 000000000..6ffc24cd4 --- /dev/null +++ b/test/apiserver/views/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/28 下午4:10 +# project: dongtai-openapi diff --git a/test/apiserver/views/agent_download.py b/test/apiserver/views/agent_download.py new file mode 100644 index 000000000..3429eb433 --- /dev/null +++ b/test/apiserver/views/agent_download.py @@ -0,0 +1,25 @@ +import unittest + +from test import DongTaiTestCase + + +class AgentDownloadTestCase(DongTaiTestCase): + def test_something(self): + self.assertEqual(True, False) + + def test_python_agent_download(self): + pass + + def test_python_agent_replace_config(self): + from dongtai_protocol.views.agent_download import PythonAgentDownload + download_handler = PythonAgentDownload() + download_handler.replace_config() + + def test_java_agent_download(self): + from dongtai_protocol.views.agent_download import JavaAgentDownload + download_handler = JavaAgentDownload() + download_handler.download_agent() + + +if __name__ == '__main__': + unittest.main() diff --git a/test/core/__init__.py b/test/core/__init__.py new file mode 100644 index 000000000..f9daf13af --- /dev/null +++ b/test/core/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/2 上午10:45 +# project: dongtai-engine diff --git a/test/core/plugins/__init__.py 
b/test/core/plugins/__init__.py new file mode 100644 index 000000000..5eaf86d76 --- /dev/null +++ b/test/core/plugins/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/10/22 下午2:56 +# project: DongTai-engine diff --git a/test/core/plugins/strategy_headers.py b/test/core/plugins/strategy_headers.py new file mode 100644 index 000000000..dd22fbf1f --- /dev/null +++ b/test/core/plugins/strategy_headers.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/10/22 下午2:57 +# project: DongTai-engine + +import unittest + +from test import DongTaiTestCase + + +class MyTestCase(DongTaiTestCase): + + def test_check_response_header(self): + from dongtai_engine.plugins.strategy_headers import check_response_header + from dongtai_common.models.agent_method_pool import MethodPool + check_response_header(MethodPool.objects.first()) + + def test_check_strict_transport_security(self): + value = 'max-age=31536000; includeSubDomains' + import re + result = re.match('max-age=(\\d+);.*?', value) + if result: + print(result.group(1)) + + +if __name__ == '__main__': + unittest.main() diff --git a/test/core/plugins/strategy_sentive.py b/test/core/plugins/strategy_sentive.py new file mode 100644 index 000000000..4fcf6df54 --- /dev/null +++ b/test/core/plugins/strategy_sentive.py @@ -0,0 +1,40 @@ +import unittest + +from test import DongTaiTestCase + + +class MyTestCase(DongTaiTestCase): + def test_something(self): + self.assertEqual(True, True) + + def test_search_id_card_leak(self): + id = 45789 + from dongtai_common.models.agent_method_pool import MethodPool + method_pool = MethodPool.objects.get(id=id) + if method_pool: + from dongtai_engine.plugins.strategy_sensitive import search_id_card_leak + search_id_card_leak(method_pool) + + def test_check_id_card(self): + right_numbers = ('510103196502083435', '510103196608150034',) + wrong_numbers = ('510103296502083435',) + from dongtai_engine.plugins.strategy_sensitive import check_id_card + for right_number in right_numbers: + print('current number is: ' + right_number) + self.assertEqual(True, check_id_card(right_number)) + + for wrong_number in wrong_numbers: + print('current number is: ' + wrong_number) + self.assertEqual(False, check_id_card(wrong_number)) + + def test_check_response_content(self): + id = 45789 + from dongtai_common.models.agent_method_pool import MethodPool + method_pool = MethodPool.objects.get(id=id) + if method_pool: + from dongtai_engine.plugins.strategy_sensitive import check_response_content + check_response_content(method_pool) + + +if __name__ == '__main__': + unittest.main() diff --git a/test/core/tasks.py b/test/core/tasks.py new file mode 100644 index 000000000..d46421a55 --- /dev/null +++ b/test/core/tasks.py @@ -0,0 +1,109 @@ +import unittest + +from test import DongTaiTestCase + + +class MyTestCase(DongTaiTestCase): + def test_something(self): + self.assertEqual(True, False) + + def test_vul_recheck(self): + from dongtai_engine.tasks import vul_recheck + vul_recheck() + + def test_report(self): + from dongtai_engine.tasks import export_report + export_report() + + def test_search_vul_from_replay_method_pool(self): + from dongtai_engine.tasks import search_vul_from_replay_method_pool + method_id = 110 + search_vul_from_replay_method_pool(method_id) + + def test_search_vul_from_method_pool(self): + method_pool_id = 657160 + + from dongtai_engine.tasks import search_vul_from_method_pool + 
search_vul_from_method_pool(method_pool_id) + + def test_update_agent_status(self): + from dongtai_engine.tasks import update_agent_status + update_agent_status() + + def test_verify_agent_status(self): + from dongtai_common.models.agent import IastAgent + from dongtai_engine.tasks import is_alive + import time + + timestamp = int(time.time()) + stopped_agents = IastAgent.objects.values("id").filter(is_running=0) + is_running_agents = list() + for agent in stopped_agents: + agent_id = agent['id'] + if is_alive(agent_id=agent_id, timestamp=timestamp): + is_running_agents.append(agent_id) + else: + continue + if is_running_agents: + IastAgent.objects.filter(id__in=is_running_agents).update(is_running=1, is_core_running=1) + + def test_update_sca(self): + from dongtai_engine.tasks import update_one_sca + update_one_sca(2379, "/Users/xxx/spring-boot/2.3.2.RELEASE/org.springframework:spring-beans.jar", "a4bb5ffad5564e4a0e25955e3a40b1c6158385b2", "org.springframework:spring-beans.jar", "SHA-1") + + def test_http_header(self): + from dongtai_common.models.agent import IastAgent + agents = IastAgent.objects.filter(bind_project_id=1252).values('id') + from dongtai_common.models.agent_method_pool import MethodPool + method_pools = MethodPool.objects.filter(agent_id__in=agents).values('req_header_fs') + + from http.server import BaseHTTPRequestHandler + + class HttpRequest(BaseHTTPRequestHandler): + def __init__(self, raw_request): + self.body = None + self.uri = None + self.params = None + from io import BytesIO + self.rfile = BytesIO(raw_request.encode()) + self.raw_requestline = self.rfile.readline() + self.error_code = self.error_message = None + self.parse_request() + self.parse_path() + self.parse_body() + self._cookie_keys = set() + + @property + def cookie_keys(self): + return self._cookie_keys + + def init_cookie_keys(self): + cookies = self.headers.get('cookies').split(';') + for cookie in cookies: + self._cookie_keys.add(cookie.strip().split('=')[0]) + + def parse_body(self): + if self.body is None: + self.body = self.rfile.read().decode('utf-8') + return self.body + + def parse_path(self): + items = self.path.split('?') + self.uri = items[0] + self.params = '?'.join(items[1:]) + + project_headers = set() + project_cookies = set() + for method_pool in method_pools: + try: + request = HttpRequest(method_pool['req_header_fs']) + project_headers = project_headers | set(request.headers.keys()) + # project_cookies = project_cookies | request.cookie_keys + except BaseException: + pass + print(project_headers) + print(project_cookies) + + +if __name__ == '__main__': + unittest.main() diff --git a/test/engine_pool.py b/test/engine_pool.py new file mode 100644 index 000000000..794fd3905 --- /dev/null +++ b/test/engine_pool.py @@ -0,0 +1,10 @@ +import requests + +index = 1 +while index<2800: + res = requests.get("http://127.0.0.1:8000/api/engine/run?method_pool_id={}".format(str(index))) + + print(res.status_code) + print(res.content) + print(index) + index = index + 1 diff --git a/test/iast/__init__.py b/test/iast/__init__.py new file mode 100644 index 000000000..4107c2413 --- /dev/null +++ b/test/iast/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/13 下午10:21 +# project: dongtai-engine diff --git a/test/iast/seriliazer/__init__.py b/test/iast/seriliazer/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/test/iast/seriliazer/test_vul_details.py b/test/iast/seriliazer/test_vul_details.py new file mode 
100644 index 000000000..070d29fe2 --- /dev/null +++ b/test/iast/seriliazer/test_vul_details.py @@ -0,0 +1,37 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : test_vul_details +# @created : 星期四 12月 09, 2021 10:23:10 CST +# +# @description : +###################################################################### + + + +from django.test import TestCase +from django.urls import include, path, reverse +from dongtai_common.models.user import User +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.hook_type import HookType +import time +from ddt import ddt, data, unpack +from dongtai_web.serializers.vul import VulSerializer + +TEST_DATA = ('', None, 'Django', 'Apache Tomcat/9.0.37', 'Tomcat/8.x', + 'php-fpm', 'WebLogic') + +@ddt +class VulTestCase(TestCase): + def setUp(self): + pass + + @data(*TEST_DATA) + def test_vul(self, value): + try: + res = VulSerializer.split_container_name(value) + except Exception as e: + self.fail("raised Exception:{}".format(e)) + assert isinstance(res, str) + + def tearDown(self): + pass diff --git a/test/iast/views/__init__.py b/test/iast/views/__init__.py new file mode 100644 index 000000000..4107c2413 --- /dev/null +++ b/test/iast/views/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/7/13 下午10:21 +# project: dongtai-engine diff --git a/test/iast/views/test_scan_strategy.py b/test/iast/views/test_scan_strategy.py new file mode 100644 index 000000000..3bb598414 --- /dev/null +++ b/test/iast/views/test_scan_strategy.py @@ -0,0 +1,24 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : scan_strategy +# @created : 星期四 12月 02, 2021 19:57:44 CST +# +# @description : +###################################################################### + + + +from rest_framework.test import APITestCase +from django.urls import include, path, reverse +from dongtai_web.views.scan_strategys import ScanStrategyViewSet +from dongtai_common.models.user import User + + +class ScanStrategyTestCase(APITestCase): + def setUp(self): + pass + def test_create(self): + self.client.force_authenticate(user=User.objects.filter(pk=1).first()) + response = self.client.get('/api/v1/scan_strategy') + print(response.content) + assert response.status_code == 200 diff --git a/test/iast/views/test_sensitive_info_rule.py b/test/iast/views/test_sensitive_info_rule.py new file mode 100644 index 000000000..9a2e11c0b --- /dev/null +++ b/test/iast/views/test_sensitive_info_rule.py @@ -0,0 +1,26 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : test_sensitive_info_rule +# @created : 星期一 12月 13, 2021 19:50:46 CST +# +# @description : +###################################################################### + + +from rest_framework.test import APITestCase +from dongtai_common.models.user import User + +class SensitiveInfoRuleTestCase(APITestCase): + def setUp(self): + self.user = User.objects.filter(pk=1).first() + self.client.force_authenticate(user=self.user) + + def test_sensitive_info_rule_create(self): + response = self.client.post('/api/v1/sensitive_info_rule', + data={ + 'pattern': '0', + 'pattern_type_id': 1, + 'status': 0, + 'strategy_id': 2147483648 + }) + assert response.status_code == 200 diff --git a/test/iast/views/test_vul_details.py 
b/test/iast/views/test_vul_details.py new file mode 100644 index 000000000..508b3b5bb --- /dev/null +++ b/test/iast/views/test_vul_details.py @@ -0,0 +1,36 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : test_vul_details +# @created : 星期三 12月 08, 2021 15:44:10 CST +# +# @description : +###################################################################### + + +from rest_framework.test import APITestCase +from dongtai_common.models.server import IastServer +from dongtai_common.models.user import User +from dongtai_web.views.vul_details import VulDetail + + +class VulDetailTestCase(APITestCase): + def login(self): + self.user = User.objects.filter(pk=1).first() + self.client.force_authenticate(user=self.user) + + def setUp(self): + self.login() + self.mockdata() + + def mockdata(self): + self.server = IastServer.objects.create( + hostname='DESKTOP-JLVFSOV-test', + ip='0.0.0.0', + port=22, + container=None) + + + def test_get_server(self): + obj = VulDetail() + obj.server = self.server + assert obj.get_server() diff --git a/test/iast/views/test_vul_summary.py b/test/iast/views/test_vul_summary.py new file mode 100644 index 000000000..8d19dd35c --- /dev/null +++ b/test/iast/views/test_vul_summary.py @@ -0,0 +1,71 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : test_vul_summary +# @created : 星期三 12月 08, 2021 14:43:47 CST +# +# @description : +###################################################################### + + +from rest_framework.test import APITestCase +from django.urls import include, path, reverse +from dongtai_common.models.user import User +from dongtai_common.models.agent import IastAgent +from dongtai_common.models.vulnerablity import IastVulnerabilityModel +from dongtai_common.models.hook_type import HookType +import time +import unittest + +@unittest.skip("waiting for rebuild mock data") +class ScanStrategyTestCase(APITestCase): + def setUp(self): + self.user = User.objects.filter(pk=1).first() + self.client.force_authenticate(user=self.user) + agent = IastAgent.objects.create(token='testtoken', + version='121231', + latest_time=int(time.time()), + user=self.user, + is_running=1, + bind_project_id=-1, + project_name='test', + control=0, + is_control=0, + is_core_running=1, + online=1, + project_version_id=1, + language='NGUAGE', + is_audit=1) + vuln = IastVulnerabilityModel.objects.create( + level_id=1, + url='', + uri='', + http_method='', + http_scheme='', + http_protocol='', + req_header='', + req_params='', + req_data='', + res_header='', + res_body='', + full_stack='', + top_stack='', + bottom_stack='', + taint_value='', + taint_position='', + agent=agent, + context_path='', + counts=1, + first_time=int(time.time()), + latest_time=int(time.time()), + client_ip='0', + param_name='', + method_pool_id=1, + strategy_id=-1, + hook_type_id=1, + status_id=1) + self.mockdata = [agent, vuln] + + def test_create(self): + response = self.client.get('/api/v1/vuln/summary') + assert response.status_code == 200 + diff --git a/test/iast/views/vul_request_replay_test.py b/test/iast/views/vul_request_replay_test.py new file mode 100644 index 000000000..c1657550c --- /dev/null +++ b/test/iast/views/vul_request_replay_test.py @@ -0,0 +1,68 @@ +import unittest + +from test import DongTaiTestCase + + +class MyTestCase(DongTaiTestCase): + def test_something(self): + self.assertEqual(True, False) + + def test_HttpRequest(self): + from 
dongtai_web.views.vul_request_replay import HttpRequest + raw_request = 'POST /system/role/list HTTP/1.1\n' \ 'host:localhost\n' \ 'connection:keep-alive\n' \ 'content-length:125\n' \ 'sec-ch-ua:\" Not;A Brand\";v=\"99\", \"Google Chrome\";v=\"91\", \"Chromium\";v=\"91\"\n' \ 'accept:application/json, text/javascript, */*; q=0.01\n' \ 'x-requested-with:XMLHttpRequest\n' \ 'sec-ch-ua-mobile:?0\n' \ 'user-agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 ' \ '(KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36\n' \ 'content-type:application/x-www-form-urlencoded\n' \ 'origin:http://localhost\n' \ 'sec-fetch-site:same-origin\n' \ 'sec-fetch-mode:cors\n' \ 'sec-fetch-dest:empty\n' \ 'referer:http://localhost/system/role\n' \ 'accept-encoding:gzip, deflate, br\n' \ 'accept-language:zh-CN,zh;q=0.9\n' \ 'cookie:JSESSIONID=ec213e6e-ff23-42f6-b8f9-079eeb00d1a8\n\n' \ 'pageSize=10&pageNum=1&orderByColumn=roleSort&isAsc=asc&roleName=&roleKey=&status=&' \ 'params[beginTime]=&params[endTime]=' + request = HttpRequest(raw_request) + print(request) + + def test_split_request(self): + raw_request = 'POST /system/role/list?name=123 HTTP/1.1\n' \ 'host:127.0.0.1:8002\n' \ 'connection:keep-alive\n' \ 'content-length:125\n' \ 'sec-ch-ua:\" Not;A Brand\";v=\"99\", \"Google Chrome\";v=\"91\", \"Chromium\";v=\"91\"\n' \ 'accept:application/json, text/javascript, */*; q=0.01\n' \ 'x-requested-with:XMLHttpRequest\n' \ 'sec-ch-ua-mobile:?0\n' \ 'user-agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 ' \ '(KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36\n' \ 'content-type:application/x-www-form-urlencoded\n' \ 'origin:http://localhost\n' \ 'sec-fetch-site:same-origin\n' \ 'sec-fetch-mode:cors\n' \ 'sec-fetch-dest:empty\n' \ 'referer:http://localhost/system/role\naccept-encoding:gzip, deflate, br\n' \ 'accept-language:zh-CN,zh;q=0.9\n' \ 'cookie:JSESSIONID=ec213e6e-ff23-42f6-b8f9-079eeb00d1a8\n\n' \ 'pageSize=10&pageNum=1&orderByColumn=roleSort&isAsc=asc&roleName=&roleKey=&' \ 'status=&params[beginTime]=&params[endTime]=' + from dongtai_web.views.vul_request_replay import HttpRequest + request = HttpRequest(raw_request) + print(request.command) + print(request.uri) + print(request.request_version) + print(request.headers.as_string().strip()) + print(request.params) + print(request.body) + + +if __name__ == '__main__': + unittest.main() diff --git a/test/init.sql b/test/init.sql new file mode 100644 index 000000000..94fd61299 --- /dev/null +++ b/test/init.sql @@ -0,0 +1,4 @@ +INSERT INTO authtoken_token +(`key`, created, user_id) +VALUES('67aebd78e700ad36a82a152276196b5f49fafeb0', '2021-07-21 12:10:46.220431000', 1); + diff --git a/test/schemathesishooks.py b/test/schemathesishooks.py new file mode 100644 index 000000000..0c4a28dde --- /dev/null +++ b/test/schemathesishooks.py @@ -0,0 +1,11 @@ + + +import time +import schemathesis + + +@schemathesis.hooks.register +def add_case(context, case, response): + time.sleep(0.1) + return None + diff --git a/test/signals/__init__.py b/test/signals/__init__.py new file mode 100644 index 000000000..f5c6bd0e2 --- /dev/null +++ b/test/signals/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/5/6 上午11:31 +# project: dongtai-engine diff --git a/test/signals/handlers/__init__.py b/test/signals/handlers/__init__.py new file mode 100644 index 000000000..f5c6bd0e2 --- /dev/null +++ 
b/test/signals/handlers/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/5/6 上午11:31 +# project: dongtai-engine diff --git a/test/signals/handlers/vul_handler.py b/test/signals/handlers/vul_handler.py new file mode 100644 index 000000000..291bb6334 --- /dev/null +++ b/test/signals/handlers/vul_handler.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/5/6 上午11:35 +# project: dongtai-engine +import unittest + +from test import DongTaiTestCase + + +class VulHandlerTest(DongTaiTestCase): + def test_send_vul_notify(self): + from dongtai_common.models.vulnerablity import IastVulnerabilityModel + vul = IastVulnerabilityModel.objects.filter(id=2208).first() + from dongtai_engine.signals.handlers import send_vul_notify + send_vul_notify(vul) + + def test_create_notify_config(self): + web_hook_config = { + 'url': 'https://open.feishu.cn/open-apis/bot/v2/hook/af91727f-7287-427e-8206-78f4e65d1fe5', + 'template': 'url:{{url}}\n漏洞类型:{{vul_type}}\n账号:{{username}}\n项目:{{project}}' + } + import json + from dongtai_common.models.notify_config import IastNotifyConfig + notify_config = IastNotifyConfig.objects.create( + notify_type=IastNotifyConfig.WEB_HOOK, + notify_metadata=json.dumps(web_hook_config), + user_id=18 + ) + + + +if __name__ == '__main__': + unittest.main() diff --git a/test/task.py b/test/task.py new file mode 100644 index 000000000..933232f1d --- /dev/null +++ b/test/task.py @@ -0,0 +1,69 @@ +import unittest + +from dongtai_engine.tasks import heartbeat, search_vul_from_method_pool +from test import DongTaiTestCase + + +class MyTestCase(DongTaiTestCase): + + def test_celery_beat(self): + from django_celery_beat.models import PeriodicTask, IntervalSchedule + schedule, created = IntervalSchedule.objects.get_or_create(every=1, period=IntervalSchedule.HOURS, ) + + import json + heartbeat_task = PeriodicTask.objects.filter(name='engine.heartbeat').first() + if heartbeat_task: + heartbeat_task.task = 'dongtai_engine.tasks.heartbeat' + heartbeat_task.interval = schedule + heartbeat_task.save(update_fields=['task', 'interval']) + else: + PeriodicTask.objects.create( + interval=schedule, + name='engine.heartbeat', + task='dongtai_engine.tasks.heartbeat', + args=json.dumps([]), + ) + + schedule, created = IntervalSchedule.objects.get_or_create(every=5, period=IntervalSchedule.MINUTES, ) + update_agent_task = PeriodicTask.objects.filter(name='engine.update_agent_status').first() + if update_agent_task: + update_agent_task.task = 'dongtai_engine.tasks.update_agent_status' + update_agent_task.interval = schedule + update_agent_task.save() + else: + PeriodicTask.objects.create( + interval=schedule, + name='engine.update_agent_status', + task='dongtai_engine.tasks.update_agent_status', + args=json.dumps([]), + ) + + schedule, created = IntervalSchedule.objects.get_or_create(every=1, period=IntervalSchedule.DAYS, ) + update_sca_task = PeriodicTask.objects.filter(name='engine.update_sca').first() + if update_sca_task: + update_sca_task.task = 'dongtai_engine.tasks.update_sca' + update_sca_task.interval = schedule + update_sca_task.save() + else: + PeriodicTask.objects.create( + interval=schedule, + name='engine.update_sca', + task='dongtai_engine.tasks.update_sca', + args=json.dumps([]), + ) + + def test_agent_status_update(self): + from dongtai_engine.tasks import update_agent_status + update_agent_status() + + def test_heart_beat(self): + import os + 
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dongtai_conf.settings") + os.environ.setdefault("debug", "true") + import django + django.setup() + heartbeat() + + +if __name__ == '__main__': + unittest.main() diff --git a/test/testcase.py b/test/testcase.py new file mode 100644 index 000000000..71abc507d --- /dev/null +++ b/test/testcase.py @@ -0,0 +1,93 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : tesecase +# @created : Monday Aug 09, 2021 16:50:19 CST +# +# @description : +###################################################################### + +import django +from rest_framework.test import APITestCase +from dongtai_web.views.documents import DocumentsEndpoint +from django.urls import reverse +from urllib.parse import urlencode +from rest_framework.serializers import SerializerMetaclass +from rest_framework.serializers import CharField, IntegerField +from django.contrib.auth import get_user_model +from ddt import ddt, data, file_data, unpack, idata +from itertools import product + + +def fuzz_test_data(end_point, httpmethod): + method = getattr(end_point, httpmethod) + queryfield = [] + data_tuple = [] + for query in method.querys: + if isinstance(query, SerializerMetaclass): + fields = query().get_fields() + for k, v in fields.items(): + queryfield.append(k) + if isinstance(v, IntegerField): + data_tuple.append([0, -1, '', '1', '-1', 0, 'ale']) + elif isinstance(v, CharField): + data_tuple.append([0, -1, '', '1', '-1', 0, 'ale']) + elif isinstance(query, dict): + queryfield.append(query['name']) + if query['type'] == int: + data_tuple.append([0, -1, '', '1', '-1', 0, 'alw']) + li = list(product(*data_tuple)) + return li + + +#@ddt +#class DocumentsEndpointTests(APITestCase): +# def setUp(self): +# self.url = '/api/v1/documents' +# self.view = DocumentsEndpoint +# self.method = list( +# filter(lambda x: x in ['get', 'post'], dir(DocumentsEndpoint))) +# self.httpmethod = getattr(DocumentsEndpoint, self.method[0]) +# user_model = get_user_model() +# self.client.force_login(user_model.objects.first()) +# +# def test_documents_retrive(self): +# response = self.client.get(self.url, {'language': 'python'}) +# self.assertEqual(response.status_code, 200) +# +# @data('python', 'java') +# def test_documents_retrive(self, value): +# response = self.client.get(self.url, {'language': value}) +# self.assertEqual(response.status_code, 200) +# +# @idata([{'language': 'python'}, {'language': 'java'}]) +# def test_documents_retrive2(self, value): +# response = self.client.get(self.url, value) +# self.assertEqual(response.status_code, 200) +# +# def test_edge_case(self): +# data = {} +# for i in self.httpmethod.querys: +# if isinstance(i, SerializerMetaclass): +# fields = i().get_fields() +# for k, v in fields.items(): +# if isinstance(v, IntegerField): +# data[k] = 0 +# if isinstance(v, CharField): +# data[k] = '' +# getattr(self.client, self.method[0])(self.url, data) +# +# def test_documents_retrive22(self): +# response = self.client.get(self.url, {'language': 'python'}) +# self.assertEqual(response.status_code, 200) +#from django.urls import resolve +# +#class DocumentsEndpointTests(APITestCase): +# def setUp(self): +# self.urls = ['/api/v1/documents', '/api/v1/api_route/search'] +# url = '/api/v1/documents' +# view = resolve(url).func.view_class +# self.method = list( +# filter(lambda x: x in ['get', 'post'], dir(view))) +# self.httpmethod = getattr(DocumentsEndpoint, self.method[0]) +# user_model = get_user_model() 
+# self.client.force_login(user_model.objects.first()) diff --git a/test/utils.py b/test/utils.py new file mode 100644 index 000000000..e46462272 --- /dev/null +++ b/test/utils.py @@ -0,0 +1,60 @@ +###################################################################### +# @author : bidaya0 (bidaya0@$HOSTNAME) +# @file : utils +# @created : Monday Sep 27, 2021 18:03:20 CST +# +# @description : +###################################################################### + + +from random import choice +from rest_framework.serializers import ChoiceField +from faker import Faker +from rest_framework.serializers import SerializerMetaclass +from rest_framework.serializers import CharField, IntegerField +from rest_framework import serializers +from django.utils.translation import gettext_lazy as _ +fake = Faker() +def _datagen_serializer(ser): + fields = ser.get_fields() + data = {} + for field in fields: + data[field] = data_gen_route(fields[field]) + return data + + +def data_gen_route(obj): + if isinstance(obj, IntegerField): + return _datagen_int(obj) + elif isinstance(obj, CharField): + return _datagen_char(obj) + elif isinstance(obj, ChoiceField): + return _datagen_choice(obj) + elif isinstance(obj, serializers.Serializer): + return _datagen_serializer(obj) + print(type(obj)) + + +def _datagen_int(field: IntegerField) -> int: + max_value = 255 + min_value = -255 + if hasattr(field, 'max_value') and field.max_value is not None: + max_value = field.max_value + if hasattr(field, 'min_value') and field.min_value is not None: + min_value = field.min_value + return fake.pyint(min(max_value, min_value), max(max_value, min_value)) + + +def _datagen_char(field: CharField) -> str: + max_length = 255 + min_length = 0 + if hasattr(field, 'max_length') and field.max_length is not None: + max_length = field.max_length + if hasattr(field, 'min_length') and field.min_length is not None: + min_length = field.min_length + return fake.pystr(min(max_length, min_length), max(max_length, min_length)) + + +def _datagen_choice(field: ChoiceField): + return choice(list(field.choices.values())) + diff --git a/test/vuln/__init__.py b/test/vuln/__init__.py new file mode 100644 index 000000000..027184f10 --- /dev/null +++ b/test/vuln/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- +# author: owefsad@huoxian.cn +# datetime: 2021/8/25 下午4:20 +# project: dongtai-engine diff --git a/test/vuln/health.py b/test/vuln/health.py new file mode 100644 index 000000000..bcbab4b63 --- /dev/null +++ b/test/vuln/health.py @@ -0,0 +1,53 @@ +import unittest + +from test import DongTaiTestCase + + +class MyTestCase(DongTaiTestCase): + def test_something(self): + self.assertEqual(True, False) + + def test_mock(self): + import redis + mock_data = { + "dongtai_engine": { + "status": 1 + }, + "engine_monitoring_indicators": [ + { + "key": "dongtai-replay-vul-scan", + "value": 11, + "name": "dongtai-replay-vul-scan" + }, + { + "key": "dongtai_method_pool_scan", + "value": 11, + "name": "dongtai-method-pool-scan" + }, + ], + } + # 读取数据库中的redis键,然后查找队列大小 + from dongtai_common.models.engine_monitoring_indicators import IastEnginMonitoringIndicators + monitor_models = IastEnginMonitoringIndicators.objects.all() + if monitor_models.values('id').count() > 0: + from dongtai_conf import settings + redis_cli = redis.StrictRedis( + host=settings.config.get("redis", 'host'), + password=settings.config.get("redis", 'password'), + port=settings.config.get("redis", 'port'), + db=settings.config.get("redis", 'db'), + ) + + monitor_models 
= monitor_models.values('key', 'name') + mock_data['engine_monitoring_indicators'] = list() + for monitor_model in monitor_models: + mock_data['engine_monitoring_indicators'].append({ + 'key': monitor_model['key'], + 'name': monitor_model['name'], + 'value': redis_cli.llen(monitor_model['key']) + }) + print(mock_data) + + +if __name__ == '__main__': + unittest.main()
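
The startup-time test in test/apiserver/test_agent_startuptime.py posts the same payload twice, once as ordinary form data and once as a gzip-compressed JSON body with HTTP_CONTENT_ENCODING='gzip'. The gzipdata helper it imports from test/apiserver/test_agent_base.py is not part of this diff, so the following is only a minimal standard-library sketch of what such a helper plausibly does; the function name gzip_json_payload is illustrative, not the project's.

import gzip
import json


def gzip_json_payload(data: dict) -> bytes:
    # Serialize to JSON, then gzip-compress so the body can be posted with
    # content_type='application/json' and HTTP_CONTENT_ENCODING='gzip'.
    return gzip.compress(json.dumps(data).encode("utf-8"))


if __name__ == "__main__":
    payload = gzip_json_payload({"agentId": 1, "startupTime": 448})
    # Round-trip to confirm the compressed body decodes back to the original JSON.
    assert json.loads(gzip.decompress(payload)) == {"agentId": 1, "startupTime": 448}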
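
Both the inline HttpRequest helper in test/core/tasks.py and the vul_request_replay tests rebuild a request object from a raw HTTP request string. The trick is to hand the raw bytes to BaseHTTPRequestHandler through a BytesIO pseudo-socket and let the standard library parse the request line and headers. The standalone sketch below mirrors that pattern only; RawRequestParser is an illustrative name, and the real dongtai_web.views.vul_request_replay.HttpRequest may expose different attributes.

from http.server import BaseHTTPRequestHandler
from io import BytesIO


class RawRequestParser(BaseHTTPRequestHandler):
    # Bypass the normal socket-based constructor and parse a raw request string.
    def __init__(self, raw_request: str):
        self.rfile = BytesIO(raw_request.encode())
        self.raw_requestline = self.rfile.readline()
        self.error_code = self.error_message = None
        self.parse_request()                    # fills self.command, self.path, self.headers
        self.body = self.rfile.read().decode()  # everything after the blank line


if __name__ == "__main__":
    raw = ("POST /system/role/list?name=123 HTTP/1.1\n"
           "host:localhost\n"
           "content-type:application/x-www-form-urlencoded\n"
           "\n"
           "pageSize=10&pageNum=1")
    req = RawRequestParser(raw)
    print(req.command, req.path, req.headers.get("host"), req.body)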
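
test_check_id_card in test/core/plugins/strategy_sentive.py expects check_id_card to accept the two well-formed 18-digit numbers and reject the altered one. The implementation in dongtai_engine.plugins.strategy_sensitive is not shown in this diff; the following is a checksum-only sketch that is consistent with those fixtures (the real function may additionally validate the region and birthdate fields), assuming the standard GB 11643-1999 check digit.

# Check-digit weights for the first 17 digits and the mod-11 check-digit map.
WEIGHTS = (7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2)
CHECK_DIGITS = "10X98765432"


def id_card_checksum_ok(number: str) -> bool:
    # Accept only 18-character IDs whose last character matches the weighted checksum.
    if len(number) != 18 or not number[:17].isdigit():
        return False
    total = sum(int(digit) * weight for digit, weight in zip(number[:17], WEIGHTS))
    return CHECK_DIGITS[total % 11] == number[17].upper()


if __name__ == "__main__":
    # Fixtures taken from test_check_id_card above.
    assert id_card_checksum_ok("510103196502083435")
    assert id_card_checksum_ok("510103196608150034")
    assert not id_card_checksum_ok("510103296502083435")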