diff --git a/.dockerignore b/.dockerignore index 9dd136ea8..bfaf8419a 100644 --- a/.dockerignore +++ b/.dockerignore @@ -4,15 +4,17 @@ .mypy_cache .scannerwork .vscode -.src/flake8 .pre-commit-config.yaml .gitleaksignore .coveragerc -src/mypy.ini +.semgrepignore +src/backend/.mypy.ini +src/backend/requirements-dev.txt +src/backend/tests/* *.md -src/reports/ -src/wordlists/ -src/logs/ +LICENSE.txt +reports/ +wordlists/ +logs/ src/frontend/node_modules/* -src/backend/testing/* .DS_Store \ No newline at end of file diff --git a/.github/workflows/code-style-backend.yml b/.github/workflows/code-style-backend.yml deleted file mode 100644 index 23f3fad55..000000000 --- a/.github/workflows/code-style-backend.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Backend style -on: - workflow_dispatch: - pull_request: - paths: - - 'src/backend/**' - -jobs: - flake8: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: Flake8 check - uses: valentijnscholten/flake8-your-pr@master - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - mypy: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - uses: actions/setup-python@v4 - with: - python-version: '3.8' - - - name: Install Python dependencies - run: | - python -m pip install -U pip - python -m pip install -r src/backend/requirements.txt - - - name: Install MyPy - run: python3 -m pip install mypy==0.931 - - - name: MyPy check - run: mypy --namespace-packages --package rekono --install-types --non-interactive diff --git a/.github/workflows/code-style-frontend.yml b/.github/workflows/code-style-frontend.yml deleted file mode 100644 index f7d94d5ce..000000000 --- a/.github/workflows/code-style-frontend.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Frontend style -on: - workflow_dispatch: - pull_request: - paths: - - 'src/frontend/**' - -jobs: - eslint: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: 
actions/checkout@v3 - with: - fetch-depth: 0 - - - name: Install ESLint - working-directory: src/frontend - run: | - npm install . - npm install -g eslint - - - name: ESLint check - run: eslint src/frontend/ --ext .js,.jsx,.ts,.tsx diff --git a/.github/workflows/code-style.yml b/.github/workflows/code-style.yml new file mode 100644 index 000000000..e9f318f58 --- /dev/null +++ b/.github/workflows/code-style.yml @@ -0,0 +1,76 @@ +name: Code style +on: + workflow_dispatch: + pull_request: + +jobs: + backend: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: + - tool: black + arguments: --check src/backend/ + working_directory: . + - tool: isort + arguments: src/backend/ --check-only + working_directory: . + - tool: mypy + arguments: --namespace-packages --package backend --install-types --non-interactive + working_directory: ./src + - tool: flake8 + arguments: --ignore=E501 src/backend + working_directory: . + name: ${{ matrix.tool }} + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install Python dependencies + run: | + python -m pip install -U pip + python -m pip install -r src/backend/requirements-dev.txt + + - uses: dorny/paths-filter@3c49e64ca26115121162fb767bc6af9e8d059f1a + id: changes + with: + filters: | + backend: + - 'src/backend/**' + + - name: Check + working-directory: ${{ matrix.working_directory }} + if: ${{ steps.changes.outputs.backend == 'true' || github.event_name != 'pull_request' }} + run: ${{ matrix.tool }} ${{ matrix.arguments }} + + frontend: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Install ESLint + working-directory: src/frontend + run: | + npm install . 
+ npm install -g eslint + + - uses: dorny/paths-filter@3c49e64ca26115121162fb767bc6af9e8d059f1a + id: changes + with: + filters: | + frontend: + - 'src/frontend/**' + + - name: ESLint check + if: ${{ steps.changes.outputs.frontend == 'true' || github.event_name != 'pull_request' }} + run: eslint src/frontend/ --ext .js,.jsx,.ts,.tsx \ No newline at end of file diff --git a/.github/workflows/desktop.yml b/.github/workflows/desktop.yml index ae5b3f6dc..408646ccd 100644 --- a/.github/workflows/desktop.yml +++ b/.github/workflows/desktop.yml @@ -1,4 +1,4 @@ -name: Desktop app +name: Desktop on: release: types: [published] diff --git a/.github/workflows/security-containers.yml b/.github/workflows/security-containers.yml index 16c420d1d..81c251f3d 100644 --- a/.github/workflows/security-containers.yml +++ b/.github/workflows/security-containers.yml @@ -21,7 +21,7 @@ jobs: - name: Scan Nginx image with Trivy continue-on-error: true - uses: aquasecurity/trivy-action@master + uses: aquasecurity/trivy-action@91713af97dc80187565512baba96e4364e983601 with: image-ref: rekono-nginx format: table @@ -29,7 +29,7 @@ jobs: - name: Scan Kali image with Trivy continue-on-error: true - uses: aquasecurity/trivy-action@master + uses: aquasecurity/trivy-action@91713af97dc80187565512baba96e4364e983601 with: image-ref: rekono-kali format: table @@ -37,7 +37,7 @@ jobs: - name: Scan Backend image with Trivy continue-on-error: true - uses: aquasecurity/trivy-action@master + uses: aquasecurity/trivy-action@91713af97dc80187565512baba96e4364e983601 with: image-ref: rekono-backend format: table @@ -45,7 +45,7 @@ jobs: - name: Scan Frontend image with Trivy continue-on-error: true - uses: aquasecurity/trivy-action@master + uses: aquasecurity/trivy-action@91713af97dc80187565512baba96e4364e983601 with: image-ref: rekono-frontend format: table @@ -77,7 +77,7 @@ jobs: - name: Scan Debian image with Trivy continue-on-error: true - uses: aquasecurity/trivy-action@master + uses: 
aquasecurity/trivy-action@91713af97dc80187565512baba96e4364e983601 with: image-ref: rekono-debian format: table diff --git a/.github/workflows/security-sast.yml b/.github/workflows/security-sast.yml index c13b5ef0e..d516ec6d6 100644 --- a/.github/workflows/security-sast.yml +++ b/.github/workflows/security-sast.yml @@ -2,38 +2,79 @@ name: SAST on: workflow_dispatch: pull_request: - paths: - - '.github/workflows/**' - - 'src/**' jobs: - semgrep: - name: Semgrep + gitleaks: + name: GitLeaks runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v3 with: fetch-depth: 0 - + + - uses: gitleaks/gitleaks-action@4df650038e2eb9f7329218df929c2780866e61a3 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITLEAKS_NOTIFY_USER_LIST: "@pablosnt" + GITLEAKS_ENABLE_COMMENTS: true + GITLEAKS_ENABLE_UPLOAD_ARTIFACT: true + GITLEAKS_ENABLE_SUMMARY: true + + sast: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: + - name: Semgrep Backend + tool: semgrep + path: src/backend + report: semgrep-backend.json + arguments: --config=auto --error --json + - name: Semgrep CI/CD + tool: semgrep + path: .github/workflows + report: semgrep-cicd.json + arguments: --config=auto --error --json + - name: Bandit + tool: bandit + path: src/backend + report: bandit.json + arguments: -r -f json + name: ${{ matrix.name }} + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Setup Python 3 uses: actions/setup-python@v4 with: - python-version: 3.7 - - - name: Install Semgrep - run: pip install semgrep + python-version: 3.11 - - name: Scan code - run: semgrep --config=auto --error --json -o semgrep_code.json src/ + - uses: dorny/paths-filter@3c49e64ca26115121162fb767bc6af9e8d059f1a + id: changes + name: Path filter + with: + filters: | + path: + - '${{ matrix.path }}/**' + + - name: Installation + if: ${{ steps.changes.outputs.path == 'true' || github.event_name != 'pull_request' }} + run: pip install ${{ matrix.tool }} - - 
name: Scan workflows - run: semgrep --config=auto --error --json -o semgrep_cicd.json .github/workflows/ + - name: Scan + if: ${{ steps.changes.outputs.path == 'true' || github.event_name != 'pull_request' }} + run: ${{ matrix.tool }} ${{ matrix.arguments }} -o ${{ matrix.report }} ${{ matrix.path }} - - name: Upload Semgrep report as GitHub artifact - if: ${{ always() }} + - name: Upload report as GitHub artifact + if: ${{ !cancelled() && (steps.changes.outputs.path == 'true' || github.event_name != 'pull_request') }} uses: actions/upload-artifact@v3 with: - name: Semgrep - path: semgrep_*.json - if-no-files-found: warn \ No newline at end of file + name: ${{ matrix.tool }} + path: ${{ matrix.report }} + if-no-files-found: warn + \ No newline at end of file diff --git a/.github/workflows/security-secrets.yml b/.github/workflows/security-secrets.yml deleted file mode 100644 index f8c4992ee..000000000 --- a/.github/workflows/security-secrets.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: Secrets -on: - workflow_dispatch: - pull_request: - -jobs: - gitleaks: - name: GitLeaks - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - uses: gitleaks/gitleaks-action@v2 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITLEAKS_NOTIFY_USER_LIST: "@pablosnt" - GITLEAKS_ENABLE_COMMENTS: true - GITLEAKS_ENABLE_UPLOAD_ARTIFACT: true - GITLEAKS_ENABLE_SUMMARY: true diff --git a/.github/workflows/security-ssc.yml b/.github/workflows/security-ssc.yml new file mode 100644 index 000000000..5947bf79e --- /dev/null +++ b/.github/workflows/security-ssc.yml @@ -0,0 +1,19 @@ +name: Software Supply Chain +on: + workflow_dispatch: + schedule: + - cron: '0 0 * * *' + pull_request: + +jobs: + legitify: + name: Legitify + runs-on: ubuntu-latest + environment: github + steps: + - name: Legitify + uses: Legit-Labs/legitify@d64d18810d9093458f11731c3a0a36d7e573187e + with: + github_token: ${{ secrets.ADMIN_PAT }} + analyze_self_only: true + 
artifact_name: legitify diff --git a/.github/workflows/unit-testing.yml b/.github/workflows/unit-testing.yml deleted file mode 100644 index 3226b5efb..000000000 --- a/.github/workflows/unit-testing.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Unit tests -on: - workflow_dispatch: - pull_request: - paths: - - 'src/backend/**' - -env: - REQUIRED_COVERAGE: 95 - -jobs: - unit-tests: - name: Unit tests - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: Install and start Redis server - run: | - sudo apt update -y - sudo apt install redis-server -y - sudo systemctl start redis-server - - - name: Install Nmap to check its installation - run: sudo apt install nmap -y - - - name: Install Dirsearch to check its installation - run: | - git clone https://github.com/maurosoria/dirsearch.git - ln -s dirsearch/dirsearch.py /usr/local/bin/dirsearch - - - name: Install GitLeaks to check its installation - run: | - wget https://github.com/zricethezav/gitleaks/releases/download/v8.5.1/gitleaks_8.5.1_linux_x64.tar.gz - tar -xvf gitleaks_8.5.1_linux_x64.tar.gz - chmod +x gitleaks - mv gitleaks /usr/local/bin/ - - - uses: actions/setup-python@v4 - with: - python-version: '3.9' - - - name: Install Python dependencies - run: python3 -m pip install -r src/backend/requirements.txt - - - name: Run unit tests - working-directory: src/backend - run: coverage run manage.py test - - - name: Check coverage - working-directory: src/backend - run: coverage report -m --skip-covered --omit="telegram_bot/*" --fail-under=$REQUIRED_COVERAGE diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml new file mode 100644 index 000000000..1dc76e96f --- /dev/null +++ b/.github/workflows/unit-tests.yml @@ -0,0 +1,40 @@ +name: Unit tests +on: + workflow_dispatch: + pull_request: + paths: + - 'src/backend/**' + +env: + REQUIRED_COVERAGE: 90 + +jobs: + unit-tests: + name: Unit tests + runs-on: ubuntu-latest + steps: + - name: 
Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Install and start Redis server + run: | + sudo apt update -y + sudo apt install redis-server -y + sudo systemctl start redis-server + + - uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install Python dependencies + run: python3 -m pip install -r src/backend/requirements-dev.txt + + - name: Run unit tests + working-directory: src/backend + run: coverage run manage.py test + + - name: Check coverage + working-directory: src/backend + run: coverage report -m --skip-covered --omit="tests/*,platforms/telegram_app/bot/*,platforms/telegram_app/framework.py,platforms/**/notifications.py" --fail-under=$REQUIRED_COVERAGE diff --git a/.gitignore b/.gitignore index 94bdadf61..9736d1097 100644 --- a/.gitignore +++ b/.gitignore @@ -132,10 +132,11 @@ dmypy.json .DS_Store .vscode/ .scannerwork/ -/reports/ -/wordlists/ -/logs/ +reports/ +wordlists/ +logs/ /static/ +/src/backend/tests/home/ # Vue.JS node_modules/ @@ -156,4 +157,8 @@ yarn-error.log* # Debian package *.desktop rekono-kbx -*.kaboxer.yaml \ No newline at end of file +*.kaboxer.yaml + +# Temporal ignore +src/backend-1.x/ +migrations/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2ffbf1ca5..8ca03fb9d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,10 +9,10 @@ repos: require_serial: true verbose: true - repo: https://github.com/gitleaks/gitleaks - rev: v8.16.1 + rev: v8.17.0 hooks: - id: gitleaks - - repo: https://github.com/PyCQA/flake8 - rev: 4.0.1 + - repo: https://github.com/python/black.git + rev: 23.7.0 hooks: - - id: flake8 + - id: black diff --git a/CHANGELOG.md b/CHANGELOG.md index 5cf6d7ee3..321660649 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,48 @@ All notable changes to this project will be documented in this file. 
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [2.0.0] - + +### Added + +- Optimize, improve, clean and test source code (https://github.com/pablosnt/rekono/issues/222) +- Remove tasks status (https://github.com/pablosnt/rekono/issues/222) +- Remove steps priority (https://github.com/pablosnt/rekono/issues/222) +- New target ports path to limit executions to it (https://github.com/pablosnt/rekono/issues/222) +- New skipped reason field for skipped executions (https://github.com/pablosnt/rekono/issues/222) +- New executions group to aggregate those that can be executed at the same time (https://github.com/pablosnt/rekono/issues/222) +- Keep tool versions updated from the system in the database (https://github.com/pablosnt/rekono/issues/222) +- Configure Defect-Dojo product type at Rekono project level (https://github.com/pablosnt/rekono/issues/222) +- Add Rekono project tags to Defect-Dojo products (https://github.com/pablosnt/rekono/issues/222) + +### Security + +- New target blacklist configurable by the administrators to prevent scans on Rekono and internal components (https://github.com/pablosnt/rekono/issues/222) +- New user-handled API tokens (https://github.com/pablosnt/rekono/issues/222) +- New configuration property to enable the encryption of sensitive data like Defect-Dojo API keys, Telegram tokens, or authentication credentials when stored in the database (https://github.com/pablosnt/rekono/issues/222) +- Close user sessions and unlink the Telegram bot when the user password changes (https://github.com/pablosnt/rekono/issues/222) +- Store hashed user One-Time Passwords with SHA-512 in the database (https://github.com/pablosnt/rekono/issues/222) + +### Fixed + +- [**BREAKING**] Upgrade required `Python` version to `3.11` (https://github.com/pablosnt/rekono/issues/222) +- [**BREAKING**] Remove deprecated settings 
(https://github.com/pablosnt/rekono/issues/222) +- Send emails using a new thread instead of the `emails-queue` (https://github.com/pablosnt/rekono/issues/222) +- Fix the creation of multiple engagements in Defect-Dojo to import scans from the same target (https://github.com/pablosnt/rekono/issues/222) +- Remove duplicated API field to sort the data (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `Django` version to `5.0` (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `djangorestframework` version to `3.14.0` (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `djangorestframework-simplejwt` version to `5.3.1` (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `django-filter` version to `23.5` (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `django-rq` version to `2.10.1` (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `django-taggit` version to `5.0.1` (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `drf-spectacular` version to `0.27.0` (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `pycryptodome` version to `3.19.0` (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `psycopg2-binary` version to `2.9.9` (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `pyjwt` version to `2.8.0` (https://github.com/pablosnt/rekono/issues/222) +- Upgrade `python-magic` version to `0.4.27` (https://github.com/pablosnt/rekono/issues/222) + + ## [1.6.1] - 2023-05-31 ### Security diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index bb503248c..b4301ff35 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -50,7 +50,7 @@ New Rekono contributions should tested using unit tests. This project has the following checks in _Continuous Integration_: -1. `Code style`: check the source code style using `mypy`, `flake8` and `eslint`. +1. `Code style`: check the source code style using `mypy`, `black` and `eslint`. 2. 
`Desktop applications`: generate installers for Rekono Desktop in Linux, MacOS and Windows. diff --git a/README.md b/README.md index d678a5088..faf78235d 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@
-
-
+
+
@@ -11,9 +11,6 @@
-
-
-
diff --git a/src/config.yaml b/config.yaml
similarity index 65%
rename from src/config.yaml
rename to config.yaml
index f8c69ead8..ee3e984ad 100644
--- a/src/config.yaml
+++ b/config.yaml
@@ -1,33 +1,28 @@
-rootpath: null
-frontend:
- url: https://127.0.0.1
-security:
- # secret-key:
- allowed-hosts:
- - 127.0.0.1
- - localhost
- - ::1
database:
- name: rekono
- # user:
- # password:
host: 127.0.0.1
+ name: rekono
+ password: null
port: 5432
+ user: null
+frontend:
+ url: https://127.0.0.1
+rootpath: null
rq:
host: 127.0.0.1
port: 6379
-email:
- # host:
- # port:
- # user:
- # password:
- tls: true
+security:
+ allowed-hosts:
+ - 127.0.0.1
+ - localhost
+ - ::1
+ encryption-key: null
+ secret-key: null
tools:
cmseek:
directory: /usr/share/cmseek
+ gittools:
+ directory: /opt/GitTools
log4j-scan:
directory: /opt/log4j-scan
spring4shell-scan:
directory: /opt/spring4shell-scan
- gittools:
- directory: /opt/GitTools
\ No newline at end of file
diff --git a/docker/Dockerfile.backend b/docker/Dockerfile.backend
index 31760cf29..e0af7ab48 100644
--- a/docker/Dockerfile.backend
+++ b/docker/Dockerfile.backend
@@ -1,4 +1,4 @@
-FROM python:3.9.10-alpine
+FROM python:3.11.4-alpine
# Environment
ENV PYTHONDONTWRITEBYTECODE 1
@@ -14,7 +14,7 @@ RUN apk update && \
mkdir /code
# Configuration
-COPY src/config.yaml /rekono
+COPY config.yaml /rekono
# Source code
COPY src/backend/ /code
diff --git a/docker/Dockerfile.kali b/docker/Dockerfile.kali
index 66b3539ef..6a372f688 100644
--- a/docker/Dockerfile.kali
+++ b/docker/Dockerfile.kali
@@ -16,7 +16,7 @@ RUN apt update -y && \
mkdir /code
# Configuration
-COPY src/config.yaml /rekono
+COPY config.yaml /rekono
# Source code
COPY src/backend/ /code
diff --git a/docker/debian/Dockerfile b/docker/debian/Dockerfile
index 24d5e81ce..6e2579a23 100644
--- a/docker/debian/Dockerfile
+++ b/docker/debian/Dockerfile
@@ -33,14 +33,14 @@ RUN apt update -y && \
# Source code and configuration
COPY src/backend/ /code
COPY src/frontend/dist_electron/rekono_*.deb /code
-COPY src/config.yaml /code
+COPY config.yaml /rekono
COPY docker/debian/entrypoint.sh /entrypoint.sh
COPY docker/debian/set_permissions.sh /set_permissions.sh
# Install dependencies and Desktop app
RUN pip install -r /code/requirements.txt && \
dpkg -i /code/rekono_*.deb || apt -f install -y && \
- rm -R /code/testing/ && \
+ rm -R /code/tests/ && \
# Install security tools
apt install nmap dirsearch theharvester nikto sslscan sslyze cmseek zaproxy exploitdb metasploit-framework emailharvester joomscan gitleaks smbmap nuclei gobuster -y && \
apt install seclists dirb -y && \
diff --git a/src/backend/.flake8 b/src/backend/.flake8
deleted file mode 100644
index 0f1ff22dd..000000000
--- a/src/backend/.flake8
+++ /dev/null
@@ -1,7 +0,0 @@
-[flake8]
-exclude = .git,__pycache__,*/migrations/*,venv/*,src/frontend/*
-ignore=W504,W605
-; W504: Disallow line break after binary operator (and, or, etc.). Inconsistency with W503
-; W605: Invalid escape characters (needed to send Telegram messages with Markdown style)
-max-line-length = 120
-max-complexity = 10
\ No newline at end of file
diff --git a/src/backend/.mypy.ini b/src/backend/.mypy.ini
deleted file mode 100644
index fc36a9534..000000000
--- a/src/backend/.mypy.ini
+++ /dev/null
@@ -1,5 +0,0 @@
-[mypy]
-files = src/backend/**
-; Mypy fails due to some external imports without hints
-ignore_missing_imports = True
-exclude = (.*/migrations/.*|venv/.*|src/frontend/.*)
\ No newline at end of file
diff --git a/src/backend/api/__init__.py b/src/backend/api/__init__.py
deleted file mode 100644
index fc4d1e969..000000000
--- a/src/backend/api/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-'''Common API configurations and utilities.'''
diff --git a/src/backend/api/fields.py b/src/backend/api/fields.py
deleted file mode 100644
index 21871e50d..000000000
--- a/src/backend/api/fields.py
+++ /dev/null
@@ -1,65 +0,0 @@
-from drf_spectacular.types import OpenApiTypes
-from drf_spectacular.utils import extend_schema_field
-from rest_framework import serializers
-from taggit.serializers import TagListSerializerField
-
-
-@extend_schema_field(OpenApiTypes.STR)
-class IntegerChoicesField(serializers.Field):
- '''Serializer field to manage IntegerChoices values.'''
-
- def to_representation(self, value: int) -> str:
- '''Return text value to send to the client.
-
- Args:
- value (int): Integer value of the IntegerChoices field
-
- Returns:
- str: String value associated to the integer
- '''
- return self.model(value).name.capitalize()
-
- def to_internal_value(self, data: str) -> int:
- '''Return integer value to be stored in database.
-
- Args:
- data (str): String value of the IntegerChoices field
-
- Returns:
- int: Integer value associated to the string
- '''
- return self.model[data.upper()].value
-
-
-@extend_schema_field({'type': 'array', 'items': {'type': 'string'}})
-class RekonoTagField(TagListSerializerField):
- '''Internal serializer field for TagListSerializerField, including API documentation.'''
-
- pass
-
-
-@extend_schema_field(OpenApiTypes.STR)
-class ProtectedStringValueField(serializers.Field):
- '''Serializer field to manage protected system values.'''
-
- def to_representation(self, value: str) -> str:
- '''Return text value to send to the client.
-
- Args:
- value (str): Internal text value
-
- Returns:
- str: Text value that contains multiple '*' characters
- '''
- return '*' * len(value)
-
- def to_internal_value(self, value: str) -> str:
- '''Return text value to be stored in database.
-
- Args:
- value (str): Text value provided by the client
-
- Returns:
- str: Text value to be stored. Save value than the provided one.
- '''
- return value
diff --git a/src/backend/api/filters.py b/src/backend/api/filters.py
deleted file mode 100644
index ec4dc16d8..000000000
--- a/src/backend/api/filters.py
+++ /dev/null
@@ -1,104 +0,0 @@
-from typing import Any, List
-
-from django.db.models import Q, QuerySet
-from django.views import View
-from django_filters.rest_framework import (DjangoFilterBackend, FilterSet,
- filters)
-from rest_framework.filters import OrderingFilter, SearchFilter
-from rest_framework.request import Request
-from tools.models import Tool
-
-
-class RekonoFilterBackend(DjangoFilterBackend):
- '''Rekono filter backend from DjangoFilterBackend.
-
- This can't be added as default backend because cause warnings when access swagger-ui.
- This is required at least for Finding views to allow filters by N-M relations like 'executions' field.
- '''
-
- def filter_queryset(self, request: Request, queryset: QuerySet, view: View) -> Any:
- '''Filter queryset.
-
- Args:
- request (Request): HTTP request
- queryset (QuerySet): Queryset to filter
- view (View): Django view affected
-
- Returns:
- Any: Filtered queryset
- '''
- return super().filter_queryset(request, queryset, view).distinct()
-
-
-class RekonoSearchFilter(SearchFilter):
- '''Rekono search filter from SearchFilter.'''
-
- def filter_queryset(self, request: Request, queryset: QuerySet, view: View) -> QuerySet:
- '''Filter queryset.
-
- Args:
- request (Request): HTTP request
- queryset (QuerySet): Queryset to filter
- view (View): Django view affected
-
- Returns:
- QuerySet: Filtered queryset
- '''
- return super().filter_queryset(request, queryset, view).distinct() # Ignore duplicates if exist
-
-
-class RekonoOrderingFilter(OrderingFilter):
- '''Rekono ordering filter from OrderingFilter.'''
-
- def filter_queryset(self, request: Request, queryset: QuerySet, view: View) -> QuerySet:
- '''Filter queryset.
-
- Args:
- request (Request): HTTP request
- queryset (QuerySet): Queryset to filter
- view (View): Django view affected
-
- Returns:
- QuerySet: Filtered queryset
- '''
- return super().filter_queryset(request, queryset, view).distinct() # Ignore duplicates if exist
-
-
-class RekonoMultipleFieldFilter(FilterSet):
- '''Filter that allows querysets filtering using two model fields.'''
-
- def multiple_field_filter(self, queryset: QuerySet, value: Any, fields: List[str]) -> QuerySet:
- '''Filter queryset using two model fields simultaneously.
-
- Args:
- queryset (QuerySet): Queryset to be filtered
- value (Any): Value to filter the queryset
- fields (List[str]): List with the name of the fields to use
-
- Returns:
- QuerySet: Queryset filtered by the two fields
- '''
- filter_query = Q()
- for field in fields:
- filter_query |= Q(**{field: value})
- return queryset.filter(filter_query)
-
-
-class BaseToolFilter(RekonoMultipleFieldFilter):
- '''Filter that allows querysets filtering by Tool using two model fields.'''
-
- tool = filters.NumberFilter(field_name='tool', method='filter_tool') # Tool Id given by the user
- tool_fields: List[str] = [] # Tool field names to use in the filter
-
- def filter_tool(self, queryset: QuerySet, name: str, value: Tool) -> QuerySet:
- '''Filter queryset by Tool using two model fields simultaneously.
-
- Args:
- queryset (QuerySet): Queryset to be filtered
- name (str): Field name. Not used in this case
- value (Tool): Tool to filter the queryset
-
- Returns:
- QuerySet: Queryset filtered by the Tool using the defined 'tool_fields'
- '''
- return self.multiple_field_filter(queryset, value, self.tool_fields)
diff --git a/src/backend/api/log.py b/src/backend/api/log.py
deleted file mode 100644
index 1824b7e44..000000000
--- a/src/backend/api/log.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import logging
-from typing import Any
-
-
-class RekonoLoggingFilter(logging.Filter):
- '''Logging filter for Rekono.'''
-
- def filter(self, record: Any) -> bool:
- '''Filter logging records.
-
- Args:
- record (Any): Log record
-
- Returns:
- bool: Indicate if log record is included or not
- '''
- if hasattr(record, 'request'): # Record with request data
- record.source_ip = record.request.META.get('REMOTE_ADDR') # Remote address by default
- record.user = 'anonymous' # Anonymous user by default
- if hasattr(record.request, 'user') and record.request.user and record.request.user.id:
- # Authenticated request
- record.user = record.request.user.id
- else: # Record without request data
- record.source_ip = record.source_ip if hasattr(record, 'source_ip') else ''
- record.user = record.user if hasattr(record, 'user') else ''
- return True
diff --git a/src/backend/api/pagination.py b/src/backend/api/pagination.py
deleted file mode 100644
index aa4748e7b..000000000
--- a/src/backend/api/pagination.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from rest_framework.pagination import PageNumberPagination
-
-
-class Pagination(PageNumberPagination):
- '''Pagination configuration for API Rest.'''
-
- page_query_param = 'page' # Page parameter
- page_size_query_param = 'limit' # Size parameter
- page_size = 25 # Default page size
- max_page_size = 1000 # Max page size
diff --git a/src/backend/api/views.py b/src/backend/api/views.py
deleted file mode 100644
index 8cd0f8e8e..000000000
--- a/src/backend/api/views.py
+++ /dev/null
@@ -1,70 +0,0 @@
-from typing import Any, Dict, List, cast
-
-from django.core.exceptions import PermissionDenied
-from django.db.models import QuerySet
-from rest_framework.serializers import Serializer
-from rest_framework.viewsets import GenericViewSet
-from users.models import User
-
-
-class GetViewSet(GenericViewSet):
- '''Rekono base ViewSet for GET operations.'''
-
- def get_queryset(self) -> QuerySet:
- '''Get the queryset that the user is allowed to get, based on project members.
-
- Returns:
- QuerySet: Execution queryset
- '''
- # Prevent warnings when access the API schema in SwaggerUI or Redoc
- # This is caused by the use of RekonoFilterBackend, that is required for Findings entities
- if self.request.user.id:
- project_filter = {self.members_field: self.request.user}
- return super().get_queryset().filter(**project_filter)
- return None
-
-
-class CreateWithUserViewSet(GenericViewSet):
- '''Rekono base ViewSet for POST operations with user ownershipt.'''
-
- def perform_create(self, serializer: Serializer) -> None:
- '''Create a new instance using a serializer and including the user owner.
-
- Args:
- serializer (Serializer): Serializer to use in the instance creation
- '''
- if self.user_field:
- parameters = {self.user_field: self.request.user}
- serializer.save(**parameters)
- else:
- super().perform_create(serializer)
-
-
-class CreateViewSet(GenericViewSet):
- '''Rekono base ViewSet for POST operations.'''
-
- def get_project_members(self, data: Dict[str, Any]) -> List[User]:
- '''Get project members related to the current entity.
-
- Args:
- data (Dict[str, Any]): Serialized data
-
- Returns:
- List[User]: List of project members
- '''
- fields = self.members_field.split('__')
- data = data.get(fields[0], {}) # Get first serialized field
- for field in fields[1:]:
- data = getattr(data, field) # Get all fields
- return cast(QuerySet, data).all() if data else [] # Return all members
-
- def perform_create(self, serializer: Serializer) -> None:
- '''Create a new instance using a serializer.
-
- Args:
- serializer (Serializer): Serializer to use in the instance creation
- '''
- if self.request.user not in self.get_project_members(serializer.validated_data):
- # Current user can't create a new entity in this project
- raise PermissionDenied()
- super().perform_create(serializer)
diff --git a/src/backend/authentications/migrations/__init__.py b/src/backend/api_tokens/__init__.py
similarity index 100%
rename from src/backend/authentications/migrations/__init__.py
rename to src/backend/api_tokens/__init__.py
diff --git a/src/backend/api_tokens/admin.py b/src/backend/api_tokens/admin.py
new file mode 100644
index 000000000..baaf819ca
--- /dev/null
+++ b/src/backend/api_tokens/admin.py
@@ -0,0 +1,6 @@
+from api_tokens.models import ApiToken
+from django.contrib import admin
+
+# Register your models here.
+
+admin.site.register(ApiToken)
diff --git a/src/backend/api_tokens/apps.py b/src/backend/api_tokens/apps.py
new file mode 100644
index 000000000..f49ef01cc
--- /dev/null
+++ b/src/backend/api_tokens/apps.py
@@ -0,0 +1,6 @@
+from django.apps import AppConfig
+from framework.apps import BaseApp
+
+
+class ApiTokensConfig(BaseApp, AppConfig):
+ name = "api_tokens"
diff --git a/src/backend/api_tokens/filters.py b/src/backend/api_tokens/filters.py
new file mode 100644
index 000000000..b61ffca82
--- /dev/null
+++ b/src/backend/api_tokens/filters.py
@@ -0,0 +1,13 @@
+from api_tokens.models import ApiToken
+from django_filters.rest_framework import FilterSet
+
+
+class ApiTokenFilter(FilterSet):
+ """FilterSet to filter Project entities."""
+
+ class Meta:
+ model = ApiToken
+ fields = {
+ "name": ["exact", "icontains"],
+ "expiration": ["gte", "lte", "exact"],
+ }
diff --git a/src/backend/api_tokens/models.py b/src/backend/api_tokens/models.py
new file mode 100644
index 000000000..2f99b45d1
--- /dev/null
+++ b/src/backend/api_tokens/models.py
@@ -0,0 +1,36 @@
+from django.db import models
+from framework.models import BaseModel
+from rekono.settings import AUTH_USER_MODEL
+from rest_framework.authtoken.models import Token
+from security.input_validator import FutureDatetimeValidator, Regex, Validator
+
+
+class ApiToken(Token, BaseModel):
+ key = models.CharField(max_length=128, unique=True)
+ name = models.TextField(
+ max_length=100,
+ validators=[Validator(Regex.NAME.value, code="name")],
+ )
+ user = models.ForeignKey(
+ AUTH_USER_MODEL,
+ related_name="api_tokens",
+ on_delete=models.CASCADE,
+ )
+ expiration = models.DateTimeField(
+ blank=True, null=True, validators=[FutureDatetimeValidator(code="expiration")]
+ )
+
+ class Meta:
+ constraints = [
+ models.UniqueConstraint(fields=["name", "user"], name="unique_api_token")
+ ]
+
+ @classmethod
+ def generate_key(cls):
+ key = Token.generate_key()
+ return (
+ Token.generate_key() if ApiToken.objects.filter(key=key).exists() else key
+ )
+
+ def __str__(self) -> str:
+ return f"{self.user.__str__()} - {self.name}"
diff --git a/src/backend/api_tokens/serializers.py b/src/backend/api_tokens/serializers.py
new file mode 100644
index 000000000..7a0f4474c
--- /dev/null
+++ b/src/backend/api_tokens/serializers.py
@@ -0,0 +1,25 @@
+from typing import Any
+
+from api_tokens.models import ApiToken
+from rest_framework.serializers import ModelSerializer
+from security.cryptography.hashing import hash
+
+
+class ApiTokenSerializer(ModelSerializer):
+ class Meta:
+ model = ApiToken
+ fields = ("id", "name", "expiration")
+
+
+class CreateApiTokenSerializer(ModelSerializer):
+ class Meta:
+ model = ApiToken
+ fields = ("id", "key", "name", "expiration")
+ read_only_fields = ("key",)
+
+ def save(self, **kwargs: Any) -> ApiToken:
+ plain_key = ApiToken.generate_key()
+ self.validated_data["key"] = hash(plain_key)
+ api_token = super().save(**kwargs)
+ api_token.key = plain_key
+ return api_token
diff --git a/src/backend/api_tokens/urls.py b/src/backend/api_tokens/urls.py
new file mode 100644
index 000000000..60640fa11
--- /dev/null
+++ b/src/backend/api_tokens/urls.py
@@ -0,0 +1,7 @@
+from api_tokens.views import ApiTokenViewSet
+from rest_framework.routers import SimpleRouter
+
+router = SimpleRouter()
+router.register("api-tokens", ApiTokenViewSet)
+
+urlpatterns = router.urls
diff --git a/src/backend/api_tokens/views.py b/src/backend/api_tokens/views.py
new file mode 100644
index 000000000..561b90f01
--- /dev/null
+++ b/src/backend/api_tokens/views.py
@@ -0,0 +1,32 @@
+from api_tokens.filters import ApiTokenFilter
+from api_tokens.models import ApiToken
+from api_tokens.serializers import ApiTokenSerializer, CreateApiTokenSerializer
+from django.db.models import QuerySet
+from framework.views import BaseViewSet
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.serializers import Serializer
+
+# Create your views here.
+
+
+class ApiTokenViewSet(BaseViewSet):
+ queryset = ApiToken.objects.all()
+ serializer_class = ApiTokenSerializer
+ filterset_class = ApiTokenFilter
+ permission_classes = [IsAuthenticated]
+ http_method_names = [
+ "get",
+ "post",
+ "delete",
+ ]
+ search_fields = ["name"]
+ ordering_fields = ["id", "name", "expiration"]
+ owner_field = "user"
+
+ def get_queryset(self) -> QuerySet:
+ return super().get_queryset().filter(user=self.request.user).all()
+
+ def get_serializer_class(self) -> Serializer:
+ if self.request.method == "POST":
+ return CreateApiTokenSerializer
+ return super().get_serializer_class()
diff --git a/src/backend/authentications/__init__.py b/src/backend/authentications/__init__.py
index e4ebe9578..2e5792fa1 100644
--- a/src/backend/authentications/__init__.py
+++ b/src/backend/authentications/__init__.py
@@ -1 +1 @@
-'''Authentications.'''
+"""Authentications."""
diff --git a/src/backend/authentications/admin.py b/src/backend/authentications/admin.py
index 33f7c57ad..913ebd61b 100644
--- a/src/backend/authentications/admin.py
+++ b/src/backend/authentications/admin.py
@@ -1,6 +1,5 @@
-from django.contrib import admin
-
from authentications.models import Authentication
+from django.contrib import admin
# Register your models here.
diff --git a/src/backend/authentications/apps.py b/src/backend/authentications/apps.py
index f57f3b4bb..e7c5989c4 100644
--- a/src/backend/authentications/apps.py
+++ b/src/backend/authentications/apps.py
@@ -1,7 +1,6 @@
from django.apps import AppConfig
+from framework.apps import BaseApp
-class AuthenticationConfig(AppConfig):
- '''Authentication Django application.'''
-
- name = 'authentications'
+class AuthenticationConfig(BaseApp, AppConfig):
+ name = "authentications"
diff --git a/src/backend/authentications/enums.py b/src/backend/authentications/enums.py
index 9cb7b3ffc..d7d2bbc40 100644
--- a/src/backend/authentications/enums.py
+++ b/src/backend/authentications/enums.py
@@ -2,11 +2,12 @@
class AuthenticationType(models.TextChoices):
- '''Supported authentication types.'''
+ """Supported authentication types."""
- BASIC = 'Basic'
- BEARER = 'Bearer'
- COOKIE = 'Cookie'
- DIGEST = 'Digest'
- JWT = 'JWT'
- NTLM = 'NTLM'
+ BASIC = "Basic"
+ BEARER = "Bearer"
+ COOKIE = "Cookie"
+ DIGEST = "Digest"
+ JWT = "JWT"
+ NTLM = "NTLM"
+ TOKEN = "Token"
diff --git a/src/backend/authentications/filters.py b/src/backend/authentications/filters.py
index 48f499af0..29af78742 100644
--- a/src/backend/authentications/filters.py
+++ b/src/backend/authentications/filters.py
@@ -1,26 +1,24 @@
-from django_filters import rest_framework
-from django_filters.rest_framework.filters import OrderingFilter
-
from authentications.models import Authentication
+from django_filters.filters import ModelChoiceFilter
+from django_filters.rest_framework import FilterSet
+from projects.models import Project
+from targets.models import Target
-class AuthenticationFilter(rest_framework.FilterSet):
- '''FilterSet to filter and sort authentications entities.'''
+class AuthenticationFilter(FilterSet):
+ """FilterSet to filter and sort authentications entities."""
- o = OrderingFilter(fields=('target_port', 'name', 'type')) # Ordering fields
+ target = ModelChoiceFilter(
+ queryset=Target.objects.all(), field_name="target_port__target"
+ )
+ project = ModelChoiceFilter(
+ queryset=Project.objects.all(), field_name="target_port__target__project"
+ )
class Meta:
model = Authentication
- fields = { # Filter fields
- 'target_port': ['exact'],
- 'target_port__port': ['exact'],
- 'target_port__target': ['exact'],
- 'target_port__target__project': ['exact'],
- 'target_port__target__project__name': ['exact', 'icontains'],
- 'target_port__target__project__owner': ['exact'],
- 'target_port__target__project__owner__username': ['exact', 'icontains'],
- 'target_port__target__target': ['exact', 'icontains'],
- 'target_port__target__type': ['exact'],
- 'name': ['exact', 'icontains'],
- 'type': ['exact']
+ fields = {
+ "target_port": ["exact", "isnull"],
+ "name": ["exact", "icontains"],
+ "type": ["exact"],
}
diff --git a/src/backend/authentications/migrations/0001_initial.py b/src/backend/authentications/migrations/0001_initial.py
deleted file mode 100644
index 93b06da28..000000000
--- a/src/backend/authentications/migrations/0001_initial.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Generated by Django 3.2.16 on 2023-01-08 12:56
-
-from django.db import migrations, models
-import django.db.models.deletion
-import input_types.base
-import security.input_validation
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- ('targets', '0002_auto_20230108_1356'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='Authentication',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('name', models.TextField(max_length=100, validators=[security.input_validation.validate_name])),
- ('credential', models.TextField(max_length=500, validators=[security.input_validation.validate_credential])),
- ('type', models.TextField(choices=[('Basic', 'Basic'), ('Bearer', 'Bearer'), ('Cookie', 'Cookie'), ('Digest', 'Digest'), ('JWT', 'Jwt'), ('NTLM', 'Ntlm')], max_length=8)),
- ('target_port', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='authentication', to='targets.targetport')),
- ],
- bases=(models.Model, input_types.base.BaseInput),
- ),
- ]
diff --git a/src/backend/authentications/models.py b/src/backend/authentications/models.py
index 36ad4ab2a..0ae807b35 100644
--- a/src/backend/authentications/models.py
+++ b/src/backend/authentications/models.py
@@ -1,75 +1,83 @@
import base64
from typing import Any, Dict
-from django.db import models
-from input_types.enums import InputKeyword
-from input_types.models import BaseInput
-from projects.models import Project
-from security.input_validation import validate_credential, validate_name
-from targets.models import TargetPort
-from tools.models import Input
-
from authentications.enums import AuthenticationType
+from django.db import models
+from framework.enums import InputKeyword
+from framework.models import BaseEncrypted, BaseInput
+from security.input_validator import Regex, Validator
+from target_ports.models import TargetPort
# Create your models here.
-class Authentication(models.Model, BaseInput):
- '''Authentication model.'''
-
- # Related target port
- target_port = models.OneToOneField(TargetPort, related_name='authentication', on_delete=models.CASCADE)
- name = models.TextField(max_length=100, validators=[validate_name]) # Credential name
- credential = models.TextField(max_length=500, validators=[validate_credential]) # Credential value
- type = models.TextField(max_length=8, choices=AuthenticationType.choices) # Authentication type
-
- def filter(self, input: Input) -> bool:
- '''Check if this instance is valid based on input filter.
-
- Args:
- input (Input): Tool input whose filter will be applied
-
- Returns:
- bool: Indicate if this instance match the input filter or not
- '''
- if input.filter and input.filter[0] == '!': # Negative filter
- return self.type.lower() not in input.filter[1:].split(',') # Check if filter doesn't match the type
- # Check if filter matches the type
- return not input.filter or self.type.lower() in input.filter.lower().split(',')
+class Authentication(BaseInput, BaseEncrypted):
+ """Authentication model."""
+
+ name = models.TextField(
+ max_length=100,
+ validators=[Validator(Regex.NAME.value, code="name")],
+ null=True,
+ blank=True,
+ )
+ _secret = models.TextField(
+ max_length=500,
+ validators=[Validator(Regex.SECRET.value, code="secret")],
+ null=True,
+ blank=True,
+ db_column="secret",
+ )
+ type = models.TextField(max_length=8, choices=AuthenticationType.choices)
+ target_port = models.OneToOneField(
+ TargetPort,
+ related_name="authentication",
+ on_delete=models.CASCADE,
+ )
+
+ filters = [BaseInput.Filter(type=AuthenticationType, field="type")]
+ _encrypted_field = "_secret"
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
+ """Get useful information from this instance to be used in tool execution as argument.
Args:
accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
Returns:
Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
- output = self.target_port.parse()
- credential = {
- InputKeyword.USERNAME.name.lower(): self.name if self.type == AuthenticationType.BASIC else None,
- InputKeyword.COOKIE_NAME.name.lower(): self.name if self.type == AuthenticationType.COOKIE else None,
- InputKeyword.SECRET.name.lower(): self.credential,
- InputKeyword.TOKEN.name.lower(): self.credential if self.type != AuthenticationType.BASIC else base64.b64encode(f'{self.name}:{self.credential}'.encode()).decode(), # noqa: E501
+ """
+ return {
+ InputKeyword.COOKIE_NAME.name.lower(): self.name
+ if self.type == AuthenticationType.COOKIE
+ else None,
+ InputKeyword.SECRET.name.lower(): self.secret,
InputKeyword.CREDENTIAL_TYPE.name.lower(): self.type,
InputKeyword.CREDENTIAL_TYPE_LOWER.name.lower(): self.type.lower(),
+ **(
+ {
+ InputKeyword.USERNAME.name.lower(): self.name,
+ InputKeyword.TOKEN.name.lower(): base64.b64encode(
+ f"{self.name}:{self.secret}".encode()
+ ).decode(),
+ }
+ if self.type == AuthenticationType.BASIC
+ else {
+ InputKeyword.USERNAME.name.lower(): None,
+ InputKeyword.TOKEN.name.lower(): self.secret,
+ }
+ ),
}
- output.update(credential)
- return output
def __str__(self) -> str:
- '''Instance representation in text format.
+ """Instance representation in text format.
Returns:
str: String value that identifies this instance
- '''
- return f'{self.target_port.__str__()} - {self.name}'
-
- def get_project(self) -> Project:
- '''Get the related project for the instance. This will be used for authorization purposes.
-
- Returns:
- Project: Related project entity
- '''
- return self.target_port.target.project
+ """
+ return (
+ f"{self.target_port.__str__()} - " if self.target_port else ""
+ ) + self.name
+
+ @classmethod
+ def get_project_field(cls) -> str:
+ return "target_port__target__project"
diff --git a/src/backend/authentications/serializers.py b/src/backend/authentications/serializers.py
index 23e8c45b6..b50785299 100644
--- a/src/backend/authentications/serializers.py
+++ b/src/backend/authentications/serializers.py
@@ -1,35 +1,20 @@
-from typing import Any, Dict
-
-from api.fields import ProtectedStringValueField
-from rest_framework import serializers
-from security.input_validation import validate_credential
-
from authentications.models import Authentication
+from framework.fields import ProtectedSecretField
+from rest_framework.serializers import ModelSerializer
+from security.input_validator import Regex, Validator
-class AuthenticationSerializer(serializers.ModelSerializer):
- '''Serializer to manage authentications via API.'''
+class AuthenticationSerializer(ModelSerializer):
+ """Serializer to manage authentications via API."""
- credential = ProtectedStringValueField(required=True, allow_null=False) # Credential value in a protected way
+ secret = ProtectedSecretField(
+ Validator(Regex.SECRET.value, code="secret").__call__,
+ required=True,
+ allow_null=False,
+ )
class Meta:
- '''Serializer metadata.'''
+ """Serializer metadata."""
model = Authentication
- fields = ('id', 'target_port', 'name', 'credential', 'type') # Authentication fields exposed via API
-
- def validate(self, attrs: Dict[str, Any]) -> Dict[str, Any]:
- '''Validate the provided data before use it.
-
- Args:
- attrs (Dict[str, Any]): Provided data
-
- Raises:
- ValidationError: Raised if provided data is invalid
-
- Returns:
- Dict[str, Any]: Data after validation process
- '''
- attrs = super().validate(attrs)
- validate_credential(attrs['credential'])
- return attrs
+ fields = ("id", "name", "secret", "type", "target_port")
diff --git a/src/backend/authentications/views.py b/src/backend/authentications/views.py
index cf068d679..469afc45d 100644
--- a/src/backend/authentications/views.py
+++ b/src/backend/authentications/views.py
@@ -1,26 +1,31 @@
-from api.views import CreateViewSet, GetViewSet
-from rest_framework.mixins import (CreateModelMixin, DestroyModelMixin,
- ListModelMixin, RetrieveModelMixin)
-
from authentications.filters import AuthenticationFilter
from authentications.models import Authentication
from authentications.serializers import AuthenticationSerializer
+from framework.views import BaseViewSet
+from rest_framework.permissions import IsAuthenticated
+from security.authorization.permissions import (
+ ProjectMemberPermission,
+ RekonoModelPermission,
+)
# Create your views here.
-class AuthenticationViewSet(
- GetViewSet,
- CreateViewSet,
- CreateModelMixin,
- ListModelMixin,
- RetrieveModelMixin,
- DestroyModelMixin
-):
- '''Authentication ViewSet that includes: get, retrieve, create, and delete features.'''
+class AuthenticationViewSet(BaseViewSet):
+ """Authentication ViewSet that includes: get, retrieve, create, and delete features."""
- queryset = Authentication.objects.all().order_by('-id')
+ queryset = Authentication.objects.all()
serializer_class = AuthenticationSerializer
filterset_class = AuthenticationFilter
- search_fields = ['name']
- members_field = 'target_port__target__project__members'
+ permission_classes = [
+ IsAuthenticated,
+ RekonoModelPermission,
+ ProjectMemberPermission,
+ ]
+ search_fields = ["name"]
+ ordering_fields = ["id", "name", "type"]
+ http_method_names = [
+ "get",
+ "post",
+ "delete",
+ ]
diff --git a/src/backend/defectdojo/__init__.py b/src/backend/defectdojo/__init__.py
deleted file mode 100644
index 02aca9607..000000000
--- a/src/backend/defectdojo/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-'''Defect-Dojo integration to import Rekono findings and executions.'''
diff --git a/src/backend/defectdojo/api.py b/src/backend/defectdojo/api.py
deleted file mode 100644
index 64de1d57a..000000000
--- a/src/backend/defectdojo/api.py
+++ /dev/null
@@ -1,304 +0,0 @@
-import logging
-from datetime import datetime, timedelta
-from typing import Any, Tuple
-from urllib.parse import urlparse
-
-import requests
-from findings.enums import Severity
-from projects.models import Project
-from requests.adapters import HTTPAdapter, Retry
-from system.models import System
-
-from defectdojo.constants import DD_DATE_FORMAT, DD_DATETIME_FORMAT
-
-# Mapping between Rekono and Defect-Dojo severities
-SEVERITY_MAPPING = {
- str(Severity.INFO): 'S0',
- str(Severity.LOW): 'S1',
- str(Severity.MEDIUM): 'S3',
- str(Severity.HIGH): 'S4',
- str(Severity.CRITICAL): 'S5',
-}
-
-logger = logging.getLogger() # Rekono logger
-
-
-class DefectDojo:
- '''Defect-Dojo API handler to allow Rekono integration.'''
-
- def __init__(self):
- '''Defect-Dojo API constructor.'''
- self.system = None
- self.http_session = None
-
- def get_system(self) -> System:
- '''Get system settings instance.
-
- Returns:
- System: System settings
- '''
- if not self.system:
- self.system = System.objects.first()
- return self.system
-
- def get_http_session(self) -> requests.Session:
- '''Get HTTP session configured to retry requests after unexpected errors.
-
- Returns:
- requests.Session: HTTP session properly configured
- '''
- if not self.http_session:
- schema = urlparse(self.get_system().defect_dojo_url).scheme # Get API schema
- # Configure retry protocol to prevent unexpected errors
- retries = Retry(total=5, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504, 599])
- self.http_session = requests.Session()
- self.http_session.mount(f'{schema}://', HTTPAdapter(max_retries=retries))
- return self.http_session
-
- def request(
- self,
- method: str,
- endpoint: str,
- params: dict = None,
- data: dict = None,
- files: dict = None,
- expected_status: int = 200
- ) -> Tuple[bool, Any]:
- '''Perform a Defect-Dojo API request.
-
- Args:
- method (str): HTTP method to use
- endpoint (str): Endpoint to call
- params (dict, optional): Query params to include in the request. Defaults to None.
- data (dict, optional): Body data to include in the request. Defaults to None.
- files (dict, optional): Files to include in the request. Defaults to None.
- expected_status (int, optional): Expected HTTP response status. Defaults to 200.
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- system = self.get_system()
- headers = {
- 'User-Agent': 'Rekono', # Rekono User-Agent
- 'Authorization': f'Token {system.defect_dojo_api_key}' # Authentication via API key
- }
- try:
- response = self.get_http_session().request( # Defect-Dojo API request
- method=method,
- url=f'{system.defect_dojo_url}/api/v2{endpoint}',
- headers=headers,
- params=params,
- data=data,
- files=files,
- verify=system.defect_dojo_verify_tls
- )
- except requests.exceptions.ConnectionError:
- response = self.get_http_session().request( # Defect-Dojo API request
- method=method,
- url=f'{system.defect_dojo_url}/api/v2{endpoint}',
- headers=headers,
- params=params,
- data=data,
- files=files,
- verify=system.defect_dojo_verify_tls
- )
- logger.info(f'[Defect-Dojo] {method.upper()} /api/v2{endpoint} > HTTP {response.status_code}')
- if response.status_code == expected_status:
- return True, response.json() # Successful request
- else:
- return False, response # Failed request
-
- def is_available(self) -> bool:
- '''Check if Defect-Dojo integration is available.
-
- Returns:
- bool: Indicate if Defect-Dojo integration is available or not
- '''
- if not self.get_system().defect_dojo_url:
- return False
- try:
- success, _ = self.request('get', '/test_types/', params={'limit': 1})
- except requests.exceptions.ConnectionError:
- success = False
- if not success:
- logger.error('[Defect-Dojo] Integration with Defect-Dojo is not available')
- return success
-
- def get_rekono_product_type(self) -> Tuple[bool, dict]:
- '''Get product type associated to Rekono, based on configurated name.
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- return self.request('GET', '/product_types/', params={'name': self.get_system().defect_dojo_product_type})
-
- def create_rekono_product_type(self) -> Tuple[bool, dict]:
- '''Create new product type associated to Rekono, based on configurated name.
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- system = self.get_system()
- data = {'name': system.defect_dojo_product_type, 'description': system.defect_dojo_product_type}
- return self.request('POST', '/product_types/', data=data, expected_status=201)
-
- def get_product(self, id: int) -> Tuple[bool, dict]:
- '''Get product by Id.
-
- Args:
- id (int): Product Id to get
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- return self.request('GET', f'/products/{id}/')
-
- def create_product(self, product_type: int, project: Project) -> Tuple[bool, dict]:
- '''Create new Defect-Dojo product from Rekono project.
-
- Args:
- product_type (int): Product type associated to the product
- project (Project): Rekono project to create in Defect-Dojo as product
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- data = {
- 'tags': [self.get_system().defect_dojo_tag], # Includes the configurated tag
- 'name': project.name,
- 'description': project.description,
- 'prod_type': product_type
- }
- return self.request('POST', '/products/', data=data, expected_status=201)
-
- def get_engagement(self, id: int) -> Tuple[bool, dict]:
- '''Get engagement by Id.
-
- Args:
- id (int): Engagement Id to get
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- return self.request('GET', f'/engagements/{id}/')
-
- def create_engagement(self, product: int, name: str, description: str) -> Tuple[bool, dict]:
- '''Create new engagement.
-
- Args:
- product (int): Product Id where the engagement will be created
- name (str): Engagement name
- description (str): Engagement description
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- start = datetime.now()
- end = start + timedelta(days=7) # End date after 7 days
- data = {
- 'name': name,
- 'description': description,
- 'tags': [self.get_system().defect_dojo_tag], # Includes the configurated tag
- 'product': product,
- 'status': 'In Progress',
- 'engagement_type': 'Interactive', # The other option is 'CI/CD'
- 'target_start': start.strftime(DD_DATE_FORMAT),
- 'target_end': end.strftime(DD_DATE_FORMAT),
- }
- return self.request('POST', '/engagements/', data=data, expected_status=201)
-
- def get_rekono_test_type(self) -> Tuple[bool, dict]:
- '''Get test type associated to Rekono, based on configurated name.
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- return self.request('GET', '/test_types/', params={'name': self.get_system().defect_dojo_test_type})
-
- def create_rekono_test_type(self) -> Tuple[bool, dict]:
- '''Create new test type associated to Rekono, based on configurated name.
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- system = self.get_system()
- data = {
- 'name': system.defect_dojo_test_type,
- 'tags': [system.defect_dojo_tag], # Includes the configurated tag
- 'dynamic_tool': True # Cause most Rekono tools are dynamic
- }
- return self.request('POST', '/test_types/', data=data, expected_status=201)
-
- def create_rekono_test(self, test_type: int, engagement: int) -> Tuple[bool, dict]:
- '''Create new Rekono test.
-
- Args:
- test_type (int): Test type Id associated to the test
- engagement (int): Engagement Id where the test will be created
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- system = self.get_system()
- data = {
- 'engagement': engagement,
- 'test_type': test_type,
- 'title': system.defect_dojo_test,
- 'description': system.defect_dojo_test,
- 'target_start': datetime.now().strftime(DD_DATETIME_FORMAT),
- 'target_end': datetime.now().strftime(DD_DATETIME_FORMAT) # Because the test is completed
- }
- return self.request('POST', '/tests/', data=data, expected_status=201)
-
- def create_endpoint(self, product: int, endpoint: Any) -> Tuple[bool, dict]:
- '''Create new Defect-Dojo endpoint from Rekono endpoint.
-
- Args:
- product (int): Product Id where the endpoint will be created
- endpoint (Path): Rekono endpoint to create in Defect-Dojo
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- data = endpoint.defect_dojo()
- data.update({'product': product})
- return self.request('POST', '/endpoints/', data=data, expected_status=201)
-
- def create_finding(self, test: int, finding: Any) -> Tuple[bool, dict]:
- '''Create new Defect-Dojo finding from Rekono finding.
-
- Args:
- test (int): Test Id where the finding will be created
- finding (Finding): Rekono finding to create in Defect-Dojo
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- data = finding.defect_dojo()
- data.update({
- 'test': test,
- 'numerical_severity': SEVERITY_MAPPING[data.get('severity')], # Mapping between severity values
- 'active': True # Always created as active
- })
- return self.request('POST', '/findings/', data=data, expected_status=201)
-
- def import_scan(self, engagement: int, execution: Any) -> Tuple[bool, dict]:
- '''Import Rekono execution output in Defect-Dojo.
-
- Args:
- engagement (int): Engagement Id where the scan will be imported
- execution (Execution): Completed Rekono execution to import in Defect-Dojo
-
- Returns:
- Tuple[bool, dict]: Indicates if request was successful or not (bool), and return the response body (dict)
- '''
- data = {
- # https://defectdojo.github.io/django-DefectDojo/integrations/parsers/
- 'scan_type': execution.tool.defectdojo_scan_type,
- 'engagement': engagement,
- 'tags': [self.get_system().defect_dojo_tag] # Includes the configurated tag
- }
- files = {
- 'file': open(execution.output_file, 'r') # Execution output file
- }
- return self.request('POST', '/import-scan/', data=data, files=files, expected_status=201)
diff --git a/src/backend/defectdojo/constants.py b/src/backend/defectdojo/constants.py
deleted file mode 100644
index 64c88654e..000000000
--- a/src/backend/defectdojo/constants.py
+++ /dev/null
@@ -1,4 +0,0 @@
-'''Defect-Dojo constants.'''
-
-DD_DATE_FORMAT = '%Y-%m-%d'
-DD_DATETIME_FORMAT = '%Y-%m-%dT%H:%M'
diff --git a/src/backend/defectdojo/exceptions.py b/src/backend/defectdojo/exceptions.py
deleted file mode 100644
index 8a07389f1..000000000
--- a/src/backend/defectdojo/exceptions.py
+++ /dev/null
@@ -1,4 +0,0 @@
-class DefectDojoException(Exception):
- '''Defect-Dojo generic exception.'''
-
- pass
diff --git a/src/backend/defectdojo/reporter.py b/src/backend/defectdojo/reporter.py
deleted file mode 100644
index c10282985..000000000
--- a/src/backend/defectdojo/reporter.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import logging
-from typing import List, Tuple
-
-from defectdojo.api import DefectDojo
-from defectdojo.exceptions import DefectDojoException
-from executions.models import Execution
-from findings.models import Finding, Path
-from projects.models import Project
-from targets.models import Target
-
-dd_client = DefectDojo() # Defect-Dojo client
-
-logger = logging.getLogger() # Rekono logger
-
-
-def get_product_and_engagement_id(project: Project, target: Target) -> Tuple[int, int]:
- '''Get product Id and engagement Id to use to Defect-Dojo import.
-
- Args:
- project (Project): Rekono project
- target (Target): Rekono target
-
- Returns:
- Tuple[int, int]: Defect-Dojo product Id and engagement Id
- '''
- product_id = project.defectdojo_product_id
- to_check = [(dd_client.get_product, product_id, 'product')]
- engagement_id = project.defectdojo_engagement_id
- if project.defectdojo_engagement_by_target:
- engagement_id = target.get_defectdojo_engagement(dd_client)
- else:
- to_check.append((dd_client.get_engagement, engagement_id, 'engagement'))
- for checker, id, name in to_check:
- check, _ = checker(id)
- if not check:
- raise DefectDojoException({name.lower(): [f'{name.capitalize()} {id} is not found in Defect-Dojo']})
- return product_id, engagement_id
-
-
-def get_rekono_test(engagement_id: int) -> int:
- '''Create a new test associated to Rekono in a specific Defect-Dojo engagement.
-
- Args:
- engagement_id (int): Engagement Id where the test will be created
-
- Raises:
- DefectDojoException: Raised if the test can't be created
-
- Returns:
- int: Defect-Dojo test Id
- '''
- test_type = None
- result, body = dd_client.get_rekono_test_type() # Get Rekono test type
- if result and body and len(body.get('results', [])) > 0:
- test_type = body['results'][0].get('id')
- else: # Rekono test type not found
- result, body = dd_client.create_rekono_test_type() # Create Rekono test type
- if result:
- logger.info(f'[Defect-Dojo] Rekono test type {body["id"]} has been created')
- test_type = body.get('id')
- if test_type: # If test type found or created
- result, body = dd_client.create_rekono_test(test_type, engagement_id) # Create Rekono test
- if result:
- logger.info(f'[Defect-Dojo] Rekono test {body["id"]} has been created')
- return body['id']
- logger.warning("[Defect-Dojo] Rekono test can't be created")
- raise DefectDojoException({'test': ['Unexpected error in Rekono test creation']}) # Rekono test can't be created
-
-
-def report(execution: Execution, findings: List[Finding]) -> None:
- '''Report to Defect-Dojo the results of one Rekono execution.
-
- Args:
- execution (Execution): Execution to be reported
- findings (List[Finding]): Findings detected during the execution
-
- Raises:
- DefectDojoException: Raised if Defect-Dojo is not available
- '''
- if not dd_client.is_available():
- raise DefectDojoException({'defect-dojo': ['Integration with Defect-Dojo is not available']})
- product_id, engagement_id = get_product_and_engagement_id(execution.task.target.project, execution.task.target)
- if execution.tool.defectdojo_scan_type:
- dd_client.import_scan(engagement_id, execution) # Import the execution output
- logger.info(f'[Defect-Dojo] Execution {execution.id} has been imported in engagement {engagement_id}')
- else:
- test_id = None
- for finding in findings:
- if isinstance(finding, Path): # Path finding
- dd_client.create_endpoint(product_id, finding) # Import finding as Defect-Dojo endpoint
- else:
- test_id = test_id if test_id else get_rekono_test(engagement_id)
- dd_client.create_finding(test_id, finding) # Import finding as Defect-Dojo finding
- logger.info(
- f'[Defect-Dojo] {finding.__class__.__name__} {finding.id} has been imported in product {product_id}'
- )
- execution.imported_in_defectdojo = True # Update the execution as reported
- execution.save(update_fields=['imported_in_defectdojo'])
diff --git a/src/backend/email_notifications/__init__.py b/src/backend/email_notifications/__init__.py
deleted file mode 100644
index bc9cedc49..000000000
--- a/src/backend/email_notifications/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-'''Email sender.'''
diff --git a/src/backend/email_notifications/constants.py b/src/backend/email_notifications/constants.py
deleted file mode 100644
index 96220204a..000000000
--- a/src/backend/email_notifications/constants.py
+++ /dev/null
@@ -1,3 +0,0 @@
-'''Email messages constants.'''
-
-DATETIME_FORMAT = '%Y-%m-%d %H:%M'
diff --git a/src/backend/email_notifications/sender.py b/src/backend/email_notifications/sender.py
deleted file mode 100644
index ae19b8150..000000000
--- a/src/backend/email_notifications/sender.py
+++ /dev/null
@@ -1,145 +0,0 @@
-import logging
-from typing import Any, Dict, List
-
-import django_rq
-from django.core.mail import EmailMultiAlternatives
-from django.template.loader import get_template
-from django.utils import timezone
-from django_rq import job
-from email_notifications.constants import DATETIME_FORMAT
-from findings.models import Finding
-
-from rekono.settings import EMAIL_HOST, EMAIL_PORT, FRONTEND_URL
-
-logger = logging.getLogger() # Rekono logger
-
-
-@job('emails-queue')
-def consumer(addresses: List[str], subject: str, template_name: str, data: Dict[str, Any]) -> None:
- '''Send HTML email message.
-
- Args:
- addresses (List[str]): Destinatary email addresses
- subject (str): Email subject
- template_name (str): HTML template to use
- data (Dict[str, Any]): Data to include in the HTML template
- '''
- if EMAIL_HOST and EMAIL_PORT:
- template = get_template(template_name) # Get HTML template
- data['rekono_url'] = FRONTEND_URL # Include frontend address for links
- # nosemgrep: python.flask.security.xss.audit.direct-use-of-jinja2.direct-use-of-jinja2
- content = template.render(data) # Render HTML template using data
- try:
- message = EmailMultiAlternatives(subject, '', None, addresses) # Create email message
- message.attach_alternative(content, 'text/html') # Add HTML content to email message
- message.send() # Send email message
- except Exception:
- logger.error('[Email] Error during email message sending')
-
-
-def user_invitation(user: Any) -> None:
- '''Send email user invitation.
-
- Args:
- user (Any): User to invite to Rekono
- '''
- emails_queue = django_rq.get_queue('emails-queue') # Get emails queue
- emails_queue.enqueue( # Enqueue email notification
- consumer,
- addresses=[user.email],
- subject='Welcome to Rekono',
- template_name='user_invitation.html',
- data={'user': user}
- )
-
-
-def user_password_reset(user: Any) -> None:
- '''Send email for reset password.
-
- Args:
- user (Any): User that requests the password reset
- '''
- emails_queue = django_rq.get_queue('emails-queue') # Get emails queue
- emails_queue.enqueue( # Enqueue email notification
- consumer,
- addresses=[user.email],
- subject='Reset Rekono password',
- template_name='user_password_reset.html',
- data={'user': user}
- )
-
-
-def user_enable_account(user: Any) -> None:
- '''Send email for enable user account.
-
- Args:
- user (Any): Recently enabled user
- '''
- emails_queue = django_rq.get_queue('emails-queue') # Get emails queue
- emails_queue.enqueue( # Enqueue email notification
- consumer,
- addresses=[user.email],
- subject='Rekono user enabled',
- template_name='user_enable_account.html',
- data={'user': user}
- )
-
-
-def user_login_notification(user: Any) -> None:
- '''Send email notification after user login.
-
- Args:
- user (Any): Recently enabled user
- '''
- emails_queue = django_rq.get_queue('emails-queue') # Get emails queue
- emails_queue.enqueue( # Enqueue email notification
- consumer,
- addresses=[user.email],
- subject='New login on your Rekono account',
- template_name='user_login_notification.html',
- data={'time': timezone.now().strftime(DATETIME_FORMAT)}
- )
-
-
-def user_telegram_linked_notification(user: Any) -> None:
- '''Send email notification after link user account to Telegram bot.
-
- Args:
- user (Any): Recently enabled user
- '''
- emails_queue = django_rq.get_queue('emails-queue') # Get emails queue
- emails_queue.enqueue( # Enqueue email notification
- consumer,
- addresses=[user.email],
- subject='Welcome to Rekono Bot',
- template_name='user_telegram_linked_notification.html',
- data={'time': timezone.now().strftime(DATETIME_FORMAT)}
- )
-
-
-def execution_notifications(emails: List[str], execution: Any, findings: List[Finding]) -> None:
- '''Send email notifications with execution results.
-
- Args:
- emails (List[str]): Email address list to notify
- execution (Any): Completed execution
- findings (List[Finding]): Findings obtained during execution
- '''
- data = { # Data to include in notification
- 'execution': execution,
- 'tool': execution.tool,
- 'configuration': execution.configuration
- }
- for finding in findings: # For each finding
- if finding.__class__.__name__.lower() not in data:
- data[finding.__class__.__name__.lower()] = []
- data[finding.__class__.__name__.lower()].append(finding) # Add finding to the data
- # Send email notifications
- emails_queue = django_rq.get_queue('emails-queue') # Get emails queue
- emails_queue.enqueue( # Enqueue email notifications
- consumer,
- addresses=emails,
- subject=f'[Rekono] {data["tool"].name} execution completed',
- template_name='execution_notification.html',
- data=data
- )
diff --git a/src/backend/executions/__init__.py b/src/backend/executions/__init__.py
index f3390c9e6..e69de29bb 100644
--- a/src/backend/executions/__init__.py
+++ b/src/backend/executions/__init__.py
@@ -1 +0,0 @@
-'''Executions.'''
diff --git a/src/backend/executions/apps.py b/src/backend/executions/apps.py
index 5ac0eff7b..7223c4465 100644
--- a/src/backend/executions/apps.py
+++ b/src/backend/executions/apps.py
@@ -1,7 +1,6 @@
from django.apps import AppConfig
+from framework.apps import BaseApp
-class ExecutionsConfig(AppConfig):
- '''Executions Django application.'''
-
- name = 'executions'
+class ExecutionsConfig(BaseApp, AppConfig):
+ name = "executions"
diff --git a/src/backend/executions/enums.py b/src/backend/executions/enums.py
new file mode 100644
index 000000000..8ba4ce8be
--- /dev/null
+++ b/src/backend/executions/enums.py
@@ -0,0 +1,12 @@
+from django.db import models
+
+# Create your enums here.
+
+
+class Status(models.TextChoices):
+ REQUESTED = "Requested"
+ SKIPPED = "Skipped"
+ RUNNING = "Running"
+ CANCELLED = "Cancelled"
+ ERROR = "Error"
+ COMPLETED = "Completed"
diff --git a/src/backend/executions/filters.py b/src/backend/executions/filters.py
index 93ac26a30..18faf573f 100644
--- a/src/backend/executions/filters.py
+++ b/src/backend/executions/filters.py
@@ -1,47 +1,38 @@
+from django_filters.filters import ChoiceFilter, ModelChoiceFilter
from django_filters.rest_framework import FilterSet
-from django_filters.rest_framework.filters import OrderingFilter
-
from executions.models import Execution
+from processes.models import Process
+from projects.models import Project
+from targets.models import Target
+from tools.models import Tool
+from users.models import User
class ExecutionFilter(FilterSet):
- '''FilterSet to filter and sort executions entities.'''
-
- o = OrderingFilter( # Ordering fields
- fields=(
- ('task__target', 'target'),
- ('task__target__project', 'project'),
- ('task__process', 'process'),
- ('task__intensity', 'intensity'),
- ('task__executor', 'executor'),
- 'tool',
- 'configuration',
- 'status',
- 'start',
- 'end'
- ),
+ target = ModelChoiceFilter(queryset=Target.objects.all(), field_name="task__target")
+ project = ModelChoiceFilter(
+ queryset=Project.objects.all(), field_name="task__target__project"
+ )
+ process = ModelChoiceFilter(
+ queryset=Process.objects.all(), field_name="task__process"
+ )
+ tool = ModelChoiceFilter(
+ queryset=Tool.objects.all(), field_name="configuration__tool"
+ )
+ stage = ChoiceFilter(field_name="configuration__stage")
+ intensity = ChoiceFilter(field_name="task__intensity")
+ executor = ModelChoiceFilter(
+ queryset=User.objects.all(), field_name="task__executor"
)
class Meta:
- '''FilterSet metadata.'''
-
model = Execution
- fields = { # Filter fields
- 'task': ['exact'],
- 'task__target': ['exact'],
- 'task__target__target': ['exact', 'icontains'],
- 'task__target__project': ['exact'],
- 'task__target__project__name': ['exact', 'icontains'],
- 'task__process': ['exact'],
- 'task__intensity': ['exact'],
- 'task__executor': ['exact'],
- 'task__executor__username': ['exact', 'icontains'],
- 'tool': ['exact'],
- 'tool__name': ['exact', 'icontains'],
- 'configuration': ['exact'],
- 'configuration__name': ['exact', 'icontains'],
- 'configuration__stage': ['exact'],
- 'status': ['exact'],
- 'start': ['gte', 'lte', 'exact'],
- 'end': ['gte', 'lte', 'exact']
+ fields = {
+ "task": ["exact", "isnull"],
+ "group": ["exact"],
+ "configuration": ["exact"],
+ "status": ["exact"],
+ "enqueued_at": ["gte", "lte", "exact"],
+ "start": ["gte", "lte", "exact"],
+ "end": ["gte", "lte", "exact"],
}
diff --git a/src/backend/executions/migrations/0001_initial.py b/src/backend/executions/migrations/0001_initial.py
deleted file mode 100644
index d29401f3a..000000000
--- a/src/backend/executions/migrations/0001_initial.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Generated by Django 3.2.13 on 2022-04-24 15:14
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- ('tools', '0002_initial'),
- ('tasks', '0002_initial'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='Execution',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('rq_job_id', models.TextField(blank=True, max_length=50, null=True)),
- ('extra_data_path', models.TextField(blank=True, max_length=50, null=True)),
- ('output_file', models.TextField(blank=True, max_length=50, null=True)),
- ('output_plain', models.TextField(blank=True, null=True)),
- ('output_error', models.TextField(blank=True, null=True)),
- ('status', models.TextField(choices=[('Requested', 'Requested'), ('Skipped', 'Skipped'), ('Running', 'Running'), ('Cancelled', 'Cancelled'), ('Error', 'Error'), ('Completed', 'Completed')], default='Requested', max_length=10)),
- ('start', models.DateTimeField(blank=True, null=True)),
- ('end', models.DateTimeField(blank=True, null=True)),
- ('imported_in_defectdojo', models.BooleanField(default=False)),
- ('configuration', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='tools.configuration')),
- ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='executions', to='tasks.task')),
- ('tool', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tools.tool')),
- ],
- ),
- ]
diff --git a/src/backend/executions/models.py b/src/backend/executions/models.py
index 3fc5bbbe4..8df029009 100644
--- a/src/backend/executions/models.py
+++ b/src/backend/executions/models.py
@@ -1,43 +1,44 @@
from django.db import models
-from projects.models import Project
-from tasks.enums import Status
+from executions.enums import Status
+from framework.models import BaseModel
from tasks.models import Task
-from tools.models import Configuration, Tool
+from tools.models import Configuration
# Create your models here.
-class Execution(models.Model):
- '''Execution model.'''
-
- task = models.ForeignKey(Task, related_name='executions', on_delete=models.CASCADE) # Related Task
- rq_job_id = models.TextField(max_length=50, blank=True, null=True) # Job Id in the executions queue
- tool = models.ForeignKey(Tool, on_delete=models.CASCADE) # Tool
- configuration = models.ForeignKey(Configuration, on_delete=models.CASCADE, blank=True, null=True) # Configuration
- extra_data_path = models.TextField(max_length=50, blank=True, null=True) # Filepath with extra data
- output_file = models.TextField(max_length=50, blank=True, null=True) # Tool output filepath
- output_plain = models.TextField(blank=True, null=True) # Tool output in plain text
- output_error = models.TextField(blank=True, null=True) # Tool errors
- status = models.TextField(max_length=10, choices=Status.choices, default=Status.REQUESTED) # Execution status
- start = models.DateTimeField(blank=True, null=True) # Start date
- end = models.DateTimeField(blank=True, null=True) # End date
- imported_in_defectdojo = models.BooleanField(default=False) # Indicate if it has been imported yet
+class Execution(BaseModel):
+ """Execution model."""
+
+ task = models.ForeignKey(
+ Task, related_name="executions", on_delete=models.CASCADE, blank=True, null=True
+ )
+ group = models.IntegerField(default=1)
+ # Job Id in the executions queue
+ rq_job_id = models.TextField(max_length=50, blank=True, null=True)
+ configuration = models.ForeignKey(
+ Configuration, on_delete=models.CASCADE, blank=True, null=True
+ )
+ output_file = models.TextField(max_length=50, blank=True, null=True)
+ output_plain = models.TextField(blank=True, null=True)
+ output_error = models.TextField(blank=True, null=True)
+ skipped_reason = models.TextField(blank=True, null=True)
+ status = models.TextField(
+ max_length=10, choices=Status.choices, default=Status.REQUESTED
+ )
+ enqueued_at = models.DateTimeField(blank=True, null=True)
+ start = models.DateTimeField(blank=True, null=True)
+ end = models.DateTimeField(blank=True, null=True)
+ defect_dojo_test_id = models.IntegerField(blank=True, null=True)
def __str__(self) -> str:
- '''Instance representation in text format.
+ """Instance representation in text format.
Returns:
str: String value that identifies this instance
- '''
- return (
- f'{self.task.target.project.name} - {self.task.target.target} - '
- f'{self.tool.name} - {self.configuration.name}'
- )
-
- def get_project(self) -> Project:
- '''Get the related project for the instance. This will be used for authorization purposes.
+ """
+ return f"{self.task.__str__()}{f' - {self.configuration.__str__()}' if self.task.process else ''}"
- Returns:
- Project: Related project entity
- '''
- return self.task.target.project
+ @classmethod
+ def get_project_field(cls) -> str:
+ return "task__target__project"
diff --git a/src/backend/executions/queue/__init__.py b/src/backend/executions/queue/__init__.py
deleted file mode 100644
index f43f31bb7..000000000
--- a/src/backend/executions/queue/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-'''Executions queue.'''
diff --git a/src/backend/executions/queue/consumer.py b/src/backend/executions/queue/consumer.py
deleted file mode 100644
index 98686d2a5..000000000
--- a/src/backend/executions/queue/consumer.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from typing import List
-
-import rq
-from django.utils import timezone
-from django_rq import job
-from executions.models import Execution
-from executions.queue import utils as queue_utils
-from input_types.base import BaseInput
-from tasks.enums import Status
-from tools import utils as tool_utils
-from tools.models import Argument, Intensity
-from tools.tools.base_tool import BaseTool
-
-
-@job('executions-queue')
-def consumer(
- execution: Execution,
- intensity: Intensity,
- arguments: List[Argument],
- targets: List[BaseInput],
- previous_findings: List[BaseInput]
-) -> BaseTool:
- '''Consume jobs from executions queue and executes them.
-
- Args:
- execution (Execution): Execution associated to the job
- intensity (Intensity): Intensity to apply in the execution
- arguments (List[Argument]): Arguments implied in the execution
- targets (List[BaseInput]): Targets and resources to include in the execution
- previous_findings (List[Finding]): Findings from previous executions to include in the execution
-
- Returns:
- BaseTool: Tool instance that executed the tool and saved the results
- '''
- tool_class = tool_utils.get_tool_class_by_name(execution.tool.name) # Get Tool class from Tool name
- tool_runner = tool_class(execution, intensity, arguments) # Create Tool instance
- current_job = rq.get_current_job() # Get current Job
- if not previous_findings and current_job._dependency_ids: # No previous findings and dependencies
- previous_findings = queue_utils.process_dependencies( # Get findings from dependencies
- execution,
- intensity,
- arguments,
- targets,
- current_job,
- tool_runner
- )
- # If related task start date is null
- # It could be established before, if this execution belongs to a process execution
- if not execution.task.start:
- execution.task.status = Status.RUNNING # Set task status to Running
- execution.task.start = timezone.now() # Set task start date
- execution.task.save(update_fields=['status', 'start'])
- tool_runner.run(targets=targets, previous_findings=previous_findings) # Tool execution
- return tool_runner
diff --git a/src/backend/executions/queue/producer.py b/src/backend/executions/queue/producer.py
deleted file mode 100644
index 058b4da81..000000000
--- a/src/backend/executions/queue/producer.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import logging
-from typing import Callable, List
-
-import django_rq
-from executions.models import Execution
-from executions.queue import consumer
-from input_types.base import BaseInput
-from rq.job import Job
-from tools.models import Argument, Intensity
-
-logger = logging.getLogger() # Rekono logger
-
-
-def producer(
- execution: Execution,
- intensity: Intensity,
- arguments: List[Argument],
- targets: List[BaseInput] = [],
- previous_findings: List[BaseInput] = [],
- callback: Callable = None,
- dependencies: List[Job] = [],
- at_front: bool = False
-) -> Job:
- '''Enqueue a new execution in the executions queue.
-
- Args:
- execution (Execution): Execution to enqueue
- intensity (Intensity): Intensity to apply in the execution
- arguments (List[Argument]): Arguments implied in the execution
- targets (List[BaseInput], optional): Targets and resources to include. Defaults to [].
- previous_findings (List[BaseInput], optional): Findings from previous executions to include. Defaults to [].
- callback (Callable, optional): Function to call after success execution. Defaults to None.
- dependencies (List[Any], optional): Job list whose output is required to perform this execution. Defaults to [].
- at_front (bool, optional): Indicate that the execution should be enqueued at first start. Defaults to False.
-
- Returns:
- Any: Enqueued job in the executions queue
- '''
- executions_queue = django_rq.get_queue('executions-queue') # Get executions queue
- execution_job = executions_queue.enqueue( # Enqueue the Execution job
- consumer.consumer,
- execution=execution,
- intensity=intensity,
- arguments=arguments,
- targets=targets,
- previous_findings=previous_findings,
- on_success=callback,
- # Required to get results from dependent jobs
- result_ttl=7200,
- depends_on=dependencies,
- at_front=at_front
- )
- logger.info(
- f'[Execution] Execution {execution.id} ({execution.tool.name} - '
- f'{execution.configuration.name}) has been enqueued'
- )
- # Save important data in job metadata if it is needed later
- execution_job.meta['execution'] = execution
- execution_job.meta['intensity'] = intensity
- execution_job.meta['arguments'] = arguments
- execution_job.meta['callback'] = callback
- execution_job.meta['targets'] = targets
- execution_job.save_meta()
- execution.rq_job_id = execution_job.id # Save job Id in execution model
- execution.save(update_fields=['rq_job_id'])
- return execution_job
diff --git a/src/backend/executions/queue/utils.py b/src/backend/executions/queue/utils.py
deleted file mode 100644
index a8e56f9a4..000000000
--- a/src/backend/executions/queue/utils.py
+++ /dev/null
@@ -1,125 +0,0 @@
-import logging
-from typing import List, cast
-
-import django_rq
-from executions import utils
-from executions.models import Execution
-from executions.queue import producer
-from findings.models import Finding
-from input_types.models import BaseInput
-from processes.executor.callback import process_callback
-from queues.utils import cancel_and_delete_job
-from rq.job import Job
-from rq.registry import DeferredJobRegistry
-from tools.models import Argument, Intensity
-from tools.tools.base_tool import BaseTool
-
-logger = logging.getLogger() # Rekono logger
-
-
-def get_findings_from_dependencies(dependencies: list) -> List[BaseInput]:
- '''Get findings from dependencies.
-
- Args:
- dependencies (list): Id list of dependency jobs
-
- Returns:
- List[BaseInput]: Finding list obtained from dependencies
- '''
- executions_queue = django_rq.get_queue('executions-queue') # Get execution list
- findings = []
- for dep_id in dependencies: # For each dependency Id
- dependency = executions_queue.fetch_job(dep_id) # Get dependency job
- if not dependency or not dependency.result:
- continue # No job or results found
- findings.extend(dependency.result.findings) # Get findings from result
- return findings
-
-
-def update_new_dependencies(parent_job: str, new_jobs: list) -> None:
- '''Update on hold jobs dependencies to include new jobs as dependency. Based on the parent job dependents.
-
- Args:
- parent_job (str): Parent job Id, used to get affected on hold jobs
- new_jobs (list): Id list of new jobs
- '''
- executions_queue = django_rq.get_queue('executions-queue') # Get execution list
- registry = DeferredJobRegistry(queue=executions_queue) # Get on hold jobs registry
- for job_id in registry.get_job_ids(): # For each on hold job
- job_on_hold = executions_queue.fetch_job(job_id) # Get on hold job
- # If on hold job is waiting for parent job
- if job_on_hold and parent_job in job_on_hold._dependency_ids:
- dependencies = job_on_hold._dependency_ids # Get on hold job original dependencies
- # Include new jobs as on hold job dependency
- dependencies.extend(new_jobs)
- meta = job_on_hold.get_meta() # Get on hold job metadata
- cancel_and_delete_job('executions-queue', job_id) # Cancel and delete on hold job
- # Enqueue an on hold job copy with new dependencies
- producer.producer(
- meta['execution'],
- meta['intensity'],
- meta['arguments'],
- targets=meta['targets'],
- callback=meta['callback'],
- dependencies=dependencies
- )
-
-
-def process_dependencies(
- execution: Execution,
- intensity: Intensity,
- arguments: List[Argument],
- targets: List[BaseInput],
- current_job: Job,
- tool_runner: BaseTool
-) -> List[BaseInput]:
- '''Get findings from job dependencies and enqueue new executions if required.
-
- Args:
- execution (Execution): Execution associated to the current job
- intensity (Intensity): Intensity to apply in the execution
- arguments (List[Argument]): Arguments implied in the execution
- targets (List[BaseInput]): Targets and resources to include in the execution
- current_job (Job): Current job
- tool_runner (BaseTool): Tool instance associated to the tool
-
- Returns:
- List[Finding]: Finding list to include in the current job execution
- '''
- # Get findings from dependent jobs
- findings = get_findings_from_dependencies(current_job._dependency_ids)
- if not findings:
- logger.info('[Execution] No findings found from dependencies')
- return [] # No findings found
- new_jobs_ids = []
- # Get required executions to include all previous findings
- executions: List[List[BaseInput]] = utils.get_executions_from_findings(findings, execution.tool)
- logger.info(f'[Execution] {len(executions) - 1} new executions from previous findings')
- # Filter executions based on tool arguments
- executions = [
- param_set for param_set in executions if tool_runner.check_arguments(targets, cast(List[Finding], param_set))
- ]
- # For each executions, except first whose findings will be included in the current jobs
- for findings in executions[1:]:
- # Create a new execution entity from the current execution data
- new_execution = Execution.objects.create(
- task=execution.task,
- tool=execution.tool,
- configuration=execution.configuration
- )
- job = producer.producer( # Enqueue the new execution
- new_execution,
- intensity,
- arguments,
- targets=targets,
- previous_findings=findings, # Include the previous findings
- callback=process_callback,
- # At queue start, because it could be a dependency of next jobs
- at_front=True
- )
- new_jobs_ids.append(job.id) # Save new Job Id
- if new_jobs_ids: # New Jobs has been created
- # Update next jobs dependencies based on current job dependents
- update_new_dependencies(current_job.id, new_jobs_ids)
- # Return first findings list to be used in the current job
- return executions[0] if executions else []
diff --git a/src/backend/executions/queues.py b/src/backend/executions/queues.py
new file mode 100644
index 000000000..3f0a1f11f
--- /dev/null
+++ b/src/backend/executions/queues.py
@@ -0,0 +1,173 @@
+import logging
+from typing import Dict, List, Tuple
+
+import rq
+from django.utils import timezone
+from django_rq import job
+from executions.models import Execution
+from findings.framework.models import Finding
+from findings.queues import FindingsQueue
+from framework.models import BaseInput
+from framework.queues import BaseQueue
+from parameters.models import InputTechnology, InputVulnerability
+from rq.job import Job
+from rq.registry import DeferredJobRegistry
+from target_ports.models import TargetPort
+from tools.executors.base import BaseExecutor
+from tools.models import Input, Tool
+from tools.parsers.base import BaseParser
+from wordlists.models import Wordlist
+
+logger = logging.getLogger()
+
+
+class ExecutionsQueue(BaseQueue):
+ name = "executions-queue"
+
+ def enqueue(
+ self,
+ execution: Execution,
+ findings: List[Finding],
+ target_ports: List[TargetPort],
+ input_vulnerabilities: List[InputVulnerability],
+ input_technologies: List[InputTechnology],
+ wordlists: List[Wordlist],
+ dependencies: List[Job] = [],
+ at_front: bool = False,
+ ) -> Job:
+ job = self._get_queue().enqueue(
+ self.consume,
+ execution=execution,
+ findings=findings,
+ target_ports=target_ports,
+ input_vulnerabilities=input_vulnerabilities,
+ input_technologies=input_technologies,
+ wordlists=wordlists,
+ result_ttl=7200,
+ depends_on=dependencies,
+ at_front=at_front,
+ )
+ logger.info(
+ f"[Execution] Execution {execution.id} ({execution.configuration.tool.name} - "
+ f"{execution.configuration.name}) has been enqueued"
+ )
+ job.meta["execution"] = execution
+ job.meta["target_ports"] = target_ports
+ job.meta["input_vulnerabilities"] = input_vulnerabilities
+ job.meta["input_technologies"] = input_technologies
+ job.meta["wordlists"] = wordlists
+ execution.enqueued_at = timezone.now()
+ execution.rq_job_id = job.id
+ execution.save(update_fields=["rq_job_id"])
+ return job
+
+ @staticmethod
+ @job("executions-queue")
+ def consume(
+ execution: Execution,
+ findings: List[Finding],
+ target_ports: List[TargetPort],
+ input_vulnerabilities: List[InputVulnerability],
+ input_technologies: List[InputTechnology],
+ wordlists: List[Wordlist],
+ ) -> Tuple[Execution, List[Finding]]:
+ executor: BaseExecutor = execution.configuration.tool.get_executor_class()(
+ execution
+ )
+ current_job = rq.get_current_job()
+ if not findings and current_job._dependency_ids:
+ (
+ findings,
+ target_ports,
+ input_vulnerabilities,
+ input_technologies,
+ wordlists,
+ ) = ExecutionsQueue._get_findings_from_dependencies(
+ executor,
+ target_ports,
+ input_vulnerabilities,
+ input_technologies,
+ wordlists,
+ ).values()
+ executor.execute(
+ findings, target_ports, input_vulnerabilities, input_technologies, wordlists
+ )
+ parser: BaseParser = execution.configuration.tool.get_parser_class()(
+ executor, execution.output_plain
+ )
+ parser.parse()
+ FindingsQueue().enqueue(execution, parser.findings)
+ return execution, parser.findings
+
+ @staticmethod
+ def _get_findings_from_dependencies(
+ executor: BaseExecutor,
+ target_ports: List[TargetPort],
+ input_vulnerabilities: List[InputVulnerability],
+ input_technologies: List[InputTechnology],
+ wordlists: List[Wordlist],
+ current_job: Job,
+ ) -> Dict[int, List[BaseInput]]:
+ findings = []
+ queue = ExecutionsQueue._get_queue()
+ for dependency_id in current_job._dependency_ids:
+ dependency = queue.fetch_job(dependency_id)
+ if dependency and dependency.result:
+ findings.extend(dependency.result[1])
+ if not findings:
+ return findings
+ executions = [
+ e
+ for e in ExecutionsQueue._calculate_executions(
+ executor.execution.configuration.tool,
+ findings,
+ target_ports,
+ input_vulnerabilities,
+ input_technologies,
+ wordlists,
+ )
+ if executor.check_arguments(
+ e.get(0, []), e.get(1, []), e.get(2, []), e.get(3, []), e.get(4, [])
+ )
+ ]
+ logger.info(
+ f"[Execution] New {len(executions) - 1} executions from previous findings"
+ )
+ new_jobs = []
+ for execution in executions[1:]:
+ new_execution = Execution.objects.create(
+ task=executor.execution.task,
+ configuration=executor.execution.configuration,
+ group=executor.execution.group,
+ )
+ job = queue.enqueue(
+ new_execution,
+ execution.get(0, []),
+ execution.get(1, []),
+ execution.get(2, []),
+ execution.get(3, []),
+ execution.get(4, []),
+ # At queue start, because it could be a dependency of next jobs
+ at_front=True,
+ )
+ new_jobs.append(job.id)
+ if new_jobs:
+ instance = ExecutionsQueue()
+ registry = DeferredJobRegistry(queue=queue)
+ for pending_job_id in registry.get_job_ids():
+ pending_job = queue.fetch_job(pending_job_id)
+ if pending_job and current_job.id in pending_job._dependency_ids:
+ dependencies = pending_job._dependency_ids
+ meta = pending_job.get_meta()
+ instance.cancel_job(pending_job_id)
+ instance.delete_job(pending_job_id)
+ instance.enqueue(
+ meta["execution"],
+ [],
+ meta["target_ports"],
+ meta["input_vulnerabilities"],
+ meta["input_technologies"],
+ meta["wordlists"],
+ dependencies=dependencies + new_jobs,
+ )
+ return executions[0] if executions else {}
diff --git a/src/backend/executions/serializers.py b/src/backend/executions/serializers.py
index 1341ad02f..9669e6190 100644
--- a/src/backend/executions/serializers.py
+++ b/src/backend/executions/serializers.py
@@ -1,20 +1,22 @@
from executions.models import Execution
-from rest_framework import serializers
-from tools.serializers import ConfigurationSerializer, SimplyToolSerializer
+from rest_framework.serializers import ModelSerializer
+from tools.serializers import ConfigurationSerializer
-class ExecutionSerializer(serializers.ModelSerializer):
- '''Serializer to get the executions data via API.'''
-
- tool = SimplyToolSerializer(many=False, read_only=True) # Tool details
- configuration = ConfigurationSerializer(many=False, read_only=True) # Configuration details
+class ExecutionSerializer(ModelSerializer):
+ configuration = ConfigurationSerializer(many=False, read_only=True)
class Meta:
- '''Serializer metadata.'''
-
model = Execution
- fields = ( # Execution fields exposed via API
- 'id', 'task', 'tool', 'configuration', 'output_plain', 'output_error',
- 'status', 'start', 'end', 'imported_in_defectdojo', 'osint', 'host',
- 'port', 'path', 'technology', 'vulnerability', 'credential', 'exploit'
+ fields = (
+ "id",
+ "task",
+ "group",
+ "configuration",
+ "output_plain",
+ "output_error",
+ "skipped_reason",
+ "status",
+ "start",
+ "end",
)
diff --git a/src/backend/executions/utils.py b/src/backend/executions/utils.py
deleted file mode 100644
index 1a181c500..000000000
--- a/src/backend/executions/utils.py
+++ /dev/null
@@ -1,116 +0,0 @@
-from typing import Any, Dict, List, cast
-
-from input_types import utils
-from input_types.base import BaseInput
-from input_types.models import InputType
-from stringcase import snakecase
-from tools.models import Argument, Input, Tool
-
-
-def get_executions_from_findings_with_relationships(
- base_inputs: Dict[InputType, List[BaseInput]],
- tool: Tool
-) -> List[List[BaseInput]]:
- '''Get needed executions for a tool based on a given inputs with relationships between them.
-
- Args:
- base_inputs (Dict[InputType, List[BaseInput]]): InputTypes for this tool and related input list
- tool (Tool): Tool that will be executed
-
- Returns:
- List[List[BaseInput]]: List of inputs to be passed for each tool execution
- '''
- executions: List[List[BaseInput]] = [[]] # BaseInput list for each execution
- # It's required because base inputs will be assigned to executions based on relationships between them
- input_relations = utils.get_relations_between_input_types() # Get relations between input types
- # For each input type, and his related input types
- for input_type, related_input_types in list(reversed(input_relations.items())):
- if input_type not in base_inputs:
- continue
- # Get argument by tool and input type
- argument = Argument.objects.filter(tool=tool, inputs__type=input_type).order_by('inputs__order').first()
- if related_input_types: # Input with related input types
- for base_input in base_inputs[input_type]: # For each input
- for index, execution_list in enumerate(executions.copy()): # For each execution list
- assigned = False
- for related_input_type in related_input_types: # For each related input type
- # Check number of inputs of the same type in this execution
- base_inputs_by_class = [bi for bi in execution_list if bi.__class__ == base_input.__class__]
- # Get callback model class from related input type
- callback_model = related_input_type.get_callback_model_class()
- # Get field name to the related callback model
- callback_model_field = snakecase(cast(Any, callback_model).__name__) if callback_model else ''
- if (
- (
- # Check if input has a relationship
- hasattr(base_input, related_input_type.name.lower()) and
- getattr(base_input, related_input_type.name.lower()) in execution_list
- ) or
- (
- # Check if input has a relationship with a callback model
- hasattr(base_input, callback_model_field) and
- getattr(base_input, callback_model_field) in execution_list
- )
- ):
- if argument.multiple or len(base_inputs_by_class) == 0:
- # Add input in current execution
- executions[index].append(base_input)
- assigned = True
- break
- elif not argument.multiple and len(base_inputs_by_class) > 0:
- # Duplicate current execution
- new_execution = execution_list.copy() # Copy input list
- new_execution.remove(base_inputs_by_class[0]) # Remove input with same type
- new_execution.append(base_input) # Add input
- executions.append(new_execution)
- assigned = True
- break
- if assigned:
- break
- elif argument.multiple:
- # Input type without relationships and argument that allows multiple inputs
- for item in range(len(executions)):
- executions[item].extend(base_inputs[input_type]) # Add inputs in all executions
- else: # Input type without relationships
- new_executions: List[List[BaseInput]] = []
- for base_input in base_inputs[input_type]: # For each input
- for execution_list in executions: # For each execution
- new_executions.append(list(execution_list + [base_input])) # Add input to the execution
- executions = new_executions
- return executions
-
-
-def get_executions_from_findings(base_inputs: List[BaseInput], tool: Tool) -> List[List[BaseInput]]:
- '''Get needed executions for a tool based on a given input (Finding, Resource or Target) list.
-
- Args:
- base_inputs (List[BaseInput]): BaseInput list
- tool (Tool): Tool that will be executed
-
- Returns:
- List[List[BaseInput]]: List of inputs to be passed for each tool execution
- '''
- tool_inputs: List[Input] = Input.objects.filter(argument__tool=tool).all() # Get inputs by tool
- filtered_base_inputs: Dict[InputType, List[BaseInput]] = {}
- for tool_input in tool_inputs:
- base_input_list = [
- bi for bi in base_inputs if bi.__class__ in [
- tool_input.type.get_model_class(), tool_input.type.get_callback_model_class()
- ]
- ]
- if base_input_list:
- filtered_base_inputs[tool_input.type] = base_input_list # Relation between inputs and classes
- if len(filtered_base_inputs.keys()) > 1: # Multiple input types
- # Get executions from inputs with maybe relationships
- return get_executions_from_findings_with_relationships(filtered_base_inputs, tool)
- elif len(filtered_base_inputs.keys()) == 1: # Only one input type
- # Get argument by tool and input type
- argument = Argument.objects.filter(
- tool=tool, inputs__type=list(filtered_base_inputs.keys())[0]
- ).order_by('inputs__order').first()
- if argument.multiple: # Argument with multiple inputs
- return list(filtered_base_inputs.values()) # One execution with all inputs
- else:
- return [[bi] for bi in list(filtered_base_inputs.values())[0]] # One execution for each input
- # By default, one execution with all inputs
- return [base_inputs]
diff --git a/src/backend/executions/views.py b/src/backend/executions/views.py
index 687241fe1..af601a3dc 100644
--- a/src/backend/executions/views.py
+++ b/src/backend/executions/views.py
@@ -1,19 +1,42 @@
-from api.views import GetViewSet
-from rest_framework.mixins import ListModelMixin, RetrieveModelMixin
-
from executions.filters import ExecutionFilter
from executions.models import Execution
from executions.serializers import ExecutionSerializer
+from framework.views import BaseViewSet
+from rest_framework.permissions import IsAuthenticated
+from security.authorization.permissions import (
+ ProjectMemberPermission,
+ RekonoModelPermission,
+)
# Create your views here.
-class ExecutionViewSet(GetViewSet, ListModelMixin, RetrieveModelMixin):
- '''Execution ViewSet that includes: get and retrieve features.'''
-
- queryset = Execution.objects.all().order_by('-id')
+class ExecutionViewSet(BaseViewSet):
+ queryset = Execution.objects.all()
serializer_class = ExecutionSerializer
filterset_class = ExecutionFilter
- # Fields used to search executions
- search_fields = ['task__target__target', 'tool__name', 'configuration__name']
- members_field = 'task__target__project__members'
+ permission_classes = [
+ IsAuthenticated,
+ RekonoModelPermission,
+ ProjectMemberPermission,
+ ]
+ search_fields = [
+ "task__target__target",
+ "task__process__name",
+ "configuration__tool__name",
+ "configuration__name",
+ ]
+ ordering_fields = [
+ "id",
+ "task",
+ "group",
+ "configuration",
+ "configuration__tool",
+ "creation",
+ "enqueued_at",
+ "start",
+ "end",
+ ]
+ http_method_names = [
+ "get",
+ ]
diff --git a/src/backend/findings/__init__.py b/src/backend/findings/__init__.py
index c12bc1f9f..e69de29bb 100644
--- a/src/backend/findings/__init__.py
+++ b/src/backend/findings/__init__.py
@@ -1 +0,0 @@
-'''Findings.'''
diff --git a/src/backend/findings/apps.py b/src/backend/findings/apps.py
index 3c1b0b0c8..71af2bf71 100644
--- a/src/backend/findings/apps.py
+++ b/src/backend/findings/apps.py
@@ -1,7 +1,6 @@
from django.apps import AppConfig
+from framework.apps import BaseApp
-class FindingsConfig(AppConfig):
- '''Findings Django application.'''
-
- name = 'findings'
+class FindingsConfig(BaseApp, AppConfig):
+ name = "findings"
diff --git a/src/backend/findings/enums.py b/src/backend/findings/enums.py
index 15d392f37..46abe26f3 100644
--- a/src/backend/findings/enums.py
+++ b/src/backend/findings/enums.py
@@ -2,60 +2,55 @@
class Severity(models.TextChoices):
- '''Severity values to categorize findings, specially Vulnerability findings.'''
-
- INFO = 'Info'
- LOW = 'Low'
- MEDIUM = 'Medium'
- HIGH = 'High'
- CRITICAL = 'Critical'
-
-
-class DataType(models.TextChoices):
- '''Data types to categorize OSINT findings.'''
-
- IP = 'IP'
- DOMAIN = 'Domain'
- VHOST = 'VHOST'
- URL = 'Url'
- EMAIL = 'Email'
- LINK = 'Link'
- ASN = 'ASN'
- USER = 'Username'
- PASSWORD = 'Password'
-
-
-class OSType(models.TextChoices):
- '''OS types to categorize Host findings.'''
-
- LINUX = 'Linux'
- WINDOWS = 'Windows'
- MACOS = 'MacOS'
- IOS = 'iOS'
- ANDROID = 'Android'
- SOLARIS = 'Solaris'
- FREEBSD = 'FreeBSD'
- OTHER = 'Other'
+ INFO = "Info"
+ LOW = "Low"
+ MEDIUM = "Medium"
+ HIGH = "High"
+ CRITICAL = "Critical"
+
+
+class OSINTDataType(models.TextChoices):
+ IP = "IP"
+ DOMAIN = "Domain"
+ VHOST = "VHOST"
+ URL = "Url"
+ EMAIL = "Email"
+ LINK = "Link"
+ ASN = "ASN"
+ USER = "Username"
+ PASSWORD = "Password"
+
+
+class HostOS(models.TextChoices):
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+ MACOS = "MacOS"
+ IOS = "iOS"
+ ANDROID = "Android"
+ SOLARIS = "Solaris"
+ FREEBSD = "FreeBSD"
+ OTHER = "Other"
class PortStatus(models.TextChoices):
- '''Port statuses to categorize ports.'''
-
- OPEN = 'Open'
- OPEN_FILTERED = 'Open - Filtered'
- FILTERED = 'Filtered'
- CLOSED = 'Closed'
+ OPEN = "Open"
+ OPEN_FILTERED = "Open - Filtered"
+ FILTERED = "Filtered"
+ CLOSED = "Closed"
class Protocol(models.TextChoices):
- '''Protocols to categorize Port services.'''
-
- UDP = 'UDP'
- TCP = 'TCP'
+ UDP = "UDP"
+ TCP = "TCP"
class PathType(models.TextChoices):
- '''Protocols to categorize Paths.'''
+ ENDPOINT = "ENDPOINT"
+ SHARE = "SHARE"
+
- ENDPOINT = 'ENDPOINT'
- SHARE = 'SHARE'
+class TriageStatus(models.TextChoices):
+ FALSE_POSITIVE = "False Positive"
+ TRUE_POSITIVE = "True Positive"
+ WONT_FIX = "Won't Fix"
+ UNTRIAGED = "Untriaged"
diff --git a/src/backend/findings/filters.py b/src/backend/findings/filters.py
index 3e378db5c..562083b27 100644
--- a/src/backend/findings/filters.py
+++ b/src/backend/findings/filters.py
@@ -1,323 +1,165 @@
-from typing import List
-
-from api.filters import BaseToolFilter
-from django.db.models import QuerySet
-from django_filters.rest_framework import filters
-from django_filters.rest_framework.filters import OrderingFilter
-
-from findings.enums import OSType
-from findings.models import (OSINT, Credential, Exploit, Host, Path, Port,
- Technology, Vulnerability)
-
-# Common ordering anf filtering fields for all Finding models
-FINDING_ORDERING = (
- ('executions__task', 'task'),
- ('executions__task__target', 'target'),
- ('executions__task__target__project', 'project'),
- ('executions__task__executor', 'executor'),
- 'executions',
- 'detected_by',
- 'first_seen',
- 'last_seen',
- 'is_active'
+from django_filters.filters import ModelChoiceFilter
+from findings.framework.filters import FindingFilter
+from findings.models import (
+ OSINT,
+ Credential,
+ Exploit,
+ Host,
+ Path,
+ Port,
+ Technology,
+ Vulnerability,
)
-FINDING_FILTERING = {
- 'executions': ['exact'],
- 'executions__task': ['exact'],
- 'executions__task__target': ['exact'],
- 'executions__task__target__target': ['exact', 'icontains'],
- 'executions__task__target__project': ['exact'],
- 'executions__task__target__project__name': ['exact', 'icontains'],
- 'executions__task__executor': ['exact'],
- 'executions__task__executor__username': ['exact', 'icontains'],
- 'executions__start': ['gte', 'lte', 'exact'],
- 'executions__end': ['gte', 'lte', 'exact'],
- 'detected_by': ['exact'],
- 'detected_by__name': ['exact', 'icontains'],
- 'first_seen': ['gte', 'lte', 'exact'],
- 'last_seen': ['gte', 'lte', 'exact'],
- 'is_active': ['exact'],
-}
-
-
-class FindingFilter(BaseToolFilter):
- '''Common FilterSet to filter and sort findings entities.'''
-
- tool_fields: List[str] = ['executions__task__tool', 'executions__step__tool'] # Filter by two Tool fields
-
-
-class BaseVulnerabilityFilter(FindingFilter):
- '''Common FilterSet to filter findings entities based on vulnerability fields.'''
-
- port = filters.NumberFilter(method='filter_port') # Filter by port
- port_number = filters.NumberFilter(method='filter_port_number') # Filter by port number
- host = filters.NumberFilter(method='filter_host') # Filter by host
- host_address = filters.CharFilter(method='filter_host_address') # Filter by host address
- host_os_type = filters.ChoiceFilter(method='filter_host_os_type', choices=OSType.choices) # Filter by host OS
- # Port field names to use in the filters
- port_fields: List[str] = []
- host_fields: List[str] = [] # Host field names to use in the filters
-
- def filter_port(self, queryset: QuerySet, name: str, value: int) -> QuerySet:
- '''Filter queryset by port Id.
-
- Args:
- queryset (QuerySet): Finding queryset to be filtered
- name (str): Field name, not used in this case
- value (int): Port Id
-
- Returns:
- QuerySet: Filtered queryset by port Id
- '''
- return self.multiple_field_filter(queryset, value, self.port_fields)
-
- def filter_port_number(self, queryset: QuerySet, name: str, value: int) -> QuerySet:
- '''Filter queryset by port number.
-
- Args:
- queryset (QuerySet): Finding queryset to be filtered
- name (str): Field name, not used in this case
- value (int): Port number
-
- Returns:
- QuerySet: Filtered queryset by port number
- '''
- return self.multiple_field_filter(queryset, value, [f'{f}__port' for f in self.port_fields])
-
- def filter_host(self, queryset: QuerySet, name: str, value: int) -> QuerySet:
- '''Filter queryset by host Id.
-
- Args:
- queryset (QuerySet): Finding queryset to be filtered
- name (str): Field name, not used in this case
- value (int): Host Id
-
- Returns:
- QuerySet: Filtered queryset by host Id
- '''
- return self.multiple_field_filter(queryset, value, self.host_fields)
-
- def filter_host_address(self, queryset: QuerySet, name: str, value: str) -> QuerySet:
- '''Filter queryset by host address.
-
- Args:
- queryset (QuerySet): Finding queryset to be filtered
- name (str): Field name, not used in this case
- value (str): Host address
-
- Returns:
- QuerySet: Filtered queryset by host address
- '''
- return self.multiple_field_filter(queryset, value, [f'{f}__address' for f in self.host_fields])
-
- def filter_host_os_type(self, queryset: QuerySet, name: str, value: OSType) -> QuerySet:
- '''Filter queryset by host OS type.
-
- Args:
- queryset (QuerySet): Finding queryset to be filtered
- name (str): Field name, not used in this case
- value (OSType): OS type
-
- Returns:
- QuerySet: Filtered queryset by host OS type
- '''
- return self.multiple_field_filter(queryset, value, [f'{f}__os_type' for f in self.host_fields])
+from framework.filters import MultipleCharFilter, MultipleNumberFilter
class OSINTFilter(FindingFilter):
- '''FilterSet to filter and sort OSINT entities.'''
-
- # Ordering fields including common ones
- o = OrderingFilter(fields=FINDING_ORDERING + ('data', 'data_type', 'source'))
-
class Meta:
- '''FilterSet metadata.'''
-
model = OSINT
- fields = FINDING_FILTERING.copy() # Common filtering fields
- fields.update({ # Include specific filtering fields
- 'data': ['exact', 'icontains'],
- 'data_type': ['exact'],
- 'source': ['exact', 'icontains'],
- })
+ fields = {
+ **FindingFilter.Meta.fields.copy(),
+ "data": ["exact", "icontains"],
+ "data_type": ["exact"],
+ "source": ["exact", "icontains"],
+ }
class HostFilter(FindingFilter):
- '''FilterSet to filter and sort Host entities.'''
-
- o = OrderingFilter(fields=FINDING_ORDERING + ('address', 'os_type')) # Ordering fields including common ones
-
class Meta:
- '''FilterSet metadata.'''
-
model = Host
- fields = FINDING_FILTERING.copy() # Common filtering fields
- fields.update({ # Include specific filtering fields
- 'address': ['exact', 'icontains'],
- 'os_type': ['exact'],
- })
+ fields = {
+ **FindingFilter.Meta.fields.copy(),
+ "address": ["exact", "icontains"],
+ "os": ["exact", "icontains"],
+ "os_type": ["exact"],
+ }
class PortFilter(FindingFilter):
- '''FilterSet to filter and sort Port entities.'''
-
- # Ordering fields including common ones
- o = OrderingFilter(
- fields=FINDING_ORDERING + (('host__os_type', 'os_type'), 'host', 'port', 'protocol', 'service', 'status')
- )
-
class Meta:
- '''FilterSet metadata.'''
-
model = Port
- fields = FINDING_FILTERING.copy() # Common filtering fields
- fields.update({ # Include specific filtering fields
- 'host': ['exact', 'isnull'],
- 'host__address': ['exact', 'icontains'],
- 'host__os_type': ['exact'],
- 'port': ['exact'],
- 'status': ['iexact'],
- 'protocol': ['iexact'],
- 'service': ['exact', 'icontains'],
- })
+ fields = {
+ **FindingFilter.Meta.fields.copy(),
+ "host": ["exact"],
+ "port": ["exact"],
+ "status": ["exact"],
+ "protocol": ["iexact"],
+ "service": ["exact", "icontains"],
+ }
class PathFilter(FindingFilter):
- '''FilterSet to filter and sort Path entities.'''
-
- # Ordering fields including common ones
- o = OrderingFilter(fields=FINDING_ORDERING + (('port__host', 'host'), 'port', 'path', 'status', 'type'))
+ host = ModelChoiceFilter(queryset=Host.objects.all(), field_name="port__host")
class Meta:
- '''FilterSet metadata.'''
-
model = Path
- fields = FINDING_FILTERING.copy() # Common filtering fields
- fields.update({ # Include specific filtering fields
- 'port': ['exact', 'isnull'],
- 'port__host': ['exact'],
- 'port__host__address': ['exact', 'icontains'],
- 'port__host__os_type': ['exact'],
- 'port__port': ['exact'],
- 'path': ['exact', 'icontains'],
- 'status': ['exact'],
- 'type': ['exact'],
- })
+ fields = {
+ **FindingFilter.Meta.fields.copy(),
+ "port": ["exact"],
+ "path": ["exact", "icontains"],
+ "status": ["exact"],
+ "type": ["exact"],
+ }
class TechnologyFilter(FindingFilter):
- '''FilterSet to filter and sort Technology entities.'''
-
- # Ordering fields including common ones
- o = OrderingFilter(fields=FINDING_ORDERING + (('port__host', 'host'), 'port', 'name', 'version'))
+ host = ModelChoiceFilter(queryset=Host.objects.all(), field_name="port__host")
class Meta:
- '''FilterSet metadata.'''
-
model = Technology
- fields = FINDING_FILTERING.copy() # Common filtering fields
- fields.update({ # Include specific filtering fields
- 'port': ['exact', 'isnull'],
- 'port__host': ['exact'],
- 'port__host__address': ['exact', 'icontains'],
- 'port__host__os_type': ['exact'],
- 'port__port': ['exact'],
- 'name': ['exact', 'icontains'],
- 'version': ['exact', 'icontains'],
- 'related_to': ['exact'],
- })
+ fields = {
+ **FindingFilter.Meta.fields.copy(),
+ "port": ["exact"],
+ "name": ["exact", "icontains"],
+ "version": ["exact", "icontains"],
+ "description": ["exact", "icontains"],
+ "related_to": ["exact"],
+ }
class CredentialFilter(FindingFilter):
- '''FilterSet to filter and sort Credential entities.'''
-
- o = OrderingFilter(fields=FINDING_ORDERING + ('email', 'username')) # Ordering fields including common ones
+ port = ModelChoiceFilter(queryset=Port.objects.all(), field_name="technology__port")
+ host = ModelChoiceFilter(
+ queryset=Host.objects.all(), field_name="technology__port__host"
+ )
class Meta:
- '''FilterSet metadata.'''
-
model = Credential
- fields = FINDING_FILTERING.copy() # Common filtering fields
- fields.update({ # Include specific filtering fields
- 'technology': ['exact', 'isnull'],
- 'technology__port': ['exact', 'isnull'],
- 'technology__port__host': ['exact'],
- 'technology__port__host__address': ['exact', 'icontains'],
- 'technology__port__host__os_type': ['exact'],
- 'technology__port__port': ['exact'],
- 'technology__name': ['exact', 'icontains'],
- 'technology__version': ['exact', 'icontains'],
- 'email': ['exact', 'icontains'],
- 'username': ['exact', 'icontains'],
- })
+ fields = {
+ **FindingFilter.Meta.fields.copy(),
+ "technology": ["exact"],
+ "technology__name": ["exact", "icontains"],
+ "technology__version": ["exact", "icontains"],
+ "email": ["exact", "icontains"],
+ "username": ["exact", "icontains"],
+ "secret": ["exact", "icontains"],
+ }
-class VulnerabilityFilter(BaseVulnerabilityFilter):
- '''FilterSet to filter and sort Vulnerability entities.'''
-
- # Port field names to use in the filters
- port_fields: List[str] = ['technology__port', 'port']
- # Host field names to use in the filters
- host_fields: List[str] = ['technology__port__host', 'port__host']
- # Ordering fields including common ones
- o = OrderingFilter(fields=FINDING_ORDERING + ('port', 'technology', 'name', 'severity', 'cve'))
+class VulnerabilityFilter(FindingFilter):
+ port = MultipleNumberFilter(fields=["technology__port", "port"])
+ host = MultipleNumberFilter(fields=["technology__port__host", "port__host"])
class Meta:
- '''FilterSet metadata.'''
-
model = Vulnerability
- fields = FINDING_FILTERING.copy() # Common filtering fields
- fields.update({ # Include specific filtering fields
- 'port': ['isnull'],
- 'technology': ['exact', 'isnull'],
- 'technology__name': ['exact', 'icontains'],
- 'technology__version': ['exact', 'icontains'],
- 'name': ['exact', 'icontains'],
- 'description': ['exact', 'icontains'],
- 'severity': ['exact'],
- 'cve': ['exact', 'contains'],
- 'exploit': ['isnull']
- })
-
-
-class ExploitFilter(BaseVulnerabilityFilter):
- '''FilterSet to filter and sort Exploit entities.'''
-
- # Port field names to use in the filters
- port_fields: List[str] = [
- 'technology__port', 'vulnerability__port',
- 'vulnerability__technology__port'
- ]
- # Host field names to use in the filters
- host_fields: List[str] = [
- 'technology__port__host', 'vulnerability__port__host',
- 'vulnerability__technology__port__host'
- ]
- # Ordering fields including common ones
- o = OrderingFilter(fields=FINDING_ORDERING + ('vulnerability', 'technology', 'title', 'edb_id'))
+ fields = {
+ **FindingFilter.Meta.fields.copy(),
+ "technology": ["exact"],
+ "technology__name": ["exact", "icontains"],
+ "technology__version": ["exact", "icontains"],
+ "name": ["exact", "icontains"],
+ "description": ["exact", "icontains"],
+ "severity": ["exact"],
+ "cve": ["exact", "contains"],
+ "cwe": ["exact", "contains"],
+ "osvdb": ["exact", "contains"],
+ }
+
+
+class ExploitFilter(FindingFilter):
+ port = MultipleNumberFilter(
+ fields=[
+ "technology__port",
+ "vulnerability__port",
+ "vulnerability__technology__port",
+ ]
+ )
+ host = MultipleNumberFilter(
+ fields=[
+ "technology__port__host",
+ "vulnerability__port__host",
+ "vulnerability__technology__port__host",
+ ]
+ )
+ technology = MultipleNumberFilter(
+ fields=[
+ "technology",
+ "vulnerability__technology",
+ ]
+ )
+ technology__name = MultipleCharFilter(
+ fields=[
+ "technology__name",
+ "vulnerability__technology__name",
+ ]
+ )
+ technology__version = MultipleCharFilter(
+ fields=[
+ "technology__version",
+ "vulnerability__technology__version",
+ ]
+ )
class Meta:
- '''FilterSet metadata.'''
-
model = Exploit
- fields = FINDING_FILTERING.copy() # Common filtering fields
- fields.update({ # Include specific filtering fields
- 'vulnerability': ['exact', 'isnull'],
- 'vulnerability__name': ['exact', 'icontains'],
- 'vulnerability__severity': ['exact'],
- 'vulnerability__cve': ['exact', 'contains'],
- 'vulnerability__technology': ['exact'],
- 'vulnerability__technology__name': ['exact', 'icontains'],
- 'vulnerability__technology__version': ['exact', 'icontains'],
- 'technology': ['exact', 'isnull'],
- 'technology__name': ['exact', 'icontains'],
- 'technology__version': ['exact', 'icontains'],
- 'technology__port': ['exact'],
- 'technology__port__host': ['exact'],
- 'technology__port__host__address': ['exact', 'icontains'],
- 'technology__port__host__os_type': ['exact'],
- 'technology__port__port': ['exact'],
- 'title': ['exact', 'icontains'],
- 'edb_id': ['exact'],
- 'reference': ['exact', 'icontains'],
- })
+ fields = {
+ **FindingFilter.Meta.fields.copy(),
+ "vulnerability": ["exact", "isnull"],
+ "vulnerability__severity": ["exact"],
+ "vulnerability__cve": ["exact"],
+ "vulnerability__cwe": ["exact"],
+ "vulnerability__osvdb": ["exact"],
+ "title": ["exact", "icontains"],
+ "edb_id": ["exact"],
+ "reference": ["exact", "icontains"],
+ }
diff --git a/src/backend/executions/migrations/__init__.py b/src/backend/findings/framework/__init__.py
similarity index 100%
rename from src/backend/executions/migrations/__init__.py
rename to src/backend/findings/framework/__init__.py
diff --git a/src/backend/findings/framework/filters.py b/src/backend/findings/framework/filters.py
new file mode 100644
index 000000000..e9bca8bc1
--- /dev/null
+++ b/src/backend/findings/framework/filters.py
@@ -0,0 +1,34 @@
+from django_filters.filters import ModelChoiceFilter
+from findings.models import OSINT
+from framework.filters import MultipleFieldFilterSet
+from projects.models import Project
+from targets.models import Target
+from tasks.models import Task
+from tools.models import Tool
+from users.models import User
+
+
+class FindingFilter(MultipleFieldFilterSet):
+ tool = ModelChoiceFilter(
+ queryset=Tool.objects.all(), field_name="executions__configuration__tool"
+ )
+ task = ModelChoiceFilter(queryset=Task.objects.all(), field_name="executions__task")
+ target = ModelChoiceFilter(
+ queryset=Target.objects.all(), field_name="executions__task__target"
+ )
+ project = ModelChoiceFilter(
+ queryset=Project.objects.all(), field_name="executions__task__target__project"
+ )
+ executor = ModelChoiceFilter(
+ queryset=User.objects.all(), field_name="executions__task__executor"
+ )
+
+ class Meta:
+ model = OSINT # It's needed to define a non-abstract model as default. It will be overwritten
+ fields = {
+ "executions": ["exact"],
+ "first_seen": ["gte", "lte", "exact"],
+ "last_seen": ["gte", "lte", "exact"],
+ "triage_status": ["exact"],
+ "triage_comment": ["exact", "icontains"],
+ }
diff --git a/src/backend/findings/framework/models.py b/src/backend/findings/framework/models.py
new file mode 100644
index 000000000..b36cdb752
--- /dev/null
+++ b/src/backend/findings/framework/models.py
@@ -0,0 +1,37 @@
+from typing import Any, Dict
+
+from django.db import models
+from executions.models import Execution
+from findings.enums import TriageStatus
+from framework.models import BaseInput
+from security.input_validator import Regex, Validator
+
+
+class Finding(BaseInput):
+ executions = models.ManyToManyField(
+ Execution,
+ related_name="%(class)s",
+ )
+ first_seen = models.DateTimeField(auto_now_add=True)
+ last_seen = models.DateTimeField(auto_now=True)
+ triage_status = models.TextField(
+ max_length=15, choices=TriageStatus.choices, default=TriageStatus.UNTRIAGED
+ )
+ triage_comment = models.TextField(
+ max_length=300, validators=[Validator(Regex.TEXT.value, code="triage_comment")]
+ )
+ defect_dojo_id = models.IntegerField(blank=True, null=True)
+ unique_fields = []
+
+ class Meta:
+ abstract = True
+
+ def get_project(self) -> Any:
+ return self.executions.first().task.target.project
+
+ @classmethod
+ def get_project_field(cls) -> str:
+ return "executions__task__target__project"
+
+ def defect_dojo(self) -> Dict[str, Any]:
+ pass # pragma: no cover
diff --git a/src/backend/findings/framework/serializers.py b/src/backend/findings/framework/serializers.py
new file mode 100644
index 000000000..5774c05f3
--- /dev/null
+++ b/src/backend/findings/framework/serializers.py
@@ -0,0 +1,21 @@
+from findings.models import OSINT
+from rest_framework.serializers import ModelSerializer
+
+
+class FindingSerializer(ModelSerializer):
+ class Meta:
+ model = OSINT # It's needed to define a non-abstract model as default. It will be overwritten
+ fields = (
+ "id",
+ "executions",
+ "first_seen",
+ "last_seen",
+ "triage_status",
+ "triage_comment",
+ )
+
+
+class TriageFindingSerializer(ModelSerializer):
+ class Meta:
+ model = OSINT # It's needed to define a non-abstract model as default. It will be overwritten
+ fields = ("id", "triage_status", "triage_comment")
diff --git a/src/backend/findings/framework/views.py b/src/backend/findings/framework/views.py
new file mode 100644
index 000000000..c98e43eba
--- /dev/null
+++ b/src/backend/findings/framework/views.py
@@ -0,0 +1,25 @@
+from framework.views import BaseViewSet
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.serializers import Serializer
+from security.authorization.permissions import (
+ ProjectMemberPermission,
+ RekonoModelPermission,
+)
+
+
+class FindingViewSet(BaseViewSet):
+ triage_serializer_class = None
+ permission_classes = [
+ IsAuthenticated,
+ RekonoModelPermission,
+ ProjectMemberPermission,
+ ]
+ http_method_names = [
+ "get",
+ "put",
+ ]
+
+ def get_serializer_class(self) -> Serializer:
+ if self.request.method == "PUT":
+ return self.triage_serializer_class
+ return super().get_serializer_class()
diff --git a/src/backend/findings/migrations/0001_initial.py b/src/backend/findings/migrations/0001_initial.py
deleted file mode 100644
index 847547f95..000000000
--- a/src/backend/findings/migrations/0001_initial.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# Generated by Django 3.2.13 on 2022-04-24 15:14
-
-from django.db import migrations, models
-import django.db.models.deletion
-import input_types.base
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- ('tools', '0002_initial'),
- ('executions', '0001_initial'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='Host',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('first_seen', models.DateTimeField(auto_now_add=True)),
- ('last_seen', models.DateTimeField(auto_now_add=True)),
- ('is_active', models.BooleanField(default=True)),
- ('address', models.TextField(max_length=30)),
- ('os', models.TextField(blank=True, max_length=250, null=True)),
- ('os_type', models.TextField(choices=[('Linux', 'Linux'), ('Windows', 'Windows'), ('MacOS', 'Macos'), ('iOS', 'Ios'), ('Android', 'Android'), ('Solaris', 'Solaris'), ('FreeBSD', 'Freebsd'), ('Other', 'Other')], default='Other', max_length=10)),
- ('detected_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='tools.tool')),
- ('executions', models.ManyToManyField(related_name='host', to='executions.Execution')),
- ],
- options={
- 'abstract': False,
- },
- bases=(models.Model, input_types.base.BaseInput),
- ),
- migrations.CreateModel(
- name='Port',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('first_seen', models.DateTimeField(auto_now_add=True)),
- ('last_seen', models.DateTimeField(auto_now_add=True)),
- ('is_active', models.BooleanField(default=True)),
- ('port', models.IntegerField()),
- ('status', models.TextField(choices=[('Open', 'Open'), ('Open - Filtered', 'Open Filtered'), ('Filtered', 'Filtered'), ('Closed', 'Closed')], default='Open', max_length=15)),
- ('protocol', models.TextField(blank=True, choices=[('UDP', 'Udp'), ('TCP', 'Tcp')], max_length=5, null=True)),
- ('service', models.TextField(blank=True, max_length=50, null=True)),
- ('detected_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='tools.tool')),
- ('executions', models.ManyToManyField(related_name='port', to='executions.Execution')),
- ('host', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='port', to='findings.host')),
- ],
- options={
- 'abstract': False,
- },
- bases=(models.Model, input_types.base.BaseInput),
- ),
- migrations.CreateModel(
- name='Technology',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('first_seen', models.DateTimeField(auto_now_add=True)),
- ('last_seen', models.DateTimeField(auto_now_add=True)),
- ('is_active', models.BooleanField(default=True)),
- ('name', models.TextField(max_length=100)),
- ('version', models.TextField(blank=True, max_length=100, null=True)),
- ('description', models.TextField(blank=True, max_length=200, null=True)),
- ('reference', models.TextField(blank=True, max_length=250, null=True)),
- ('detected_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='tools.tool')),
- ('executions', models.ManyToManyField(related_name='technology', to='executions.Execution')),
- ('port', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='technology', to='findings.port')),
- ('related_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='related_technologies', to='findings.technology')),
- ],
- options={
- 'abstract': False,
- },
- bases=(models.Model, input_types.base.BaseInput),
- ),
- migrations.CreateModel(
- name='Vulnerability',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('first_seen', models.DateTimeField(auto_now_add=True)),
- ('last_seen', models.DateTimeField(auto_now_add=True)),
- ('is_active', models.BooleanField(default=True)),
- ('name', models.TextField(max_length=50)),
- ('description', models.TextField(blank=True, null=True)),
- ('severity', models.TextField(choices=[('Info', 'Info'), ('Low', 'Low'), ('Medium', 'Medium'), ('High', 'High'), ('Critical', 'Critical')], default='Medium')),
- ('cve', models.TextField(blank=True, max_length=20, null=True)),
- ('cwe', models.TextField(blank=True, max_length=20, null=True)),
- ('osvdb', models.TextField(blank=True, max_length=20, null=True)),
- ('reference', models.TextField(blank=True, max_length=250, null=True)),
- ('detected_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='tools.tool')),
- ('executions', models.ManyToManyField(related_name='vulnerability', to='executions.Execution')),
- ('port', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='vulnerability', to='findings.port')),
- ('technology', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='vulnerability', to='findings.technology')),
- ],
- options={
- 'abstract': False,
- },
- bases=(models.Model, input_types.base.BaseInput),
- ),
- migrations.CreateModel(
- name='Path',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('first_seen', models.DateTimeField(auto_now_add=True)),
- ('last_seen', models.DateTimeField(auto_now_add=True)),
- ('is_active', models.BooleanField(default=True)),
- ('path', models.TextField(max_length=500)),
- ('status', models.IntegerField(blank=True, null=True)),
- ('extra', models.TextField(blank=True, max_length=100, null=True)),
- ('type', models.TextField(choices=[('ENDPOINT', 'Endpoint'), ('SHARE', 'Share')], default='ENDPOINT')),
- ('detected_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='tools.tool')),
- ('executions', models.ManyToManyField(related_name='path', to='executions.Execution')),
- ('port', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='path', to='findings.port')),
- ],
- options={
- 'abstract': False,
- },
- bases=(models.Model, input_types.base.BaseInput),
- ),
- migrations.CreateModel(
- name='OSINT',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('first_seen', models.DateTimeField(auto_now_add=True)),
- ('last_seen', models.DateTimeField(auto_now_add=True)),
- ('is_active', models.BooleanField(default=True)),
- ('data', models.TextField(max_length=250)),
- ('data_type', models.TextField(choices=[('IP', 'Ip'), ('Domain', 'Domain'), ('Url', 'Url'), ('Email', 'Email'), ('Link', 'Link'), ('ASN', 'Asn'), ('Username', 'User'), ('Password', 'Password')], max_length=10)),
- ('source', models.TextField(blank=True, max_length=50, null=True)),
- ('reference', models.TextField(blank=True, max_length=250, null=True)),
- ('detected_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='tools.tool')),
- ('executions', models.ManyToManyField(related_name='osint', to='executions.Execution')),
- ],
- options={
- 'abstract': False,
- },
- bases=(models.Model, input_types.base.BaseInput),
- ),
- migrations.CreateModel(
- name='Exploit',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('first_seen', models.DateTimeField(auto_now_add=True)),
- ('last_seen', models.DateTimeField(auto_now_add=True)),
- ('is_active', models.BooleanField(default=True)),
- ('title', models.TextField(max_length=100)),
- ('edb_id', models.IntegerField(blank=True, null=True)),
- ('reference', models.TextField(blank=True, max_length=250, null=True)),
- ('detected_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='tools.tool')),
- ('executions', models.ManyToManyField(related_name='exploit', to='executions.Execution')),
- ('technology', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='exploit', to='findings.technology')),
- ('vulnerability', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='exploit', to='findings.vulnerability')),
- ],
- options={
- 'abstract': False,
- },
- bases=(models.Model, input_types.base.BaseInput),
- ),
- migrations.CreateModel(
- name='Credential',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('first_seen', models.DateTimeField(auto_now_add=True)),
- ('last_seen', models.DateTimeField(auto_now_add=True)),
- ('is_active', models.BooleanField(default=True)),
- ('email', models.TextField(blank=True, max_length=100, null=True)),
- ('username', models.TextField(blank=True, max_length=100, null=True)),
- ('secret', models.TextField(blank=True, max_length=300, null=True)),
- ('context', models.TextField(blank=True, max_length=300, null=True)),
- ('detected_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='tools.tool')),
- ('executions', models.ManyToManyField(related_name='credential', to='executions.Execution')),
- ('technology', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='technology', to='findings.technology')),
- ],
- options={
- 'abstract': False,
- },
- bases=(models.Model, input_types.base.BaseInput),
- ),
- ]
diff --git a/src/backend/findings/migrations/0002_alter_osint_data_type.py b/src/backend/findings/migrations/0002_alter_osint_data_type.py
deleted file mode 100644
index 8f17793ce..000000000
--- a/src/backend/findings/migrations/0002_alter_osint_data_type.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Generated by Django 3.2.16 on 2023-01-05 15:42
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('findings', '0001_initial'),
- ]
-
- operations = [
- migrations.AlterField(
- model_name='osint',
- name='data_type',
- field=models.TextField(choices=[('IP', 'Ip'), ('Domain', 'Domain'), ('VHOST', 'Vhost'), ('Url', 'Url'), ('Email', 'Email'), ('Link', 'Link'), ('ASN', 'Asn'), ('Username', 'User'), ('Password', 'Password')], max_length=10),
- ),
- ]
diff --git a/src/backend/findings/models.py b/src/backend/findings/models.py
index c79df9d2f..976a42c83 100644
--- a/src/backend/findings/models.py
+++ b/src/backend/findings/models.py
@@ -1,653 +1,445 @@
-from typing import Any, Dict, List, Union, cast
+from typing import Any, Dict
-from defectdojo.constants import DD_DATE_FORMAT
from django.db import models
-from executions.models import Execution
-from input_types.base import BaseInput
-from input_types.enums import InputKeyword
-from input_types.utils import get_url
-from projects.models import Project
+from findings.enums import (
+ HostOS,
+ OSINTDataType,
+ PathType,
+ PortStatus,
+ Protocol,
+ Severity,
+)
+from findings.framework.models import Finding
+from framework.enums import InputKeyword
+from platforms.defect_dojo.models import DefectDojoSettings
+from target_ports.models import TargetPort
from targets.enums import TargetType
-from targets.utils import get_target_type
-from tools.models import Input, Tool
-
-from findings.enums import (DataType, OSType, PathType, PortStatus, Protocol,
- Severity)
-from findings.utils import get_unique_filter
+from targets.models import Target
# Create your models here.
-def create_finding_foreign_key(model: Union[models.Model, str], name: str) -> models.ForeignKey:
- '''Create a foreign key field to create a relationship between two Finding models.
-
- Args:
- model (Union[models.Model, str]): Finding model of the foreign key
- name (str): Related name of the foreign key
-
- Returns:
- models.ForeignKey: Foreign key field
- '''
- return models.ForeignKey(model, related_name=name, on_delete=models.DO_NOTHING, blank=True, null=True)
-
-
-class Finding(models.Model, BaseInput):
- '''Common and abstract Finding model, to define the common fields for all Finding models.'''
-
- # Execution where the finding is found
- executions = models.ManyToManyField(Execution, related_name='%(class)s')
- detected_by = models.ForeignKey(Tool, on_delete=models.SET_NULL, blank=True, null=True)
- first_seen = models.DateTimeField(auto_now_add=True) # First date when the finding appear
- last_seen = models.DateTimeField(auto_now_add=True) # Last date when the finding appear
- is_active = models.BooleanField(default=True) # Indicate if the finding is active
-
- key_fields: List[Dict[str, Any]] = [] # Unique field list
-
- class Meta:
- '''Model metadata.'''
-
- abstract = True # To be extended by Finding models
-
- def __hash__(self) -> int:
- '''Get an unique value based on the object unique fields.
-
- Returns:
- int: Calculated unique value
- '''
- hash_fields = []
- # Get unique filter from key fields
- unique_filter = get_unique_filter(self.key_fields, vars(self), self.executions.first().task.target)
- for value in unique_filter.values():
- hash_fields.append(value) # Add values to the calculation
- return hash(tuple(hash_fields)) # Hash calculation
-
- def __eq__(self, o: object) -> bool:
- '''Check if other object is equals to this object.
-
- Args:
- o (object): Other object to compare
-
- Returns:
- bool: Indicate if both objects are equal or not
- '''
- if isinstance(o, self.__class__): # Check object class
- equals = True
- # Get object unique filter from object key fields
- other_filter = get_unique_filter(o.key_fields, vars(o), o.executions.first().task.target)
- self_filter = get_unique_filter(self.key_fields, vars(self), self.executions.first().task.target)
- # Get unique filter from key fields
- for key, value in self_filter.items():
- equals = equals and (other_filter.get(key) == value) # Compare all key fields
- return equals
- return False
-
- def get_project(self) -> Project:
- '''Get the related project for the instance. This will be used for authorization purposes.
-
- Returns:
- Project: Related project entity
- '''
- return self.executions.first().task.target.project
-
-
class OSINT(Finding):
- '''OSINT model.'''
-
- data = models.TextField(max_length=250) # OSINT data found
- data_type = models.TextField(max_length=10, choices=DataType.choices) # OSINT data type
- source = models.TextField(max_length=50, blank=True, null=True) # Source where data has been found
- reference = models.TextField(max_length=250, blank=True, null=True) # Reference associated to the data
+ data = models.TextField(max_length=250)
+ data_type = models.TextField(max_length=10, choices=OSINTDataType.choices)
+ source = models.TextField(max_length=50, blank=True, null=True)
+ reference = models.TextField(max_length=250, blank=True, null=True)
- key_fields: List[Dict[str, Any]] = [ # Unique field list
- {'name': 'data', 'is_base': False},
- {'name': 'data_type', 'is_base': False}
- ]
+ unique_fields = ["data", "data_type"]
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
-
- Args:
- accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
-
- Returns:
- Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
- if self.data_type in [DataType.IP, DataType.DOMAIN]:
- return {
+ return (
+ {
InputKeyword.TARGET.name.lower(): self.data,
InputKeyword.HOST.name.lower(): self.data,
- InputKeyword.URL.name.lower(): get_url(self.data)
+ InputKeyword.URL.name.lower(): self._get_url(self.data),
}
- return {}
+ if self.data_type in [OSINTDataType.IP, OSINTDataType.DOMAIN]
+ else {}
+ )
def defect_dojo(self) -> Dict[str, Any]:
- '''Get useful information to import this finding in Defect-Dojo.
-
- Returns:
- Dict[str, Any]: Useful information for Defect-Dojo imports
- '''
return {
- 'title': f'{self.data_type} found using OSINT techniques',
- 'description': self.data,
- 'severity': str(Severity.MEDIUM),
- 'date': self.last_seen.strftime(DD_DATE_FORMAT)
+ "title": f"{self.data_type} found using OSINT techniques",
+ "description": self.data,
+ "severity": Severity.MEDIUM,
+ "date": self.last_seen.strftime(
+ DefectDojoSettings.objects.first().date_format
+ ),
}
def __str__(self) -> str:
- '''Instance representation in text format.
-
- Returns:
- str: String value that identifies this instance
- '''
return self.data
class Host(Finding):
- '''Host model.'''
-
- address = models.TextField(max_length=30) # Host address
- os = models.TextField(max_length=250, blank=True, null=True) # OS full specification
- os_type = models.TextField(max_length=10, choices=OSType.choices, default=OSType.OTHER) # OS categorization
-
- key_fields: List[Dict[str, Any]] = [ # Unique field list
- {'name': 'address', 'is_base': False}
- ]
-
- def filter(self, input: Input) -> bool:
- '''Check if this instance is valid based on input filter.
+ address = models.TextField(max_length=30)
+ # OS full specification
+ os = models.TextField(max_length=250, blank=True, null=True)
+ os_type = models.TextField(
+ max_length=10, choices=HostOS.choices, default=HostOS.OTHER
+ )
- Args:
- input (Input): Tool input whose filter will be applied
-
- Returns:
- bool: Indicate if this instance match the input filter or not
- '''
- if not input.filter:
- return True
- try:
- distinct = input.filter[0] == '!'
- filter_types = [
- cast(models.TextChoices, TargetType)[f.upper()] for f in input.filter.replace('!', '').split(',s')
- ]
- host_type = get_target_type(self.address)
- return host_type not in filter_types if distinct else host_type in filter_types
- except KeyError:
- return True
+ unique_fields = ["address"]
+ filters = [Finding.Filter(TargetType, "address", lambda a: Target.get_type(a))]
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
-
- Args:
- accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
-
- Returns:
- Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
return {
InputKeyword.TARGET.name.lower(): self.address,
InputKeyword.HOST.name.lower(): self.address,
- InputKeyword.URL.name.lower(): get_url(self.address),
+ InputKeyword.URL.name.lower(): self._get_url(self.address),
}
def defect_dojo(self) -> Dict[str, Any]:
- '''Get useful information to import this finding in Defect-Dojo.
-
- Returns:
- Dict[str, Any]: Useful information for Defect-Dojo imports
- '''
- description = self.address
- if self.os:
- description += '- {self.os} ({self.os_type})'
return {
- 'title': 'Host discovered',
- 'description': description,
- 'severity': str(Severity.INFO),
- 'date': self.last_seen.strftime(DD_DATE_FORMAT)
+ "title": "Host discovered",
+ "description": " - ".join(
+ [field for field in [self.address, self.os_type] if field]
+ ),
+ "severity": Severity.INFO,
+ "date": self.last_seen.strftime(
+ DefectDojoSettings.objects.first().date_format
+ ),
}
def __str__(self) -> str:
- '''Instance representation in text format.
-
- Returns:
- str: String value that identifies this instance
- '''
return self.address
class Port(Finding):
- '''Port model.'''
-
- host = create_finding_foreign_key(Host, 'port') # Host where the port is discovered
- port = models.IntegerField() # Port number
- status = models.TextField(max_length=15, choices=PortStatus.choices, default=PortStatus.OPEN) # Port status
- protocol = models.TextField(max_length=5, choices=Protocol.choices, blank=True, null=True) # Transport protocol
- service = models.TextField(max_length=50, blank=True, null=True) # Service protocol if found
-
- key_fields: List[Dict[str, Any]] = [ # Unique field list
- {'name': 'host_id', 'is_base': True},
- {'name': 'port', 'is_base': False},
- {'name': 'protocol', 'is_base': False},
+ host = models.ForeignKey(
+ Host, related_name="port", on_delete=models.DO_NOTHING, blank=True, null=True
+ )
+ port = models.IntegerField() # Port number
+ status = models.TextField(
+ max_length=15, choices=PortStatus.choices, default=PortStatus.OPEN
+ )
+ protocol = models.TextField(
+ max_length=5, choices=Protocol.choices, blank=True, null=True
+ )
+ service = models.TextField(max_length=50, blank=True, null=True)
+
+ unique_fields = ["host", "port", "protocol"]
+ filters = [
+ Finding.Filter(int, "port"),
+ Finding.Filter(str, "service", contains=True, processor=lambda s: s.lower()),
]
- def filter(self, input: Input) -> bool:
- '''Check if this instance is valid based on input filter.
-
- Args:
- input (Input): Tool input whose filter will be applied
-
- Returns:
- bool: Indicate if this instance match the input filter or not
- '''
- if not input.filter:
- return True
- try:
- to_check = int(input.filter)
- # If the filter is a number, will be filtered by port
- return to_check == self.port
- except ValueError:
- # If the filter is a string, will be filtered by service
- return input.filter.lower() in self.service.lower()
-
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
-
- Args:
- accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
-
- Returns:
- Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
+ ports = (
+ [self.port]
+ if not accumulated
+ else accumulated.get(InputKeyword.PORTS.name.lower(), []) + [self.port]
+ )
output = {
- InputKeyword.TARGET.name.lower(): f'{self.host.address}:{self.port}',
- InputKeyword.HOST.name.lower(): self.host.address,
InputKeyword.PORT.name.lower(): self.port,
- InputKeyword.PORTS.name.lower(): [self.port],
- InputKeyword.URL.name.lower(): get_url(self.host.address, self.port),
+ InputKeyword.PORTS.name.lower(): ports,
+ InputKeyword.PORTS_COMMAS.name.lower(): ",".join([str(p) for p in ports]),
}
- if accumulated and InputKeyword.PORTS.name.lower() in accumulated:
- output[InputKeyword.PORTS.name.lower()] = accumulated[InputKeyword.PORTS.name.lower()]
- output[InputKeyword.PORTS.name.lower()].append(self.port)
- output[InputKeyword.PORTS_COMMAS.name.lower()] = ','.join([str(p) for p in output[InputKeyword.PORTS.name.lower()]]) # noqa: E501
+ if self.host:
+ output.update(
+ {
+ InputKeyword.TARGET.name.lower(): f"{self.host.address}:{self.port}",
+ InputKeyword.HOST.name.lower(): self.host.address,
+ InputKeyword.URL.name.lower(): self._get_url(
+ self.host.address, self.port
+ ),
+ }
+ )
return output
def defect_dojo(self) -> Dict[str, Any]:
- '''Get useful information to import this finding in Defect-Dojo.
-
- Returns:
- Dict[str, Any]: Useful information for Defect-Dojo imports
- '''
- description = f'{self.port} - {self.status} - {self.protocol} - {self.service}'
- if self.host:
- description = f'{self.host.address} - {description}'
+ description = f"Port: {self.port}\nStatus: {self.status}\nProtocol: {self.protocol}\nService: {self.service}"
return {
- 'title': 'Port discovered',
- 'description': description,
- 'severity': str(Severity.INFO),
- 'date': self.last_seen.strftime(DD_DATE_FORMAT)
+ "title": "Port discovered",
+ "description": f"Host: {self.host.address}\n{description}"
+ if self.host
+ else description,
+ "severity": Severity.INFO,
+ "date": self.last_seen.strftime(
+ DefectDojoSettings.objects.first().date_format
+ ),
}
def __str__(self) -> str:
- '''Instance representation in text format.
-
- Returns:
- str: String value that identifies this instance
- '''
- return f'{self.host.__str__()} - {self.port}' if self.host else str(self.port)
+ return f"{f'{self.host.__str__()} - ' if self.host else ''}{self.port}"
class Path(Finding):
- '''Path model.'''
-
- port = create_finding_foreign_key(Port, 'path') # Port where path is discovered
- path = models.TextField(max_length=500) # Path value
- # Status receive for that path. Probably HTTP status
+ port = models.ForeignKey(
+ Port, related_name="path", on_delete=models.DO_NOTHING, blank=True, null=True
+ )
+ path = models.TextField(max_length=500)
+ # Status received for that path. Probably HTTP status
status = models.IntegerField(blank=True, null=True)
- extra = models.TextField(max_length=100, blank=True, null=True) # Extra information related to the path
+ extra_info = models.TextField(max_length=100, blank=True, null=True)
# Path type depending on the protocol where it's found
type = models.TextField(choices=PathType.choices, default=PathType.ENDPOINT)
- key_fields: List[Dict[str, Any]] = [ # Unique field list
- {'name': 'port_id', 'is_base': True},
- {'name': 'path', 'is_base': False}
+ unique_fields = ["port", "path"]
+ filters = [
+ Finding.Filter(PathType, "type"),
+ Finding.Filter(int, "status"),
+ Finding.Filter(str, "path", contains=True, processor=lambda p: p.lower()),
]
- def filter(self, input: Input) -> bool:
- '''Check if this instance is valid based on input filter.
-
- Args:
- input (Input): Tool input whose filter will be applied
-
- Returns:
- bool: Indicate if this instance match the input filter or not
- '''
- if not input.filter:
- return True
- try:
- # If filter is a valid severity, vulnerability will be filtered by severity
- return cast(models.TextChoices, PathType)[input.filter.upper()] == self.type
- except KeyError:
- try:
- status_code = int(input.filter)
- # If the filter is a number, will be filtered by status
- return status_code == self.status
- except ValueError:
- # If the filter is a string, will be filtered by path
- return input.filter in self.path
+ def _clean_comparison_path(self, value: str) -> str:
+ if len(value) > 1:
+ value = self._clean_path(value)
+ if value[-1] != "/":
+ value += "/"
+ return value
+
+ def filter(self, input: Any, target: Target = None) -> bool:
+ filter = super().filter(input, target)
+ if self.port:
+ target_port = TargetPort.objects.filter(
+ target=target, port=self.port.port
+ ).first()
+ if target_port and target_port.path:
+ filter = filter and self._clean_comparison_path(self.path).startswith(
+ self._clean_comparison_path(target_port.path)
+ )
+ return filter
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
-
- Args:
- accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
+ path = self._clean_path(self.path)
+ output = (
+ {
+ **self.port.parse(accumulated),
+ InputKeyword.URL.name.lower(): self._get_url(
+ self.port.host.address, self.port.port, path
+ ),
+ }
+ if self.port
+ else {}
+ )
+ return {
+ **output,
+ InputKeyword.ENDPOINT.name.lower(): path,
+ }
- Returns:
- Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
- output = self.port.parse() if self.port else {}
- if self.type == PathType.ENDPOINT:
- output[InputKeyword.URL.name.lower()] = get_url(
- self.port.host.address,
- self.port.port,
- self.path
- )
- output[InputKeyword.ENDPOINT.name.lower()] = self.path
- return output
+ def defect_dojo_endpoint(self, target: Target) -> Dict[str, Any]:
+ return {
+ "protocol": self.port.service if self.port else None,
+ "host": self.port.host.address
+ if self.port and self.port.host
+ else target.target,
+ "port": self.port.port if self.port else None,
+ "path": self.path,
+ }
def defect_dojo(self) -> Dict[str, Any]:
- '''Get useful information to import this finding in Defect-Dojo.
-
- Returns:
- Dict[str, Any]: Useful information for Defect-Dojo imports
- '''
+ description = f"Path: {self.path}\nType: {self.type}"
+ for key, value in [("Status", self.status), ("Info", self.extra_info)]:
+ if value:
+ description = f"{description}\n{key}: {value}"
+ if self.port:
+ description = f"Port: {self.port.port}\n{description}"
+ if self.port.host:
+ description = f"Host: {self.port.host.address}\n{description}"
return {
- 'protocol': self.port.service if self.port else None,
- 'host': self.port.host.address if self.port else None,
- 'port': self.port.port if self.port else None,
- 'path': self.path
+ "title": "Path discovered",
+ "description": description,
+ "severity": Severity.INFO,
+ "date": self.last_seen.strftime(
+ DefectDojoSettings.objects.first().date_format
+ ),
}
def __str__(self) -> str:
- '''Instance representation in text format.
-
- Returns:
- str: String value that identifies this instance
- '''
- return f'{self.port.__str__()} - {self.path}' if self.port else self.path
+ return f"{f'{self.port.__str__()} - ' if self.port else ''}{self.path}"
class Technology(Finding):
- '''Technology model.'''
-
- port = create_finding_foreign_key(Port, 'technology') # Port where technology is discovered
- name = models.TextField(max_length=100) # Technology name
- version = models.TextField(max_length=100, blank=True, null=True) # Technology version
- description = models.TextField(max_length=200, blank=True, null=True) # Technology description
- related_to = create_finding_foreign_key('Technology', 'related_technologies') # Related technology if exists
- reference = models.TextField(max_length=250, blank=True, null=True) # Technology reference
-
- key_fields: List[Dict[str, Any]] = [ # Unique field list
- {'name': 'port_id', 'is_base': True},
- {'name': 'name', 'is_base': False}
+ port = models.ForeignKey(
+ Port,
+ related_name="technology",
+ on_delete=models.DO_NOTHING,
+ blank=True,
+ null=True,
+ )
+ name = models.TextField(max_length=100)
+ version = models.TextField(max_length=100, blank=True, null=True)
+ description = models.TextField(max_length=200, blank=True, null=True)
+ related_to = models.ForeignKey(
+ "Technology",
+ related_name="related_technologies",
+ on_delete=models.DO_NOTHING,
+ blank=True,
+ null=True,
+ )
+ reference = models.TextField(max_length=250, blank=True, null=True)
+
+ unique_fields = ["port", "name", "version"]
+ filters = [
+ Finding.Filter(str, "name", contains=True, processor=lambda n: n.lower())
]
- def filter(self, input: Input) -> bool:
- '''Check if this instance is valid based on input filter.
-
- Args:
- input (Input): Tool input whose filter will be applied
-
- Returns:
- bool: Indicate if this instance match the input filter or not
- '''
- return not input.filter or input.filter.lower() in self.name.lower() # Filter by technology name
-
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
+ """Get useful information from this instance to be used in tool execution as argument.
Args:
accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
Returns:
Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
- output = self.port.parse() if self.port else {}
- output[InputKeyword.TECHNOLOGY.name.lower()] = self.name
+ """
+ output = {InputKeyword.TECHNOLOGY.name.lower(): self.name}
if self.version:
- output[InputKeyword.VERSION.name.lower()] = self.version
+ output.update({InputKeyword.VERSION.name.lower(): self.version})
+ if self.port:
+ output.update(self.port.parse(accumulated))
return output
def defect_dojo(self) -> Dict[str, Any]:
- '''Get useful information to import this finding in Defect-Dojo.
-
- Returns:
- Dict[str, Any]: Useful information for Defect-Dojo imports
- '''
+ description = f"Technology: {self.name}\nVersion: {self.version}"
return {
- 'title': f'Technology {self.name} detected',
- 'description': self.description if self.description else f'{self.name} {self.version}',
- 'severity': str(Severity.LOW),
- 'cwe': 200, # CWE-200: Exposure of Sensitive Information to Unauthorized Actor
- 'references': self.reference,
- 'date': self.last_seen.strftime(DD_DATE_FORMAT)
+ "title": f"Technology {self.name} detected",
+ "description": f"{description}\nDetails: {self.description}"
+ if self.description
+ else description,
+ "severity": Severity.LOW,
+ "cwe": 200, # CWE-200: Exposure of Sensitive Information to Unauthorized Actor
+ "references": self.reference,
+ "date": self.last_seen.strftime(
+ DefectDojoSettings.objects.first().date_format
+ ),
}
def __str__(self) -> str:
- '''Instance representation in text format.
-
- Returns:
- str: String value that identifies this instance
- '''
- return f'{self.port.__str__()} - {self.name}' if self.port else self.name
+ return f"{f'{self.port.__str__()} - ' if self.port else ''}{self.name}"
class Credential(Finding):
- '''Credential model.'''
-
- # Technology where credentials are discovered
- technology = create_finding_foreign_key(Technology, 'technology')
- email = models.TextField(max_length=100, blank=True, null=True) # Email if found
- username = models.TextField(max_length=100, blank=True, null=True) # Username if found
- secret = models.TextField(max_length=300, blank=True, null=True) # Secret (password, key, etc.) if found
- context = models.TextField(max_length=300, blank=True, null=True) # Context information about credential
-
- key_fields: List[Dict[str, Any]] = [ # Unique field list
- {'name': 'email', 'is_base': False},
- {'name': 'username', 'is_base': False},
- {'name': 'secret', 'is_base': False}
- ]
+ """Credential model."""
+
+ technology = models.ForeignKey(
+ Technology,
+ related_name="credential",
+ on_delete=models.DO_NOTHING,
+ blank=True,
+ null=True,
+ )
+ email = models.TextField(max_length=100, blank=True, null=True)
+ username = models.TextField(max_length=100, blank=True, null=True)
+ # Secret (password, key, etc.) if found
+ secret = models.TextField(max_length=300, blank=True, null=True)
+ context = models.TextField(max_length=300, blank=True, null=True)
+
+ unique_fields = ["technology", "email", "username", "secret"]
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
-
- Args:
- accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
-
- Returns:
- Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
- return {
- InputKeyword.EMAIL.name.lower(): self.email,
- InputKeyword.USERNAME.name.lower(): self.username,
- InputKeyword.SECRET.name.lower(): self.secret,
- }
+ output = self.technology.parse(accumulated) if self.technology else {}
+ for key, field in [
+ (InputKeyword.EMAIL.name.lower(), self.email),
+ (InputKeyword.USERNAME.name.lower(), self.username),
+ (InputKeyword.SECRET.name.lower(), self.secret),
+ ]:
+ if field:
+ output[key] = field
+ return output
def defect_dojo(self) -> Dict[str, Any]:
- '''Get useful information to import this finding in Defect-Dojo.
-
- Returns:
- Dict[str, Any]: Useful information for Defect-Dojo imports
- '''
- description = ' - '.join([getattr(self, f) for f in ['email', 'username', 'secret']])
return {
- 'title': 'Credentials exposure',
- 'description': description,
- 'cwe': 200, # CWE-200: Exposure of Sensitive Information to Unauthorized Actor
- 'severity': str(Severity.HIGH),
- 'date': self.last_seen.strftime(DD_DATE_FORMAT)
+ "title": "Credentials exposure",
+ "description": " - ".join(
+ [field for field in [self.email, self.username, self.secret] if field]
+ ),
+ "cwe": 200, # CWE-200: Exposure of Sensitive Information to Unauthorized Actor
+ "severity": Severity.HIGH,
+ "date": self.last_seen.strftime(
+ DefectDojoSettings.objects.first().date_format
+ ),
}
def __str__(self) -> str:
- '''Instance representation in text format.
-
- Returns:
- str: String value that identifies this instance
- '''
- text = f'{self.email} - {self.username} - {self.secret}'
- if self.technology:
- text = f'{self.technology.__str__()} - {text}'
- return text
+ values = [self.technology.__str__()] if self.technology else []
+ values += [field for field in [self.email, self.username, self.secret] if field]
+ return " - ".join(values)
class Vulnerability(Finding):
- '''Vulnerability model.'''
-
- # Technology where vulnerability is found
- technology = create_finding_foreign_key(Technology, 'vulnerability')
- # Port where vulnerability is found. Only if technology is null
- port = create_finding_foreign_key(Port, 'vulnerability')
- name = models.TextField(max_length=50) # Vulnerability name
- description = models.TextField(blank=True, null=True) # Vulnerability description
- severity = models.TextField(choices=Severity.choices, default=Severity.MEDIUM) # Vulnerability severity
- cve = models.TextField(max_length=20, blank=True, null=True) # CVE
- cwe = models.TextField(max_length=20, blank=True, null=True) # CWE
- osvdb = models.TextField(max_length=20, blank=True, null=True) # OSVDB
- reference = models.TextField(max_length=250, blank=True, null=True) # Vulnerability reference
-
- key_fields: List[Dict[str, Any]] = [ # Unique field list
- {'name': 'technology_id', 'is_base': True},
- {'name': 'port_id', 'is_base': True},
- {'name': 'cve', 'is_base': False},
- {'name': 'name', 'is_base': False}
+ technology = models.ForeignKey(
+ Technology,
+ related_name="vulnerability",
+ on_delete=models.DO_NOTHING,
+ blank=True,
+ null=True,
+ )
+ port = models.ForeignKey(
+ Port,
+ related_name="vulnerability",
+ on_delete=models.DO_NOTHING,
+ blank=True,
+ null=True,
+ )
+ name = models.TextField(max_length=50)
+ description = models.TextField(blank=True, null=True)
+ severity = models.TextField(choices=Severity.choices, default=Severity.MEDIUM)
+ cve = models.TextField(max_length=20, blank=True, null=True)
+ cwe = models.TextField(max_length=20, blank=True, null=True)
+ osvdb = models.TextField(max_length=20, blank=True, null=True)
+ reference = models.TextField(max_length=250, blank=True, null=True)
+
+ unique_fields = ["technology", "port", "name", "cve"]
+ filters = [
+ Finding.Filter(Severity, "severity"),
+ Finding.Filter(str, "cve", contains=True, processor=lambda c: c.lower()),
+ Finding.Filter(str, "cwe", contains=True, processor=lambda c: c.lower()),
]
- def filter(self, input: Input) -> bool:
- '''Check if this instance is valid based on input filter.
-
- Args:
- input (Input): Tool input whose filter will be applied
-
- Returns:
- bool: Indicate if this instance match the input filter or not
- '''
- if not input.filter:
- return True
- try:
- # If filter is a valid severity, vulnerability will be filtered by severity
- return cast(models.TextChoices, Severity)[input.filter.upper()] == self.severity
- except KeyError:
- f = input.filter.lower()
- # If filter is a string, vulnerability will be filtered by:
- return (
- (self.cve and (f == 'cve' or (f.startswith('cve-') and f == self.cve.lower()))) or # CVE
- (self.cwe and (f.startswith('cwe-') and f == self.cwe.lower())) # CWE
- )
-
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
-
- Args:
- accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
-
- Returns:
- Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
- output = {}
+ output = {InputKeyword.CVE.name.lower(): self.cve}
if self.technology:
- output = self.technology.parse()
+ output.update(self.technology.parse(accumulated))
elif self.port:
- output = self.port.parse()
- if self.cve:
- output[InputKeyword.CVE.name.lower()] = self.cve
+ output.update(self.port.parse(accumulated))
return output
def defect_dojo(self) -> Dict[str, Any]:
- '''Get useful information to import this finding in Defect-Dojo.
-
- Returns:
- Dict[str, Any]: Useful information for Defect-Dojo imports
- '''
return {
- 'title': self.name,
- 'description': self.description,
- 'severity': Severity(self.severity).value,
- 'cve': self.cve,
- 'cwe': int(self.cwe.split('-', 1)[1]) if self.cwe else None,
- 'references': self.reference,
- 'date': self.last_seen.strftime(DD_DATE_FORMAT)
+ "title": self.name,
+ "description": self.description,
+ "severity": self.severity,
+ "cve": self.cve,
+ "cwe": int(self.cwe.split("-", 1)[1]) if self.cwe else None,
+ "references": self.reference,
+ "date": self.last_seen.strftime(
+ DefectDojoSettings.objects.first().date_format
+ ),
}
def __str__(self) -> str:
- '''Instance representation in text format.
-
- Returns:
- str: String value that identifies this instance
- '''
- text = self.name
- if self.technology:
- text = f'{self.technology.__str__()} - {self.name}'
- elif self.port:
- text = f'{self.port.__str__()} - {self.name}'
- if self.cve:
- text = f'{text} - {self.cve}'
- return text
+ return f"{f'{(self.technology or self.port).__str__()} - ' if self.technology or self.port else ''}{self.name}{f' - {self.cve}' if self.cve else ''}"
class Exploit(Finding):
- '''Exploit model.'''
-
- vulnerability = create_finding_foreign_key(Vulnerability, 'exploit') # Vulnerability that the exploit abuses
- # Technology that the exploit abuses. Only if vulnerability is null
- technology = create_finding_foreign_key(Technology, 'exploit')
- title = models.TextField(max_length=100) # Exploit title
- edb_id = models.IntegerField(blank=True, null=True) # Id in Exploit-DB
- reference = models.TextField(max_length=250, blank=True, null=True) # Exploit reference
-
- key_fields: List[Dict[str, Any]] = [ # Unique field list
- {'name': 'vulnerability_id', 'is_base': True},
- {'name': 'technology_id', 'is_base': True},
- {'name': 'reference', 'is_base': False}
- ]
+ vulnerability = models.ForeignKey(
+ Vulnerability,
+ related_name="exploit",
+ on_delete=models.DO_NOTHING,
+ blank=True,
+ null=True,
+ )
+ technology = models.ForeignKey(
+ Technology,
+ related_name="exploit",
+ on_delete=models.DO_NOTHING,
+ blank=True,
+ null=True,
+ )
+ title = models.TextField(max_length=100)
+ edb_id = models.IntegerField(blank=True, null=True) # Id in Exploit-DB
+ reference = models.TextField(max_length=250, blank=True, null=True)
+
+ unique_fields = ["vulnerability", "technology", "edb_id", "reference"]
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
-
- Args:
- accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
-
- Returns:
- Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
- output = {}
+ output = {InputKeyword.EXPLOIT.name.lower(): self.title}
if self.vulnerability:
- output = self.vulnerability.parse()
+ output.update(self.vulnerability.parse(accumulated))
elif self.technology:
- output = self.technology.parse()
- output[InputKeyword.EXPLOIT.name.lower()] = self.title
+ output.update(self.technology.parse(accumulated))
return output
def defect_dojo(self) -> Dict[str, Any]:
- '''Get useful information to import this finding in Defect-Dojo.
-
- Returns:
- Dict[str, Any]: Useful information for Defect-Dojo imports
- '''
return {
- 'title': f'Exploit {self.edb_id} found' if self.edb_id else 'Exploit found',
- 'description': self.title,
- 'severity': Severity(self.vulnerability.severity).value if self.vulnerability else str(Severity.MEDIUM),
- 'reference': self.reference,
- 'date': self.last_seen.strftime(DD_DATE_FORMAT)
+ "title": f"Exploit {self.edb_id} found" if self.edb_id else "Exploit found",
+ "description": self.title,
+ "severity": self.vulnerability.severity
+ if self.vulnerability
+ else Severity.MEDIUM,
+ "references": self.reference,
+ "date": self.last_seen.strftime(
+ DefectDojoSettings.objects.first().date_format
+ ),
}
def __str__(self) -> str:
- '''Instance representation in text format.
+ """Instance representation in text format.
Returns:
str: String value that identifies this instance
- '''
- text = self.title
- if self.vulnerability:
- text = f'{self.vulnerability.__str__()} - {self.title}'
- elif self.technology:
- text = f'{self.technology.__str__()} - {self.title}'
- return text
+ """
+ return f"{f'{(self.vulnerability or self.technology).__str__()} - ' if self.vulnerability or self.technology else ''}{self.title}"
diff --git a/src/backend/findings/nvd_nist.py b/src/backend/findings/nvd_nist.py
deleted file mode 100644
index 68b4f072c..000000000
--- a/src/backend/findings/nvd_nist.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import logging
-from urllib.parse import urlparse
-
-import requests
-from requests.adapters import HTTPAdapter, Retry
-
-from findings.enums import Severity
-
-# Mapping between severity values and CVSS values
-CVSS_RANGES = {
- Severity.CRITICAL: (9, 10),
- Severity.HIGH: (7, 9),
- Severity.MEDIUM: (4, 7),
- Severity.LOW: (2, 4),
- Severity.INFO: (0, 2)
-}
-
-logger = logging.getLogger() # Rekono logger
-
-
-class NvdNist:
- '''NVD NIST API handler to get information for a CVE code.'''
-
- api_url_pattern = 'https://services.nvd.nist.gov/rest/json/cve/1.0/{cve}' # API Rest URL
- cve_reference_pattern = 'https://nvd.nist.gov/vuln/detail/{cve}' # CVE reference format
-
- def __init__(self, cve: str) -> None:
- '''NVE NIST API constructor.
-
- Args:
- cve (str): CVE code
- '''
- self.cve = cve
- self.reference = self.cve_reference_pattern.format(cve=cve) # CVE reference
- self.raw_cve_info = self.request() # CVE raw information
- self.description = self.parse_description() if self.raw_cve_info else '' # CVE description
- self.cwe = self.parse_cwe() if self.raw_cve_info else None # CVE weakness as CWE code
- # CVE severity based on CVSS score
- self.severity = self.parse_severity() if self.raw_cve_info else Severity.MEDIUM
-
- def request(self) -> dict:
- '''Get information from a CVE using the NVD NIST API Rest.
-
- Returns:
- dict: Raw NVD NIST CVE information
- '''
- schema = urlparse(self.api_url_pattern).scheme # Get API schema
- session = requests.Session() # Create HTTP session
- # Configure retry protocol to prevent unexpected errors
- # Free NVD NIST API has a rate limit of 10 requests by second
- retries = Retry(total=10, backoff_factor=1, status_forcelist=[403, 500, 502, 503, 504, 599])
- session.mount(f'{schema}://', HTTPAdapter(max_retries=retries))
- try:
- response = session.get(self.api_url_pattern.format(cve=self.cve))
- except requests.exceptions.ConnectionError:
- response = session.get(self.api_url_pattern.format(cve=self.cve))
- logger.info(f'[NVD NIST] GET {self.cve} > HTTP {response.status_code}')
- return response.json()['result']['CVE_Items'][0] if response.status_code == 200 else {}
-
- def parse_description(self) -> str:
- '''Get description from raw CVE information.
-
- Returns:
- str: CVE description
- '''
- for d in self.raw_cve_info['cve']['description']['description_data'] or []:
- if d.get('lang') == 'en':
- return d.get('value')
- return ''
-
- def parse_cwe(self) -> str:
- '''Get CWE from raw CVE information.
-
- Returns:
- str: CWE code
- '''
- for item in self.raw_cve_info['cve']['problemtype']['problemtype_data'] or []:
- descriptions = item.get('description')
- if descriptions:
- for desc in descriptions:
- cwe = desc.get('value')
- if not cwe:
- continue
- if cwe.lower().startswith('cwe-'):
- return cwe
- return ''
-
- def parse_severity(self) -> str:
- '''Get severity value from raw CVE information, based on CVSS score.
-
- Returns:
- Optional[str]: Severity value
- '''
- score = 5 # Score by default: MEDIUM
- if 'baseMetricV3' in self.raw_cve_info['impact']:
- # Get CVSS version 3 if exists
- score = self.raw_cve_info['impact']['baseMetricV3']['cvssV3']['baseScore']
- elif 'baseMetricV2' in self.raw_cve_info['impact']:
- # Get CVSS version 2 if version 3 not found
- score = self.raw_cve_info['impact']['baseMetricV2']['cvssV2']['baseScore']
- for severity in CVSS_RANGES.keys():
- down, up = CVSS_RANGES[severity]
- # Search severity value based on CVSS ranges
- if (score >= down and score < up) or (severity == Severity.CRITICAL and score >= down and score <= up):
- return severity
- return Severity.MEDIUM
diff --git a/src/backend/findings/queue.py b/src/backend/findings/queue.py
deleted file mode 100644
index 15617d47b..000000000
--- a/src/backend/findings/queue.py
+++ /dev/null
@@ -1,77 +0,0 @@
-import logging
-from typing import List
-
-import django_rq
-from defectdojo.exceptions import DefectDojoException
-from defectdojo.reporter import report
-from django_rq import job
-from email_notifications import sender as email_sender
-from executions.models import Execution
-from telegram_bot import sender as telegram_sender
-from telegram_bot.messages.execution import notification_messages
-from users.enums import Notification
-
-from findings.models import Finding, Vulnerability
-from findings.nvd_nist import NvdNist
-
-logger = logging.getLogger() # Rekono logger
-
-
-def producer(execution: Execution, findings: List[Finding]) -> None:
- '''Enqueue a list of findings in the findings queue.
-
- Args:
- execution (Execution): Execution where the findings are discovered
- findings (List[Finding]): Findings list to process
- '''
- findings_queue = django_rq.get_queue('findings-queue') # Get findings queue
- findings_queue.enqueue(consumer, execution=execution, findings=findings) # Enqueue findings list
- logger.info(f'[Findings] {len(findings)} findings from execution {execution.id} have been enqueued')
-
-
-@job('findings-queue')
-def consumer(execution: Execution = None, findings: List[Finding] = []) -> None:
- '''Consume jobs from findings queue and process them.
-
- Args:
- execution (Execution, optional): Execution where the findings are discovered. Defaults to None.
- findings (List[Finding], optional): Findings list to process. Defaults to [].
- '''
- if execution and findings:
- for finding in findings: # For each finding
- if isinstance(finding, Vulnerability) and finding.cve: # If it's a vulnerability with CVE
- nn_client = NvdNist(finding.cve) # NVD NIST request to get information
- # Update vulnerability fields with the NIST information
- finding.description = nn_client.description
- finding.severity = nn_client.severity
- finding.cwe = nn_client.cwe
- finding.reference = nn_client.reference
- finding.save(update_fields=['description', 'severity', 'cwe', 'reference'])
- users_to_notify = []
- # Executor with enabled own executions notification
- if execution.task.executor.notification_scope == Notification.OWN_EXECUTIONS:
- users_to_notify.append(execution.task.executor) # Save executor user in the notify list
- # Search project members with enabled all executions notification
- search_members = execution.task.target.project.members.filter(
- notification_scope=Notification.ALL_EXECUTIONS
- ).all()
- users_to_notify.extend(list(search_members)) # Save members in the notify list
- logger.info(f'[Findings] {len(users_to_notify)} will receive a notification with the findings from execution {execution.id}') # noqa: E501
- telegram_messages = notification_messages(execution, findings) # Create Telegram message
- for user in [u for u in users_to_notify if u.telegram_notification]: # Sometimes multiple messages are needed
- for telegram_message in telegram_messages:
- # For each user with enabled Telegram notifications
- telegram_sender.send_message(user.telegram_chat.chat_id, telegram_message) # Telegram notification
- # Email notifications
- email_sender.execution_notifications(
- [u.email for u in users_to_notify if u.email_notification],
- execution,
- findings
- )
- if execution.task.target.project.defectdojo_synchronization:
- try:
- report(execution, findings) # Import execution in Defect-Dojo
- except DefectDojoException:
- # Prevent errors during the import in Defect-Dojo
- # All the exceptions are managed inside the report function
- pass
diff --git a/src/backend/findings/queues.py b/src/backend/findings/queues.py
new file mode 100644
index 000000000..c46b025af
--- /dev/null
+++ b/src/backend/findings/queues.py
@@ -0,0 +1,32 @@
+import logging
+from typing import List
+
+from django_rq import job
+from executions.models import Execution
+from findings.models import Finding
+from framework.queues import BaseQueue
+from platforms.defect_dojo.integrations import DefectDojo
+from platforms.mail.notifications import SMTP
+from platforms.nvd_nist import NvdNist
+from platforms.telegram_app.notifications.notifications import Telegram
+from rq.job import Job
+
+logger = logging.getLogger()
+
+
+class FindingsQueue(BaseQueue):
+ name = "findings-queue"
+
+ def enqueue(self, execution: Execution, findings: List[Finding]) -> Job:
+ job = super().enqueue(execution=execution, findings=findings)
+ logger.info(
+ f"[Findings] {len(findings)} findings from execution {execution.id} have been enqueued"
+ )
+ return job
+
+ @staticmethod
+ @job("findings-queue")
+    def consume(execution: Execution, findings: List[Finding]) -> None:
+ if findings:
+ for platform in [NvdNist, DefectDojo, SMTP, Telegram]:
+ platform().process_findings(execution, findings)
diff --git a/src/backend/findings/serializers.py b/src/backend/findings/serializers.py
index 686acb2a5..4b8296dd8 100644
--- a/src/backend/findings/serializers.py
+++ b/src/backend/findings/serializers.py
@@ -1,128 +1,164 @@
-from findings.models import (OSINT, Credential, Exploit, Host, Path, Port,
- Technology, Vulnerability)
-from rest_framework import serializers
-from tools.serializers import SimplyToolSerializer
-
-
-class OSINTSerializer(serializers.ModelSerializer):
- '''Serializer to get the OSINT data via API.'''
-
- detected_by = SimplyToolSerializer(many=False, read_only=True) # Tool details for read operations
-
+from findings.framework.serializers import FindingSerializer, TriageFindingSerializer
+from findings.models import (
+ OSINT,
+ Credential,
+ Exploit,
+ Host,
+ Path,
+ Port,
+ Technology,
+ Vulnerability,
+)
+
+
+class OSINTSerializer(FindingSerializer):
class Meta:
- '''Serializer metadata.'''
-
model = OSINT
- fields = ( # OSINT fields exposed via API
- 'id', 'executions', 'data', 'data_type', 'source', 'reference',
- 'detected_by', 'first_seen', 'last_seen', 'is_active'
+ fields = FindingSerializer.Meta.fields + (
+ "data",
+ "data_type",
+ "source",
+ "reference",
)
-class HostSerializer(serializers.ModelSerializer):
- '''Serializer to get the Host data via API.'''
+class TriageOSINTSerializer(TriageFindingSerializer):
+ class Meta:
+ model = OSINTSerializer.Meta.model
+ fields = TriageFindingSerializer.Meta.fields
- detected_by = SimplyToolSerializer(many=False, read_only=True) # Tool details for read operations
+class HostSerializer(FindingSerializer):
class Meta:
- '''Serializer metadata.'''
-
model = Host
- fields = ( # Host fields exposed via API
- 'id', 'executions', 'address', 'os', 'os_type', 'detected_by',
- 'first_seen', 'last_seen', 'is_active', 'port'
+ fields = FindingSerializer.Meta.fields + (
+ "address",
+ "os",
+ "os_type",
+ "port",
)
-class PortSerializer(serializers.ModelSerializer):
- '''Serializer to get the Port data via API.'''
+class TriageHostSerializer(TriageFindingSerializer):
+ class Meta:
+ model = HostSerializer.Meta.model
+ fields = TriageFindingSerializer.Meta.fields
- detected_by = SimplyToolSerializer(many=False, read_only=True) # Tool details for read operations
+class PortSerializer(FindingSerializer):
class Meta:
- '''Serializer metadata.'''
-
model = Port
- fields = ( # Port fields exposed via API
- 'id', 'executions', 'host', 'port', 'status', 'protocol', 'service',
- 'detected_by', 'first_seen', 'last_seen', 'is_active', 'path', 'technology',
- 'vulnerability'
+ fields = FindingSerializer.Meta.fields + (
+ "host",
+ "port",
+ "status",
+ "protocol",
+ "service",
+ "path",
+ "technology",
+ "vulnerability",
)
-class PathSerializer(serializers.ModelSerializer):
- '''Serializer to get the Path data via API.'''
+class TriagePortSerializer(TriageFindingSerializer):
+ class Meta:
+ model = PortSerializer.Meta.model
+ fields = TriageFindingSerializer.Meta.fields
- detected_by = SimplyToolSerializer(many=False, read_only=True) # Tool details for read operations
+class PathSerializer(FindingSerializer):
class Meta:
- '''Serializer metadata.'''
-
model = Path
- fields = ( # Path fields exposed via API
- 'id', 'executions', 'port', 'path', 'status', 'extra', 'type',
- 'detected_by', 'first_seen', 'last_seen', 'is_active'
+ fields = FindingSerializer.Meta.fields + (
+ "port",
+ "path",
+ "status",
+ "extra_info",
+ "type",
)
-class TechnologySerializer(serializers.ModelSerializer):
- '''Serializer to get the Technology data via API.'''
+class TriagePathSerializer(TriageFindingSerializer):
+ class Meta:
+ model = PathSerializer.Meta.model
+ fields = TriageFindingSerializer.Meta.fields
- detected_by = SimplyToolSerializer(many=False, read_only=True) # Tool details for read operations
+class TechnologySerializer(FindingSerializer):
class Meta:
- '''Serializer metadata.'''
-
model = Technology
- fields = ( # Technology fields exposed via API
- 'id', 'executions', 'port', 'name', 'version', 'description', 'reference',
- 'related_to', 'related_technologies', 'detected_by', 'first_seen', 'last_seen',
- 'is_active', 'vulnerability', 'exploit'
+ fields = FindingSerializer.Meta.fields + (
+ "port",
+ "name",
+ "version",
+ "description",
+ "reference",
+ "related_to",
+ "related_technologies",
+ "vulnerability",
+ "exploit",
)
-class CredentialSerializer(serializers.ModelSerializer):
- '''Serializer to get the Credential data via API.'''
+class TriageTechnologySerializer(TriageFindingSerializer):
+ class Meta:
+ model = TechnologySerializer.Meta.model
+ fields = TriageFindingSerializer.Meta.fields
- detected_by = SimplyToolSerializer(many=False, read_only=True) # Tool details for read operations
+class CredentialSerializer(FindingSerializer):
class Meta:
- '''Serializer metadata.'''
-
model = Credential
- # Credential fields exposed via API
- fields = (
- 'id', 'technology', 'email', 'username', 'secret', 'context',
- 'detected_by', 'first_seen', 'last_seen', 'is_active'
+ fields = FindingSerializer.Meta.fields + (
+ "technology",
+ "email",
+ "username",
+ "secret",
+ "context",
)
-class VulnerabilitySerializer(serializers.ModelSerializer):
- '''Serializer to get the Vulnerability data via API.'''
+class TriageCredentialSerializer(TriageFindingSerializer):
+ class Meta:
+ model = CredentialSerializer.Meta.model
+ fields = TriageFindingSerializer.Meta.fields
- detected_by = SimplyToolSerializer(many=False, read_only=True) # Tool details for read operations
+class VulnerabilitySerializer(FindingSerializer):
class Meta:
- '''Serializer metadata.'''
-
model = Vulnerability
- fields = ( # Vulnerability fields exposed via API
- 'id', 'executions', 'port', 'technology', 'name', 'description', 'severity',
- 'cve', 'cwe', 'reference', 'detected_by', 'first_seen', 'last_seen',
- 'is_active', 'exploit'
+ fields = FindingSerializer.Meta.fields + (
+ "port",
+ "technology",
+ "name",
+ "description",
+ "severity",
+ "cve",
+ "cwe",
+ "reference",
+ "exploit",
)
-class ExploitSerializer(serializers.ModelSerializer):
- '''Serializer to get the Exploit data via API.'''
+class TriageVulnerabilitySerializer(TriageFindingSerializer):
+ class Meta:
+ model = VulnerabilitySerializer.Meta.model
+ fields = TriageFindingSerializer.Meta.fields
- detected_by = SimplyToolSerializer(many=False, read_only=True) # Tool details for read operations
+class ExploitSerializer(FindingSerializer):
class Meta:
- '''Serializer metadata.'''
-
model = Exploit
- fields = ( # Exploit fields exposed via API
- 'id', 'executions', 'vulnerability', 'technology', 'title', 'edb_id',
- 'reference', 'detected_by', 'first_seen', 'last_seen', 'is_active'
+ fields = FindingSerializer.Meta.fields + (
+ "vulnerability",
+ "technology",
+ "title",
+ "edb_id",
+ "reference",
)
+
+
+class TriageExploitSerializer(TriageFindingSerializer):
+ class Meta:
+ model = ExploitSerializer.Meta.model
+ fields = TriageFindingSerializer.Meta.fields
diff --git a/src/backend/findings/utils.py b/src/backend/findings/utils.py
deleted file mode 100644
index 7aaee76b7..000000000
--- a/src/backend/findings/utils.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from typing import Any, Dict, List
-
-from targets.models import Target
-
-
-def get_unique_filter(key_fields: List[Dict[str, Any]], fields: Dict[str, Any], target: Target) -> Dict[str, Any]:
- '''Get filter from finding data and its key fields.
-
- Args:
- key_fields (List[Dict[str, Any]]): Finding key fields
- fields (Dict[str, Any]): Finding fields and values
- target (Target): Execution where the finding is discovered
-
- Returns:
- Dict[str, Any]: Filter with the key fields and values
- '''
- base_field_found = False # Indicate if a base key field is found
- unique_filter: Dict[str, Any] = {}
- for field in key_fields: # For each key field
- value = fields.get(field['name']) # Get value for the key field
- # Only one base key field should be included in the filter
- if value and (not base_field_found or not field.get('is_base')):
- unique_filter[field['name']] = value # Add key field and value to the filter
- if field.get('is_base'):
- base_field_found = True # Update base indicator
- if not base_field_found and target: # If no base field found, use target
- unique_filter['executions__task__target'] = target # Add target value
- return unique_filter
diff --git a/src/backend/findings/views.py b/src/backend/findings/views.py
index 01b45cb9d..120d0e211 100644
--- a/src/backend/findings/views.py
+++ b/src/backend/findings/views.py
@@ -1,83 +1,70 @@
-from typing import Any
-from urllib.request import Request
-
-from api.filters import RekonoFilterBackend
from drf_spectacular.utils import extend_schema
+from findings.enums import OSINTDataType
+from findings.filters import (
+ CredentialFilter,
+ ExploitFilter,
+ HostFilter,
+ OSINTFilter,
+ PathFilter,
+ PortFilter,
+ TechnologyFilter,
+ VulnerabilityFilter,
+)
+from findings.framework.views import FindingViewSet
+from findings.models import (
+ OSINT,
+ Credential,
+ Exploit,
+ Host,
+ Path,
+ Port,
+ Technology,
+ Vulnerability,
+)
+from findings.serializers import (
+ CredentialSerializer,
+ ExploitSerializer,
+ HostSerializer,
+ OSINTSerializer,
+ PathSerializer,
+ PortSerializer,
+ TechnologySerializer,
+ TriageCredentialSerializer,
+ TriageExploitSerializer,
+ TriageHostSerializer,
+ TriageOSINTSerializer,
+ TriagePathSerializer,
+ TriagePortSerializer,
+ TriageTechnologySerializer,
+ TriageVulnerabilitySerializer,
+ VulnerabilitySerializer,
+)
from rest_framework import status
from rest_framework.decorators import action
-from rest_framework.filters import OrderingFilter, SearchFilter
-from rest_framework.mixins import (DestroyModelMixin, ListModelMixin,
- RetrieveModelMixin)
+from rest_framework.request import Request
from rest_framework.response import Response
-from rest_framework.viewsets import GenericViewSet
from targets.serializers import TargetSerializer
-from findings.enums import DataType
-from findings.filters import (CredentialFilter, ExploitFilter, HostFilter,
- OSINTFilter, PathFilter, PortFilter,
- TechnologyFilter, VulnerabilityFilter)
-from findings.models import (OSINT, Credential, Exploit, Finding, Host, Path,
- Port, Technology, Vulnerability)
-from findings.serializers import (CredentialSerializer, ExploitSerializer,
- HostSerializer, OSINTSerializer,
- PathSerializer, PortSerializer,
- TechnologySerializer,
- VulnerabilitySerializer)
-
# Create your views here.
-class FindingBaseView(GenericViewSet, ListModelMixin, RetrieveModelMixin, DestroyModelMixin):
- '''Common finding ViewSet that includes: get, retrieve, enable and disable features.'''
-
- # Replace DjangoFilterBackend by RekonoFilterBackend to allow filters by N-M relations like 'executions' field.
- filter_backends = [RekonoFilterBackend, SearchFilter, OrderingFilter]
- members_field = 'executions__task__target__project__members'
-
- def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response:
- '''Disable finding.
-
- Args:
- request (Request): Received HTTP request
-
- Returns:
- Response: HTTP response
- '''
- finding: Finding = self.get_object()
- finding.is_active = False
- finding.save(update_fields=['is_active'])
- return Response(status=status.HTTP_204_NO_CONTENT)
-
- @extend_schema(request=None, responses={201: None})
- @action(detail=True, methods=['POST'], url_path='enable', url_name='enable')
- def enable(self, request: Request, pk: str) -> Response:
- '''Enable finding.
-
- Args:
- request (Request): Received HTTP request
- pk (str): Instance Id
-
- Returns:
- Response: HTTP response
- '''
- finding: Finding = self.get_object()
- finding.is_active = True
- finding.save(update_fields=['is_active'])
- return Response(status=status.HTTP_201_CREATED)
-
-
-class OSINTViewSet(FindingBaseView):
- '''OSINT ViewSet that includes: get, retrieve, enable, disable, import in DD and target creation features.'''
-
- queryset = OSINT.objects.all().order_by('-id')
+class OSINTViewSet(FindingViewSet):
+ queryset = OSINT.objects.all()
serializer_class = OSINTSerializer
+ triage_serializer_class = TriageOSINTSerializer
filterset_class = OSINTFilter
- search_fields = ['data', 'source'] # Fields used to search OSINTs
+ search_fields = ["data"]
+ ordering_fields = ["id", "data", "data_type", "source"]
+ # "post" is needed to allow POST requests to create targets
+ http_method_names = ["get", "put", "post"]
+
+ def create(self, request: Request, *args, **kwargs):
+ return self._method_not_allowed("POST")
@extend_schema(request=None, responses={201: TargetSerializer})
- @action(detail=True, methods=['POST'], url_path='target', url_name='target')
+ @action(detail=True, methods=["POST"], url_path="target", url_name="target")
def target(self, request: Request, pk: str) -> Response:
- '''Target creation from OSINT data.
+ """Target creation from OSINT data.
Args:
request (Request): Received HTTP request
@@ -85,77 +72,100 @@ def target(self, request: Request, pk: str) -> Response:
Returns:
Response: HTTP response
- '''
+ """
osint = self.get_object()
- if osint.data_type in [DataType.IP, DataType.DOMAIN]: # Only supported for IPs and Domains
- serializer = TargetSerializer(data={'project': osint.get_project().id, 'target': osint.data})
- if serializer.is_valid():
- target = serializer.create(serializer.validated_data) # Target creation
- return Response(TargetSerializer(target).data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ if osint.data_type in [
+ OSINTDataType.IP,
+ OSINTDataType.DOMAIN,
+ ]:
+ serializer = TargetSerializer(
+ data={"project": osint.get_project().id, "target": osint.data}
+ )
+ serializer.is_valid(raise_exception=True)
+ target = serializer.create(serializer.validated_data)
+ return Response(
+ TargetSerializer(target).data, status=status.HTTP_201_CREATED
+ )
return Response(
- {'data_type': ['Unsupported option for this OSINT data type']}, status=status.HTTP_400_BAD_REQUEST
+ {"data_type": "Target creation is not available for this OSINT data type"},
+ status=status.HTTP_400_BAD_REQUEST,
)
-class HostViewSet(FindingBaseView):
- '''Host ViewSet that includes: get, retrieve, enable, disable and import Defect-Dojo features.'''
-
- queryset = Host.objects.all().order_by('-id')
+class HostViewSet(FindingViewSet):
+ queryset = Host.objects.all()
serializer_class = HostSerializer
+ triage_serializer_class = TriageHostSerializer
filterset_class = HostFilter
- search_fields = ['address'] # Fields used to search Hosts
-
+ search_fields = ["address", "os"]
+ ordering_fields = ["id", "address", "os_type"]
-class PortViewSet(FindingBaseView):
- '''Port ViewSet that includes: get, retrieve, enable, disable and import Defect-Dojo features.'''
- queryset = Port.objects.all().order_by('-id')
+class PortViewSet(FindingViewSet):
+ queryset = Port.objects.all()
serializer_class = PortSerializer
+ triage_serializer_class = TriagePortSerializer
filterset_class = PortFilter
- search_fields = ['port', 'service'] # Fields used to search Ports
+ search_fields = ["port", "service"]
+ ordering_fields = ["id", "host", "port", "status", "protocol", "service"]
-class PathViewSet(FindingBaseView):
- '''Path ViewSet that includes: get, retrieve, enable, disable and import Defect-Dojo features.'''
-
- queryset = Path.objects.all().order_by('-id')
+class PathViewSet(FindingViewSet):
+ queryset = Path.objects.all()
serializer_class = PathSerializer
+ triage_serializer_class = TriagePathSerializer
filterset_class = PathFilter
- search_fields = ['path'] # Fields used to search Paths
-
+ search_fields = ["path", "extra_info"]
+ ordering_fields = ["id", "port", "port__host", "path", "status", "type"]
-class TechnologyViewSet(FindingBaseView):
- '''Technology ViewSet that includes: get, retrieve, enable, disable and import Defect-Dojo features.'''
- queryset = Technology.objects.all().order_by('-id')
+class TechnologyViewSet(FindingViewSet):
+ queryset = Technology.objects.all()
serializer_class = TechnologySerializer
+ triage_serializer_class = TriageTechnologySerializer
filterset_class = TechnologyFilter
- search_fields = ['name', 'version'] # Fields used to search Technologies
+ search_fields = ["name", "version", "description"]
+ ordering_fields = ["id", "port", "name", "version"]
-class CredentialViewSet(FindingBaseView):
- '''Credential ViewSet that includes: get, retrieve, enable, disable and import Defect-Dojo features.'''
-
- queryset = Credential.objects.all().order_by('-id')
+class CredentialViewSet(FindingViewSet):
+ queryset = Credential.objects.all()
serializer_class = CredentialSerializer
+ triage_serializer_class = TriageCredentialSerializer
filterset_class = CredentialFilter
- search_fields = ['email', 'username'] # Fields used to search Credentials
-
+ search_fields = ["email", "username", "secret", "context"]
+ ordering_fields = ["id", "email", "username", "secret"]
-class VulnerabilityViewSet(FindingBaseView):
- '''Vulnerability ViewSet that includes: get, retrieve, enable, disable and import Defect-Dojo features.'''
- queryset = Vulnerability.objects.all().order_by('-id')
+class VulnerabilityViewSet(FindingViewSet):
+ queryset = Vulnerability.objects.all()
serializer_class = VulnerabilitySerializer
+ triage_serializer_class = TriageVulnerabilitySerializer
filterset_class = VulnerabilityFilter
- search_fields = ['name', 'description', 'cve', 'cwe'] # Fields used to search Vulnerabilities
-
-
-class ExploitViewSet(FindingBaseView):
- '''Exploit ViewSet that includes: get, retrieve, enable, disable and import Defect-Dojo features.'''
-
- queryset = Exploit.objects.all().order_by('-id')
+ search_fields = ["name", "description", "cve", "cwe", "osvdb"]
+ ordering_fields = [
+ "id",
+ "technology",
+ "port",
+ "name",
+ "severity",
+ "cve",
+ "cwe",
+ "osvdb",
+ ]
+
+
+class ExploitViewSet(FindingViewSet):
+ queryset = Exploit.objects.all()
serializer_class = ExploitSerializer
+ triage_serializer_class = TriageExploitSerializer
filterset_class = ExploitFilter
- search_fields = ['title', 'edb_id', 'reference'] # Fields used to search Exploits
+ search_fields = ["title", "edb_id", "reference"]
+ ordering_fields = [
+ "id",
+ "vulnerability",
+ "technology",
+ "title",
+ "edb_id",
+ "reference",
+ ]
diff --git a/src/backend/findings/migrations/__init__.py b/src/backend/framework/__init__.py
similarity index 100%
rename from src/backend/findings/migrations/__init__.py
rename to src/backend/framework/__init__.py
diff --git a/src/backend/framework/apps.py b/src/backend/framework/apps.py
new file mode 100644
index 000000000..c4cc9bc41
--- /dev/null
+++ b/src/backend/framework/apps.py
@@ -0,0 +1,32 @@
+from typing import Any, List
+
+from django.core import management
+from django.core.management.commands import loaddata
+from django.db.models.signals import post_migrate
+
+
+class BaseApp:
+ fixtures_path = None
+ skip_if_model_exists = False
+
+ def ready(self) -> None:
+ """Run code as soon as the registry is fully populated."""
+ # Configure fixtures to be loaded after migration
+ if self.fixtures_path:
+ post_migrate.connect(self._load_fixtures, sender=self)
+
+ def _load_fixtures(self, **kwargs: Any) -> None:
+ if self.skip_if_model_exists:
+ for model in self._get_models():
+ if model and model.objects.exists():
+ return # pragma: no cover
+ management.call_command(
+ loaddata.Command(),
+ *(
+ self.fixtures_path / fixture
+ for fixture in sorted(self.fixtures_path.rglob("*.json"))
+ )
+ )
+
+ def _get_models(self) -> List[Any]:
+ return [] # pragma: no cover
diff --git a/src/backend/framework/enums.py b/src/backend/framework/enums.py
new file mode 100644
index 000000000..6c5e11a7b
--- /dev/null
+++ b/src/backend/framework/enums.py
@@ -0,0 +1,25 @@
+from enum import Enum
+
+
+class InputKeyword(Enum):
+ """List of keywords that can be included in tool argument patterns to include BaseInput data."""
+
+ TARGET = 1
+ HOST = 2
+ PORT = 3
+ PORTS = 4
+ PORTS_COMMAS = 5
+ TECHNOLOGY = 6
+ VERSION = 7
+ ENDPOINT = 8
+ URL = 9
+ EMAIL = 10
+ USERNAME = 11
+ SECRET = 12
+ CVE = 13
+ EXPLOIT = 14
+ WORDLIST = 15
+ COOKIE_NAME = 16
+ TOKEN = 17
+ CREDENTIAL_TYPE = 18
+ CREDENTIAL_TYPE_LOWER = 19
diff --git a/src/backend/framework/exceptions.py b/src/backend/framework/exceptions.py
new file mode 100644
index 000000000..64d7cb653
--- /dev/null
+++ b/src/backend/framework/exceptions.py
@@ -0,0 +1,16 @@
+from django.db.utils import IntegrityError
+from psycopg2.errors import UniqueViolation
+from rest_framework.response import Response
+from rest_framework.status import HTTP_400_BAD_REQUEST
+from rest_framework.views import exception_handler
+
+
+def exceptions_handler(exc, context):
+ if exc.__class__ in [UniqueViolation, IntegrityError]:
+ response = Response(
+ {"constraint": ["This object already exists"]},
+ status=HTTP_400_BAD_REQUEST,
+ )
+ else:
+ response = exception_handler(exc, context)
+ return response
diff --git a/src/backend/framework/fields.py b/src/backend/framework/fields.py
new file mode 100644
index 000000000..92c8d7635
--- /dev/null
+++ b/src/backend/framework/fields.py
@@ -0,0 +1,105 @@
+from typing import Any
+
+from django.forms import ValidationError
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import extend_schema_field
+from rest_framework.serializers import Field
+from taggit.serializers import TagListSerializerField
+
+
+@extend_schema_field({"type": "array", "items": {"type": "string"}})
+class TagField(TagListSerializerField):
+ """Internal serializer field for TagListSerializerField, including API documentation."""
+
+ pass
+
+
+@extend_schema_field(OpenApiTypes.STR)
+class ProtectedSecretField(Field):
+ """Serializer field to manage protected system values."""
+
+ def __init__(
+ self,
+ validator: callable = None,
+ read_only=False,
+ write_only=False,
+ required=None,
+ source=None,
+ label=None,
+ help_text=None,
+ style=None,
+ error_messages=None,
+ validators=None,
+ allow_null=False,
+ ):
+ self.validator = validator
+ super().__init__(
+ read_only=read_only,
+ write_only=write_only,
+ required=required,
+ source=source,
+ label=label,
+ help_text=help_text,
+ style=style,
+ error_messages=error_messages,
+ validators=validators,
+ allow_null=allow_null,
+ )
+
+ def to_representation(self, value: str) -> str:
+ """Return text value to send to the client.
+
+ Args:
+ value (str): Internal text value
+
+ Returns:
+ str: Text value that contains multiple '*' characters
+ """
+ return "*" * len(value)
+
+ def to_internal_value(self, value: str) -> str:
+ """Return text value to be stored in database.
+
+ Args:
+ value (str): Text value provided by the client
+
+ Returns:
+ str: Text value to be stored. Same value as the provided one.
+ """
+ if self.validator:
+ self.validator(value)
+ return value
+
+
+@extend_schema_field(OpenApiTypes.STR)
+class IntegerChoicesField(Field):
+ """Serializer field to manage IntegerChoices values."""
+
+ def __init__(self, model: Any, **kwargs: Any):
+ self.model = model
+ super().__init__(**kwargs)
+
+ def to_representation(self, value: int) -> str:
+ """Return text value to send to the client.
+
+ Args:
+ value (int): Integer value of the IntegerChoices field
+
+ Returns:
+ str: String value associated to the integer
+ """
+ return self.model(value).name.capitalize()
+
+ def to_internal_value(self, data: str) -> int:
+ """Return integer value to be stored in database.
+
+ Args:
+ data (str): String value of the IntegerChoices field
+
+ Returns:
+ int: Integer value associated to the string
+ """
+ try:
+ return self.model[data.upper()].value
+ except:
+ raise ValidationError(f"Invalid value", code=self.model.__class__.__name__)
diff --git a/src/backend/framework/filters.py b/src/backend/framework/filters.py
new file mode 100644
index 000000000..ab12d43b2
--- /dev/null
+++ b/src/backend/framework/filters.py
@@ -0,0 +1,50 @@
+from typing import Any, List
+
+from django.db.models import Q, QuerySet
+from django_filters.rest_framework import FilterSet, filters
+
+
+class LikeFilter(FilterSet):
+ """Filter that allows queryset filtering based on current user likes."""
+
+ # Indicate if user likes or not the entities
+ like = filters.BooleanFilter(method="get_liked_items")
+
+ def get_liked_items(self, queryset: QuerySet, name: str, value: bool) -> QuerySet:
+ """Filter queryset based on current user likes.
+
+ Args:
+ queryset (QuerySet): Queryset to be filtered
+ name (str): Field name. Not used in this case
+ value (bool): Indicate if current user likes or not the entities
+
+ Returns:
+ QuerySet: Queryset filtered by the current user likes
+ """
+ liked = {"liked_by": self.request.user}
+ return queryset.filter(Q(**liked) if value else ~Q(**liked)).all()
+
+
+class MultipleFieldFilterSet(FilterSet):
+ def multiple_field_filter(
+ self, queryset: QuerySet, name: str, value: Any
+ ) -> QuerySet:
+ query = Q()
+ for field in self.filters[name].fields:
+ query |= Q(**{field: value})
+ return queryset.filter(query)
+
+
+class MultipleFieldFilter(filters.Filter):
+ def __init__(self, fields: List[str], **kwargs: Any) -> None:
+ kwargs["method"] = "multiple_field_filter"
+ super().__init__(**kwargs)
+ self.fields = fields
+
+
+class MultipleNumberFilter(MultipleFieldFilter, filters.NumberFilter):
+ pass
+
+
+class MultipleCharFilter(MultipleFieldFilter, filters.CharFilter):
+ pass
diff --git a/src/backend/framework/models.py b/src/backend/framework/models.py
new file mode 100644
index 000000000..66db8f1f8
--- /dev/null
+++ b/src/backend/framework/models.py
@@ -0,0 +1,234 @@
+import importlib
+from typing import Any, Dict, List, Optional
+
+import requests
+import urllib3
+from django.db import models
+from django.db.models import Q
+from rekono.settings import AUTH_USER_MODEL, CONFIG
+from security.cryptography.encryption import Encryptor
+
+
+class BaseModel(models.Model):
+ class Meta:
+ abstract = True
+
+ def get_project(self) -> Any:
+ filter_field = self.__class__.get_project_field()
+ if filter_field:
+ project = self
+ for field in filter_field.split("__"):
+ if hasattr(project, field):
+ project = getattr(project, field)
+ else: # pragma: no cover
+ return None
+ return project
+
+ @classmethod
+ def get_project_field(cls) -> str:
+ return None
+
+ def _get_related_class(self, package: str, name: str) -> Any:
+ try:
+ # nosemgrep: python.lang.security.audit.non-literal-import.non-literal-import
+ module = importlib.import_module(
+ f'{package.lower()}.{name.lower().replace(" ", "_").replace("-", "_")}'
+ )
+ cls = getattr(
+ module,
+ name[0].upper() + name[1:].lower().replace(" ", "").replace("-", ""),
+ )
+ except (AttributeError, ModuleNotFoundError) as ex:
+ module = importlib.import_module(f"{package}.base")
+ type = package.split(".")[-1][:-1]
+ cls = getattr(module, f"Base{type[0].upper() + type[1:].lower()}")
+ return cls
+
+ def __str__(self) -> str:
+ return self.__class__.__name__
+
+
+class BaseEncrypted(BaseModel):
+ class Meta:
+ abstract = True
+
+ _encryptor = Encryptor(CONFIG.encryption_key) if CONFIG.encryption_key else None
+ _encrypted_field = "_secret"
+
+ @property
+ def secret(self) -> str:
+ return (
+ (
+ self._encryptor.decrypt(getattr(self, self._encrypted_field))
+ if self._encryptor
+ else getattr(self, self._encrypted_field)
+ )
+ if hasattr(self, self._encrypted_field)
+ and getattr(self, self._encrypted_field)
+ else None
+ )
+
+ @secret.setter
+ def secret(self, value: str) -> None:
+ if hasattr(self, self._encrypted_field):
+ setattr(
+ self,
+ self._encrypted_field,
+ self._encryptor.encrypt(value)
+ if self._encryptor and value is not None
+ else value,
+ )
+
+
+class BaseInput(BaseModel):
+ """Class to be extended by all the objects that can be used in tool executions as argument."""
+
+ class Meta:
+ abstract = True
+
+ class Filter:
+ def __init__(
+ self,
+ type: type,
+ field: str,
+ contains: bool = False,
+ processor: callable = None,
+ ) -> None:
+ self.type = type
+ self.field = field
+ self.contains = contains
+ self.processor = processor
+
+ filters: List[Filter] = []
+
+ def _clean_path(self, value: str) -> str:
+ return f"/{value}" if len(value) > 1 and value[0] != "/" else value
+
+ def _get_url(
+ self,
+ host: str,
+ port: int = None,
+ endpoint: str = "",
+ protocols: List[str] = ["http", "https"],
+ ) -> Optional[str]:
+ """Get a HTTP or HTTPS URL from host, port and endpoint.
+
+ Args:
+ host (str): Host to include in the URL
+ port (int, optional): Port to include in the URL. Defaults to None.
+ endpoint (str, optional): Endpoint to include in the URL. Defaults to ''.
+ protocols (List[str], optional): Protocol list to check. Defaults to ['http', 'https'].
+
+ Returns:
+ Optional[str]: First URL that answers an HTTP GET request, or None if no protocol works
+ """
+ urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning)
+ if endpoint.startswith("/"):
+ endpoint = endpoint[1:]
+ schema = "{protocol}://{host}/{endpoint}"
+ if port:
+ schema = "{protocol}://{host}:{port}/{endpoint}" # Include port schema if port exists
+ if port == 80:
+ protocols = ["http"]
+ elif port == 443:
+ protocols = ["https"]
+ for protocol in protocols: # For each protocol
+ url_to_test = schema.format(
+ protocol=protocol, host=host, port=port, endpoint=endpoint
+ )
+ try:
+ # nosemgrep: python.requests.security.disabled-cert-validation.disabled-cert-validation
+ requests.get(url_to_test, timeout=5, verify=False)
+ return url_to_test
+ except:
+ continue
+ return None
+
+ def _compare_filter(
+ self, filter: Any, value: Any, negative: bool = False, contains: bool = False
+ ) -> bool:
+ comparison = lambda f, v: f == v if not contains else f in v
+ return (
+ comparison(filter, value) if not negative else not comparison(filter, value)
+ )
+
+ def filter(self, argument_input: Any, target: Any = None) -> bool:
+ """Check if this instance is valid based on input filter.
+
+ Args:
+ argument_input (Any): Tool input whose filter will be applied
+
+ Returns:
+ bool: Indicate if this instance match the input filter or not
+ """
+ if not argument_input.filter:
+ return True
+ filter_value = argument_input.filter
+ for split, or_condition in [(" or ", True), (" and ", False)]:
+ if split not in filter_value and or_condition:
+ continue
+ for match_value in filter_value.split(split):
+ negative = match_value.startswith("!")
+ if negative:
+ match_value = match_value[1:]
+ for filter in self.filters:
+ and_condition = False
+ field_value = getattr(self, filter.field)
+ if filter.processor:
+ field_value = filter.processor(field_value)
+ try:
+ if (
+ issubclass(filter.type, models.TextChoices)
+ and self._compare_filter(
+ filter.type[match_value.upper()], field_value, negative
+ )
+ ) or (
+ hasattr(self, match_value)
+ and self._compare_filter(
+ filter.type(getattr(self, match_value)),
+ field_value,
+ negative,
+ filter.contains,
+ )
+ ):
+ if or_condition:
+ return True
+ else:
+ and_condition = True
+ elif not or_condition:
+ return False
+ except (ValueError, KeyError) as ex:
+ continue
+ if not or_condition and and_condition:
+ return True
+ return False
+
+ def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
+ """Get useful information from this instance to be used in tool execution as argument.
+
+ To be implemented by subclasses.
+
+ Args:
+ accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
+
+ Returns:
+ Dict[str, Any]: Useful information for tool executions, including accumulated if set
+ """
+ return {} # pragma: no cover
+
+ def get_input_type(self) -> Any:
+ from input_types.models import InputType
+
+ reference = f"{self._meta.app_label}.{self._meta.model_name}"
+ return InputType.objects.filter(
+ Q(model=reference) | Q(fallback_model=reference)
+ ).first()
+
+
+class BaseLike(BaseModel):
+ """Common and abstract BaseLike model, defining common fields for all models that users can like."""
+
+ liked_by = models.ManyToManyField(AUTH_USER_MODEL, related_name="liked_%(class)s")
+
+ class Meta:
+ abstract = True
diff --git a/src/backend/framework/pagination.py b/src/backend/framework/pagination.py
new file mode 100644
index 000000000..da850cca3
--- /dev/null
+++ b/src/backend/framework/pagination.py
@@ -0,0 +1,10 @@
+from rest_framework.pagination import PageNumberPagination
+
+
+class Pagination(PageNumberPagination):
+ """Pagination configuration for API Rest."""
+
+ page_query_param = "page" # Page parameter
+ page_size_query_param = "size" # Size parameter
+ page_size = 25 # Default page size
+ max_page_size = 1000 # Max page size
diff --git a/src/backend/framework/platforms.py b/src/backend/framework/platforms.py
new file mode 100644
index 000000000..f4690c943
--- /dev/null
+++ b/src/backend/framework/platforms.py
@@ -0,0 +1,81 @@
+import logging
+from typing import Any, List
+from urllib.parse import urlparse
+
+import requests
+from executions.models import Execution
+from findings.framework.models import Finding
+from requests.adapters import HTTPAdapter, Retry
+from users.enums import Notification
+
+logger = logging.getLogger()
+
+
+class BasePlatform:
+ def is_available(self) -> bool:
+ return True
+
+ def process_findings(self, execution: Execution, findings: List[Finding]) -> None:
+ if not self.is_available():
+ return
+
+
+class BaseIntegration(BasePlatform):
+ url = ""
+
+ def __init__(self) -> None:
+ self.session = self._create_session(self.url)
+
+ def _create_session(self, url: str) -> requests.Session:
+ session = requests.Session()
+ retries = Retry(
+ total=10,
+ backoff_factor=1,
+ status_forcelist=[403, 429, 500, 502, 503, 504, 599],
+ )
+ session.mount(f"{urlparse(url).scheme}://", HTTPAdapter(max_retries=retries))
+ return session
+
+ def _request(
+ self, method: callable, url: str, json: bool = True, **kwargs: Any
+ ) -> Any:
+ try:
+ response = method(url, **kwargs)
+ except requests.exceptions.ConnectionError:
+ response = method(url, **kwargs)
+ logger.info(
+ f"[{self.__class__.__name__}] {method.__name__.upper()} {urlparse(url).path} > HTTP {response.status_code}"
+ )
+ response.raise_for_status()
+ return response.json() if json else response
+
+
+class BaseNotification(BasePlatform):
+ enable_field = ""
+
+ def _get_users_to_notify(self, execution: Execution) -> List[Any]:
+ users = set()
+ if (
+ execution.task.executor.notification_scope != Notification.DISABLED
+ and getattr(execution.task.executor, self.enable_field)
+ ):
+ users.add(execution.task.executor)
+ users.update(
+ execution.task.target.project.members.filter(
+ **{
+ self.enable_field: True,
+ "notification_scope": Notification.ALL_EXECUTIONS,
+ }
+ ).exclude(id=execution.task.executor.id)
+ )
+ return users
+
+ def _notify_execution(
+ self, users: List[Any], execution: Execution, findings: List[Finding]
+ ) -> None:
+ pass
+
+ def process_findings(self, execution: Execution, findings: List[Finding]) -> None:
+ super().process_findings(execution, findings)
+ users = self._get_users_to_notify(execution)
+ self._notify_execution(users, execution, findings)
diff --git a/src/backend/framework/queues.py b/src/backend/framework/queues.py
new file mode 100644
index 000000000..501758b9c
--- /dev/null
+++ b/src/backend/framework/queues.py
@@ -0,0 +1,134 @@
+import copy
+import logging
+from typing import Any, Dict, List
+
+import django_rq
+from findings.framework.models import Finding
+from framework.models import BaseInput
+from input_types.models import InputType
+from parameters.models import InputTechnology, InputVulnerability
+from rq.job import Job
+from rq.queue import Queue
+from target_ports.models import TargetPort
+from tools.models import Input, Tool
+from wordlists.models import Wordlist
+
+logger = logging.getLogger()
+
+
+class BaseQueue:
+ name = ""
+
+ def _get_queue(self) -> Queue:
+ return django_rq.get_queue(self.name)
+
+ def cancel_job(self, job_id: str) -> Job:
+ job = self._get_queue().fetch_job(job_id)
+ if job:
+ logger.info(f"[{self.name}] Job {job_id} has been cancelled")
+ job.cancel()
+
+ def delete_job(self, job_id: str) -> Job:
+ job = self._get_queue().fetch_job(job_id)
+ if job:
+ logger.info(f"[{self.name}] Job {job_id} has been deleted")
+ job.delete()
+
+ def enqueue(self, **kwargs: Any) -> Job:
+ return self._get_queue().enqueue(self.consume, **kwargs)
+
+ @staticmethod
+ def consume(**kwargs: Any) -> Any:
+ pass
+
+ @staticmethod
+ def _get_findings_by_type(
+ findings: List[Finding],
+ ) -> Dict[InputType, List[Finding]]:
+ findings_by_type = {}
+ for finding in findings:
+ input_type = finding.get_input_type()
+ if input_type not in findings_by_type:
+ findings_by_type[input_type] = [finding]
+ else:
+ findings_by_type[input_type].append(finding)
+ return dict(
+ sorted(
+ findings_by_type.items(),
+ key=lambda i: len(i[0].get_related_input_types()),
+ )
+ )
+
+ @staticmethod
+ def _calculate_executions(
+ tool: Tool,
+ findings: List[Finding],
+ target_ports: List[TargetPort],
+ input_vulnerabilities: List[InputVulnerability],
+ input_technologies: List[InputTechnology],
+ wordlists: List[Wordlist],
+ ) -> List[Dict[int, List[BaseInput]]]:
+ executions = [{0: []}]
+ input_types_used = set()
+ findings_by_type = BaseQueue._get_findings_by_type(findings)
+ for index, input_type, source in [
+ (0, t, list(f)) for t, f in (findings_by_type or {}).items() if f
+ ] + [
+ (i + 1, None, p)
+ for i, p in enumerate(
+ [
+ target_ports,
+ input_vulnerabilities,
+ input_technologies,
+ wordlists,
+ ]
+ )
+ ]:
+ if not source:
+ continue
+ if not input_type:
+ input_type = source[0].get_input_type()
+ if input_type in input_types_used:
+ continue
+ tool_input = (
+ Input.objects.filter(argument__tool=tool, type=input_type)
+ .order_by("order")
+ .first()
+ )
+ if not tool_input:
+ continue
+ filtered_base_inputs = [bi for bi in source if bi.filter(tool_input)]
+ if not filtered_base_inputs:
+ continue
+ related_input_types = [
+ i for i in input_type.get_related_input_types() if i in findings_by_type
+ ]
+ for execution_index, execution in enumerate(copy.deepcopy(executions)):
+ if not executions[execution_index].get(index):
+ executions[execution_index][index] = []
+ base_inputs = filtered_base_inputs.copy()
+ if index == 0 and related_input_types:
+ base_inputs = []
+ for related_input_type in related_input_types:
+ base_inputs.extend(
+ bi
+ for bi in filtered_base_inputs
+ if getattr(bi, related_input_type.name.lower())
+ in execution[index]
+ and bi not in base_inputs
+ )
+ if not base_inputs:
+ continue
+ input_types_used.add(input_type)
+ if tool_input.argument.multiple:
+ executions[execution_index][index].extend(base_inputs)
+ else:
+ original_execution = copy.deepcopy(execution)
+ executions[execution_index][index].append(base_inputs[0])
+ for base_input in base_inputs[1:]:
+ executions.append(copy.deepcopy(original_execution))
+ if not executions[-1].get(index):
+ executions[-1][index] = [base_input]
+ else:
+ executions[-1][index].append(base_input)
+ return executions
diff --git a/src/backend/framework/serializers.py b/src/backend/framework/serializers.py
new file mode 100644
index 000000000..94cb98ac5
--- /dev/null
+++ b/src/backend/framework/serializers.py
@@ -0,0 +1,37 @@
+from typing import Any
+
+from rest_framework.serializers import ModelSerializer, SerializerMethodField
+from users.models import User
+
+
+class LikeSerializer(ModelSerializer):
+ """Common serializer for all models that can be liked."""
+
+ liked = SerializerMethodField(method_name="is_liked_by_user", read_only=True)
+ likes = SerializerMethodField(method_name="count_likes", read_only=True)
+
+ def is_liked_by_user(self, instance: Any) -> bool:
+ """Check if an instance is liked by the current user or not.
+
+ Args:
+ instance (Any): Instance to check
+
+ Returns:
+ bool: Indicate if the current user likes this instance or not
+ """
+ check_likes = {
+ "pk": self.context.get("request").user.id,
+ f"liked_{instance.__class__.__name__.lower()}": instance,
+ }
+ return User.objects.filter(**check_likes).exists()
+
+ def count_likes(self, instance: Any) -> int:
+ """Count number of likes for an instance.
+
+ Args:
+ instance (Any): Instance to check
+
+ Returns:
+ int: Number of likes for this instance
+ """
+ return instance.liked_by.count()
diff --git a/src/backend/framework/views.py b/src/backend/framework/views.py
new file mode 100644
index 000000000..7ba04053e
--- /dev/null
+++ b/src/backend/framework/views.py
@@ -0,0 +1,145 @@
+from typing import Any, Dict
+
+from django.core.exceptions import PermissionDenied
+from django.db.models import Count, QuerySet
+from drf_spectacular.utils import extend_schema
+from framework.models import BaseModel
+from projects.models import Project
+from rest_framework import status
+from rest_framework.decorators import action
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.request import Request
+from rest_framework.response import Response
+from rest_framework.serializers import Serializer
+from rest_framework.viewsets import ModelViewSet
+from security.authorization.permissions import IsAuditor
+
+
+class BaseViewSet(ModelViewSet):
+ ordering = ["-id"]
+ # Required to remove PATCH method
+ http_method_names = [
+ "get",
+ "post",
+ "put",
+ "delete",
+ ]
+ owner_field = "owner"
+
+ def _get_model(self) -> BaseModel:
+ for cls in [
+ self.get_serializer_class(),
+ self.filterset_class if hasattr(self, "filterset_class") else None,
+ ]:
+ if cls and hasattr(cls, "Meta") and hasattr(cls.Meta, "model"):
+ return cls.Meta.model
+
+ def _get_project_from_data(
+ self, project_field: str, data: Dict[str, Any]
+ ) -> Project:
+ fields = project_field.split("__")
+ data = data.get(fields[0], {})
+ for field in fields[1:]:
+ if hasattr(data, field):
+ data = getattr(data, field)
+ else: # pragma: no cover
+ return None
+ return data
+
+ def get_queryset(self) -> QuerySet:
+ model = self._get_model()
+ members_field = None
+ if model:
+ if model == Project:
+ members_field = "members"
+ elif model.get_project_field():
+ members_field = f"{model.get_project_field()}__members"
+ if members_field:
+ if self.request.user.id:
+ project_filter = {members_field: self.request.user}
+ return super().get_queryset().filter(**project_filter)
+ else: # pragma: no cover
+ return None
+ return super().get_queryset()
+
+ def perform_create(self, serializer: Serializer) -> None:
+ model = self._get_model()
+ if model and model.get_project_field():
+ project = self._get_project_from_data(
+ model.get_project_field(), serializer.validated_data
+ )
+ if project and self.request.user not in project.members.all():
+ raise PermissionDenied()
+ if self.owner_field and model and hasattr(model, self.owner_field):
+ parameters = {self.owner_field: self.request.user}
+ serializer.save(**parameters)
+ return
+ super().perform_create(serializer)
+
+ def _method_not_allowed(self, method: str) -> Response:
+ return Response(
+ {"detail": f'Method "{method.upper()}" not allowed.'},
+ status=status.HTTP_405_METHOD_NOT_ALLOWED,
+ )
+
+
+class LikeViewSet(BaseViewSet):
+ """Base ViewSet that includes the like and dislike features."""
+
+ def get_queryset(self) -> QuerySet:
+ """Get the model queryset. It's required to allow access to the likes count from the child ViewSets.
+
+ Returns:
+ QuerySet: Model queryset
+ """
+ return super().get_queryset().annotate(likes_count=Count("liked_by"))
+
+ @extend_schema(request=None, responses={201: None})
+ # Permission classes are overridden to IsAuthenticated and IsAuditor, because currently only Tools, Processes and
+ # Wordlists can be liked, and auditors and admins are the only ones that can see these resources.
+ # Permission classes must be overridden here, because otherwise the standard permissions would be applied, and not
+ # all auditors can make POST requests to resources like these.
+ @action(
+ detail=True,
+ methods=["POST"],
+ url_path="like",
+ url_name="like",
+ permission_classes=[IsAuthenticated, IsAuditor],
+ )
+ def like(self, request: Request, pk: str) -> Response:
+ """Mark an instance as liked by the current user.
+
+ Args:
+ request (Request): Received HTTP request
+ pk (str): Instance Id
+
+ Returns:
+ Response: HTTP Response
+ """
+ self.get_object().liked_by.add(request.user)
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ @extend_schema(request=None, responses={204: None})
+ # Permission classes are overridden to IsAuthenticated and IsAuditor, because currently only Tools, Processes and
+ # Resources (Wordlists) can be liked, and auditors and admins are the only ones that can see these resources.
+ # Permission classes must be overridden here, because otherwise the standard permissions would be applied, and not
+ # all auditors can make POST requests to resources like these.
+ @action(
+ detail=True,
+ methods=["POST"],
+ url_path="dislike",
+ url_name="dislike",
+ permission_classes=[IsAuthenticated, IsAuditor],
+ )
+ def dislike(self, request: Request, pk: str) -> Response:
+ """Unmark an instance as liked by the current user.
+
+ Args:
+ request (Request): Received HTTP request
+ pk (str): Instance Id
+
+ Returns:
+ Response: HTTP Response
+ """
+ self.get_object().liked_by.remove(request.user)
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/src/backend/input_types/__init__.py b/src/backend/input_types/__init__.py
index 3abfab9de..e69de29bb 100644
--- a/src/backend/input_types/__init__.py
+++ b/src/backend/input_types/__init__.py
@@ -1 +0,0 @@
-'''Common features for all Input Types (Targets, Findings and Resources).'''
diff --git a/src/backend/input_types/apps.py b/src/backend/input_types/apps.py
index dbe3717a4..ee32ab439 100644
--- a/src/backend/input_types/apps.py
+++ b/src/backend/input_types/apps.py
@@ -1,27 +1,9 @@
-import os
from pathlib import Path
-from typing import Any
from django.apps import AppConfig
-from django.core import management
-from django.core.management.commands import loaddata
-from django.db.models.signals import post_migrate
+from framework.apps import BaseApp
-class InputTypesConfig(AppConfig):
- '''Input types Django application.'''
-
- name = 'input_types'
-
- def ready(self) -> None:
- '''Run code as soon as the registry is fully populated.'''
- # Configure fixtures to be loaded after migration
- post_migrate.connect(self.load_input_types_model, sender=self)
-
- def load_input_types_model(self, **kwargs: Any) -> None:
- '''Load input types fixtures in database.'''
- path = os.path.join(Path(__file__).resolve().parent, 'fixtures') # Path to fixtures directory
- management.call_command(
- loaddata.Command(),
- os.path.join(path, '1_input_types.json') # Input types entities
- )
+class InputTypesConfig(BaseApp, AppConfig):
+ name = "input_types"
+ fixtures_path = Path(__file__).resolve().parent / "fixtures"
diff --git a/src/backend/input_types/base.py b/src/backend/input_types/base.py
deleted file mode 100644
index fd64df70e..000000000
--- a/src/backend/input_types/base.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from typing import Any, Dict
-
-
-class BaseInput:
- '''Class to be extended by all the objects that can be used in tool executions as argument.'''
-
- def filter(self, input: Any) -> bool:
- '''Check if this instance is valid based on input filter.
-
- Args:
- input (Any): Tool input whose filter will be applied
-
- Returns:
- bool: Indicate if this instance match the input filter or not
- '''
- return True
-
- def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
-
- To be implemented by subclasses.
-
- Args:
- accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
-
- Returns:
- Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
- return {} # pragma: no cover
diff --git a/src/backend/input_types/enums.py b/src/backend/input_types/enums.py
index 9ccc02ddd..4c881b57e 100644
--- a/src/backend/input_types/enums.py
+++ b/src/backend/input_types/enums.py
@@ -1,41 +1,16 @@
-from enum import Enum
-
from django.db import models
-class InputTypeNames(models.TextChoices):
- '''Input type names, related to findings and resources.'''
-
- OSINT = 'OSINT'
- HOST = 'Host'
- PORT = 'Port'
- PATH = 'Path'
- TECHNOLOGY = 'Technology'
- VULNERABILITY = 'Vulnerability'
- EXPLOIT = 'Exploit'
- CREDENTIAL = 'Credential'
- WORDLIST = 'Wordlist'
-
-
-class InputKeyword(Enum):
- '''List of keywords that can be included in tool argument patterns to include BaseInput data.'''
+class InputTypeName(models.TextChoices):
+ """Input type names, related to findings and wordlists."""
- TARGET = 1
- HOST = 2
- PORT = 3
- PORTS = 4
- PORTS_COMMAS = 5
- TECHNOLOGY = 6
- VERSION = 7
- ENDPOINT = 8
- URL = 9
- EMAIL = 10
- USERNAME = 11
- SECRET = 12
- CVE = 13
- EXPLOIT = 14
- WORDLIST = 15
- COOKIE_NAME = 16
- TOKEN = 17
- CREDENTIAL_TYPE = 18
- CREDENTIAL_TYPE_LOWER = 19
+ OSINT = "OSINT"
+ HOST = "Host"
+ PORT = "Port"
+ PATH = "Path"
+ TECHNOLOGY = "Technology"
+ VULNERABILITY = "Vulnerability"
+ EXPLOIT = "Exploit"
+ CREDENTIAL = "Credential"
+ WORDLIST = "Wordlist"
+ AUTHENTICATION = "Authentication"
diff --git a/src/backend/input_types/fixtures/1_input_types.json b/src/backend/input_types/fixtures/1_input_types.json
index 3e99e8d68..24141a006 100644
--- a/src/backend/input_types/fixtures/1_input_types.json
+++ b/src/backend/input_types/fixtures/1_input_types.json
@@ -5,8 +5,8 @@
"fields": {
"name": "OSINT",
"model": "findings.osint",
- "callback_model": null,
- "regular": true
+ "fallback_model": null,
+ "relationships": true
}
},
{
@@ -15,8 +15,8 @@
"fields": {
"name": "Host",
"model": "findings.host",
- "callback_model": "targets.target",
- "regular": true
+ "fallback_model": "targets.target",
+ "relationships": true
}
},
{
@@ -25,8 +25,8 @@
"fields": {
"name": "Port",
"model": "findings.port",
- "callback_model": "targets.targetport",
- "regular": true
+ "fallback_model": "target_ports.targetport",
+ "relationships": true
}
},
{
@@ -35,8 +35,8 @@
"fields": {
"name": "Path",
"model": "findings.path",
- "callback_model": null,
- "regular": true
+ "fallback_model": "target_ports.targetport",
+ "relationships": true
}
},
{
@@ -45,8 +45,8 @@
"fields": {
"name": "Technology",
"model": "findings.technology",
- "callback_model": "parameters.inputtechnology",
- "regular": true
+ "fallback_model": "parameters.inputtechnology",
+ "relationships": true
}
},
{
@@ -55,8 +55,8 @@
"fields": {
"name": "Vulnerability",
"model": "findings.vulnerability",
- "callback_model": "parameters.inputvulnerability",
- "regular": true
+ "fallback_model": "parameters.inputvulnerability",
+ "relationships": true
}
},
{
@@ -65,8 +65,8 @@
"fields": {
"name": "Credential",
"model": "findings.credential",
- "callback_model": null,
- "regular": true
+ "fallback_model": null,
+ "relationships": true
}
},
{
@@ -75,8 +75,8 @@
"fields": {
"name": "Exploit",
"model": "findings.exploit",
- "callback_model": null,
- "regular": true
+ "fallback_model": null,
+ "relationships": true
}
},
{
@@ -84,9 +84,9 @@
"pk": 9,
"fields": {
"name": "Wordlist",
- "model": null,
- "callback_model": "resources.wordlist",
- "regular": true
+ "model": "wordlists.wordlist",
+ "fallback_model": null,
+ "relationships": true
}
},
{
@@ -95,8 +95,8 @@
"fields": {
"name": "Authentication",
"model": "authentications.authentication",
- "callback_model": null,
- "regular": false
+ "fallback_model": null,
+ "relationships": false
}
}
]
\ No newline at end of file
diff --git a/src/backend/input_types/migrations/0001_initial.py b/src/backend/input_types/migrations/0001_initial.py
deleted file mode 100644
index 72a5c1eec..000000000
--- a/src/backend/input_types/migrations/0001_initial.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Generated by Django 3.2.12 on 2022-03-20 11:45
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- ]
-
- operations = [
- migrations.CreateModel(
- name='InputType',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('name', models.TextField(choices=[('OSINT', 'Osint'), ('Host', 'Host'), ('Port', 'Port'), ('Path', 'Path'), ('Technology', 'Technology'), ('Vulnerability', 'Vulnerability'), ('Exploit', 'Exploit'), ('Credential', 'Credential'), ('Wordlist', 'Wordlist')], max_length=15)),
- ('related_model', models.TextField(blank=True, max_length=30, null=True)),
- ('callback_target', models.TextField(blank=True, max_length=15, null=True)),
- ],
- ),
- ]
diff --git a/src/backend/input_types/migrations/0002_auto_20221226_0011.py b/src/backend/input_types/migrations/0002_auto_20221226_0011.py
deleted file mode 100644
index f3404c492..000000000
--- a/src/backend/input_types/migrations/0002_auto_20221226_0011.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Generated by Django 3.2.16 on 2022-12-25 23:11
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('input_types', '0001_initial'),
- ]
-
- operations = [
- migrations.RenameField(
- model_name='inputtype',
- old_name='callback_target',
- new_name='callback_model',
- ),
- migrations.RenameField(
- model_name='inputtype',
- old_name='related_model',
- new_name='model',
- ),
- migrations.AddField(
- model_name='inputtype',
- name='regular',
- field=models.BooleanField(default=True),
- ),
- ]
diff --git a/src/backend/input_types/models.py b/src/backend/input_types/models.py
index 456a56f3a..c08516c19 100644
--- a/src/backend/input_types/models.py
+++ b/src/backend/input_types/models.py
@@ -1,57 +1,72 @@
-from typing import Union
+from typing import List, Self
from django.apps import apps
from django.db import models
-
-from input_types.base import BaseInput
-from input_types.enums import InputTypeNames
+from framework.models import BaseInput, BaseModel
+from input_types.enums import InputTypeName
# Create your models here.
-class InputType(models.Model):
- '''Input type model, related to each object type that can be included in a tool argument.'''
+class InputType(BaseModel):
+ """Input type model, related to each object type that can be included in a tool argument."""
- name = models.TextField(max_length=15, choices=InputTypeNames.choices) # Input type name
+ name = models.TextField(max_length=15, choices=InputTypeName.choices)
# Related model name in 'app.Model' format. It can be a reference to a Finding
- model = models.TextField(max_length=30, null=True, blank=True)
+ model = models.TextField(max_length=30, blank=True, null=True)
# Related callback model name in 'app.Model' format. It will be used when 'model' is not available
- callback_model = models.TextField(max_length=15, null=True, blank=True)
+ fallback_model = models.TextField(max_length=15, blank=True, null=True)
# Indicate if the input type should be included to calculate relations between models and executions
- regular = models.BooleanField(default=True)
+ relationships = models.BooleanField(default=True)
def __str__(self) -> str:
- '''Instance representation in text format.
+ """Instance representation in text format.
Returns:
str: String value that identifies this instance
- '''
+ """
return self.name
- def get_class_from_reference(self, reference: str) -> BaseInput:
- '''Get model from string reference.
-
- Args:
- reference (str): Reference to model
-
- Returns:
- Union[BaseInput, None]: Model class related to reference
- '''
- app_label, model_name = reference.split('.', 1) # Get model attributes from reference
+ def _get_class_from_reference(self, reference: str) -> BaseInput:
+ if not reference:
+ return None
+ app_label, model_name = reference.split(".", 1)
return apps.get_model(app_label=app_label, model_name=model_name)
- def get_model_class(self) -> Union[BaseInput, None]:
- '''Get related model from 'model' reference.
+ def get_model_class(self) -> BaseInput | None:
+ """Get related model from 'model' reference.
Returns:
BaseInput: Related model of the input type
- '''
- return self.get_class_from_reference(self.model) if self.model else None
+ """
+ return self._get_class_from_reference(self.model)
- def get_callback_model_class(self) -> Union[BaseInput, None]:
- '''Get callback model from 'callback_model' reference.
+ def get_fallback_model_class(self) -> BaseInput | None:
+        """Get fallback model from 'fallback_model' reference.
Returns:
BaseInput: Callback model of the input type
- '''
- return self.get_class_from_reference(self.callback_model) if self.callback_model else None
+ """
+ return self._get_class_from_reference(self.fallback_model)
+
+ def get_related_input_types(self) -> List[Self]:
+ """Get relations between the different input types.
+
+ Returns:
+            List[Self]: List of input types related to this one via ForeignKey relationships
+ """
+ relations: List[InputType] = []
+ model = self.get_model_class()
+ if model:
+ for field in model._meta.get_fields(): # For each model field
+ # Check if field is a ForeignKey to a BaseInput model
+ if field.__class__ == models.ForeignKey and issubclass(
+ field.related_model, BaseInput
+ ):
+ # Search InputType by model
+ related_type = InputType.objects.filter(
+ model=f"{field.related_model._meta.app_label}.{field.related_model._meta.model_name}"
+ )
+ if related_type.exists():
+ relations.append(related_type.first())
+ return relations
diff --git a/src/backend/input_types/serializers.py b/src/backend/input_types/serializers.py
index d865c4434..3fb5e5062 100644
--- a/src/backend/input_types/serializers.py
+++ b/src/backend/input_types/serializers.py
@@ -1,13 +1,16 @@
-from rest_framework import serializers
-
from input_types.models import InputType
+from rest_framework.serializers import ModelSerializer
-class InputTypeSerializer(serializers.ModelSerializer):
- '''Serializer to get the input type data via API.'''
+class InputTypeSerializer(ModelSerializer):
+ """Serializer to get the input type data via API."""
class Meta:
- '''Serializer metadata.'''
+ """Serializer metadata."""
model = InputType
- fields = ('name', 'model', 'callback_model') # Input type fields exposed via API
+ fields = (
+ "name",
+ "model",
+ "fallback_model",
+ )
diff --git a/src/backend/input_types/utils.py b/src/backend/input_types/utils.py
deleted file mode 100644
index 51c691786..000000000
--- a/src/backend/input_types/utils.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from typing import Dict, List, Optional
-
-import requests
-import urllib3
-from django.db import models
-from input_types.base import BaseInput
-from input_types.models import InputType
-from urllib3.exceptions import InsecureRequestWarning
-
-urllib3.disable_warnings(category=InsecureRequestWarning)
-
-
-def get_url(host: str, port: int = None, endpoint: str = '', protocols: List[str] = ['http', 'https']) -> Optional[str]:
- '''Get a HTTP or HTTPS URL from host, port and endpoint.
-
- Args:
- host (str): Host to include in the URL
- port (int, optional): Port to include in the URL. Defaults to None.
- endpoint (str, optional): Endpoint to include in the URL. Defaults to ''.
- protocols (List[str], optional): Protocol list to check. Defaults to ['http', 'https'].
-
- Returns:
- Optional[str]: [description]
- '''
- schema = '{protocol}://{host}/{endpoint}'
- if port:
- schema = '{protocol}://{host}:{port}/{endpoint}' # Include port schema if port exists
- for protocol in protocols: # For each protocol
- url_to_test = schema.format(protocol=protocol, host=host, port=port, endpoint=endpoint)
- try:
- # nosemgrep: python.requests.security.disabled-cert-validation.disabled-cert-validation
- requests.get(url_to_test, timeout=5, verify=False) # Test URL connection
- return url_to_test
- except Exception:
- continue
- return None
-
-
-def get_relations_between_input_types() -> Dict[InputType, List[InputType]]:
- '''Get relations between the different input types.
-
- Returns:
- Dict[InputType, List[InputType]]: Dict with a list of related input types for each input type
- '''
- relations: Dict[InputType, List[InputType]] = {}
- input_types = InputType.objects.filter(regular=True).order_by('-id').all() # Get all input types
- for it in input_types: # For each input type
- relations[it] = []
- model = it.get_model_class()
- if model:
- for field in model._meta.get_fields(): # For each model field
- # Check if field is a ForeignKey to a BaseInput model
- if field.__class__ == models.ForeignKey and issubclass(field.related_model, BaseInput):
- # Search InputType by model
- related_type = InputType.objects.filter(
- model=f'{field.related_model._meta.app_label}.{field.related_model._meta.model_name}'
- )
- if related_type.exists():
- relations[it].append(related_type.first())
- return relations
diff --git a/src/backend/likes/__init__.py b/src/backend/likes/__init__.py
deleted file mode 100644
index 28c20c06d..000000000
--- a/src/backend/likes/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-'''Common features for all objects with Likes.'''
diff --git a/src/backend/likes/filters.py b/src/backend/likes/filters.py
deleted file mode 100644
index 661d04cff..000000000
--- a/src/backend/likes/filters.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from django.db.models import Q, QuerySet
-from django_filters.rest_framework import FilterSet, filters
-
-
-class LikeFilter(FilterSet):
- '''Filter that allows queryset filtering based on current user likes.'''
-
- # Indicate if user likes or not the entities
- liked = filters.BooleanFilter(method='get_liked_items')
-
- def get_liked_items(self, queryset: QuerySet, name: str, value: bool) -> QuerySet:
- '''Filter queryset based on current user likes.
-
- Args:
- queryset (QuerySet): Queryset to be filtered
- name (str): Field name. Not used in this case
- value (bool): Indicate if current user likes or not the entities
-
- Returns:
- QuerySet: Queryset filtered by the current user likes
- '''
- liked = {'liked_by': self.request.user}
- if value:
- liked = Q(**liked) # Get entities liked by the user
- else:
- liked = ~Q(**liked) # Get entities disliked by the user
- return queryset.filter(liked).all()
diff --git a/src/backend/likes/models.py b/src/backend/likes/models.py
deleted file mode 100644
index 178dda1b5..000000000
--- a/src/backend/likes/models.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from django.conf import settings
-from django.db import models
-
-# Create your models here.
-
-
-class LikeBase(models.Model):
- '''Common and abstract LikeBase model, to define common fields for all models that user can like.'''
-
- # Relation with all users that likes each entity
- liked_by = models.ManyToManyField(settings.AUTH_USER_MODEL, related_name='liked_%(class)s')
-
- class Meta:
- '''Model metadata.'''
-
- # To be extended by models that can be liked
- abstract = True
diff --git a/src/backend/likes/serializers.py b/src/backend/likes/serializers.py
deleted file mode 100644
index f992eedef..000000000
--- a/src/backend/likes/serializers.py
+++ /dev/null
@@ -1,39 +0,0 @@
-from typing import Any
-
-from rest_framework import serializers
-from users.models import User
-
-
-class LikeBaseSerializer(serializers.Serializer):
- '''Common serializer for all models that can be liked.'''
-
- # Field that indicates if the current user likes or not each entity
- liked = serializers.SerializerMethodField(method_name='is_liked_by_user', read_only=True)
- # Field that indicates the number of likes for each entity
- likes = serializers.SerializerMethodField(method_name='count_likes', read_only=True)
-
- def is_liked_by_user(self, instance: Any) -> bool:
- '''Check if an instance is liked by the current user or not.
-
- Args:
- instance (Any): Instance to check
-
- Returns:
- bool: Indicate if the current user likes this instance or not
- '''
- check_likes = { # Filter users by Id and liked entities
- 'pk': self.context.get('request').user.id,
- f'liked_{instance.__class__.__name__.lower()}': instance
- }
- return User.objects.filter(**check_likes).exists()
-
- def count_likes(self, instance: Any) -> int:
- '''Count number of likes for an instance.
-
- Args:
- instance (Any): Instance to check
-
- Returns:
- int: Number of likes for this instance
- '''
- return instance.liked_by.count()
diff --git a/src/backend/likes/views.py b/src/backend/likes/views.py
deleted file mode 100644
index ed0dfca5b..000000000
--- a/src/backend/likes/views.py
+++ /dev/null
@@ -1,79 +0,0 @@
-from django.db.models import Count, QuerySet
-from drf_spectacular.utils import extend_schema
-from rest_framework import status
-from rest_framework.decorators import action
-from rest_framework.permissions import IsAuthenticated
-from rest_framework.request import Request
-from rest_framework.response import Response
-from rest_framework.viewsets import GenericViewSet
-from security.authorization.permissions import IsAuditor
-from users.models import User
-
-# Create your views here.
-
-
-class LikeManagementView(GenericViewSet):
- '''Base ViewSet that includes the like and dislike features.'''
-
- def get_queryset(self) -> QuerySet:
- '''Get the model queryset. It's required for allow the access to the likes count by the child ViewSets.
-
- Returns:
- QuerySet: Model queryset
- '''
- return super().get_queryset().annotate(likes_count=Count('liked_by'))
-
- @extend_schema(request=None, responses={201: None})
- # Permission classes are overrided to IsAuthenticated and IsAuditor, because currently only Tools, Processes and
- # Resources (Wordlists) can be liked, and auditors and admins are the only ones that can see this resources.
- # Permission classes should be overrided here, because if not, the standard permissions would be applied, and not
- # all auditors can make POST requests to resources like these.
- @action(
- detail=True,
- methods=['POST'],
- url_path='like',
- url_name='like',
- permission_classes=[IsAuthenticated, IsAuditor]
- )
- def like(self, request: Request, pk: str) -> Response:
- '''Mark an instance as liked by the current user.
-
- Args:
- request (Request): Received HTTP request
- pk (str): Instance Id
-
- Returns:
- Response: HTTP Response
- '''
- instance = self.get_object()
- instance.liked_by.add(request.user) # Add user like
- return Response(status=status.HTTP_201_CREATED)
-
- @extend_schema(request=None, responses={204: None})
- # Permission classes is overrided to IsAuthenticated and IsAuditor, because currently only Tools, Processes and
- # Resources (Wordlists) can be liked, and auditors and admins are the only ones that can see this resources.
- # Permission classes should be overrided here, because if not, the standard permissions would be applied, and not
- # all auditors can make POST requests to resources like these.
- @action(
- detail=True,
- methods=['POST'],
- url_path='dislike',
- url_name='dislike',
- permission_classes=[IsAuthenticated, IsAuditor]
- )
- def dislike(self, request: Request, pk: str) -> Response:
- '''Unmark an instance as liked by the current user.
-
- Args:
- request (Request): Received HTTP request
- pk (str): Instance Id
-
- Returns:
- Response: HTTP Response
- '''
- instance = self.get_object()
- user: User = request.user
- instance.liked_by.remove(user) # Remove user like
- # Remove instance from liked instances by user
- getattr(user, f'liked_{instance.__class__.__name__.lower()}').remove(instance)
- return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/src/backend/manage.py b/src/backend/manage.py
index 507b9296b..67ad86dd0 100755
--- a/src/backend/manage.py
+++ b/src/backend/manage.py
@@ -1,15 +1,15 @@
#!/usr/bin/env python
-'''Django's command-line utility for administrative tasks.'''
+"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
- '''Run administrative tasks.'''
- os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'rekono.settings')
+ """Run administrative tasks."""
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rekono.settings")
try:
from django.core.management import execute_from_command_line
- except ImportError as exc:
+ except ImportError as exc: # pragma: no cover
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
@@ -18,5 +18,5 @@ def main():
execute_from_command_line(sys.argv)
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/src/backend/parameters/__init__.py b/src/backend/parameters/__init__.py
index 779c1181c..e69de29bb 100644
--- a/src/backend/parameters/__init__.py
+++ b/src/backend/parameters/__init__.py
@@ -1 +0,0 @@
-'''Parameters.'''
diff --git a/src/backend/parameters/admin.py b/src/backend/parameters/admin.py
index 4a427cd46..3d455bef2 100644
--- a/src/backend/parameters/admin.py
+++ b/src/backend/parameters/admin.py
@@ -1,5 +1,4 @@
from django.contrib import admin
-
from parameters.models import InputTechnology, InputVulnerability
# Register your models here.
diff --git a/src/backend/parameters/apps.py b/src/backend/parameters/apps.py
index 562e172f5..6e8fc3ad9 100644
--- a/src/backend/parameters/apps.py
+++ b/src/backend/parameters/apps.py
@@ -1,7 +1,6 @@
from django.apps import AppConfig
+from framework.apps import BaseApp
-class ParametersConfig(AppConfig):
- '''Parameters Django application.'''
-
- name = 'parameters'
+class ParametersConfig(BaseApp, AppConfig):
+ name = "parameters"
diff --git a/src/backend/parameters/filters.py b/src/backend/parameters/filters.py
index 64f4141d4..8fdda898a 100644
--- a/src/backend/parameters/filters.py
+++ b/src/backend/parameters/filters.py
@@ -1,37 +1,35 @@
-from django_filters import rest_framework
-from django_filters.rest_framework.filters import OrderingFilter
-
+from django_filters.filters import ModelChoiceFilter
+from django_filters.rest_framework import FilterSet
from parameters.models import InputTechnology, InputVulnerability
+from projects.models import Project
-class InputTechnologyFilter(rest_framework.FilterSet):
- '''FilterSet to filter and sort input Technology entities.'''
+class InputTechnologyFilter(FilterSet):
+ """FilterSet to filter and sort input Technology entities."""
- o = OrderingFilter(fields=('target', 'name')) # Ordering fields
+ project = ModelChoiceFilter(
+ queryset=Project.objects.all(), field_name="target__project"
+ )
class Meta:
- '''FilterSet metadata.'''
-
model = InputTechnology
- fields = { # Filter fields
- 'target': ['exact'],
- 'target__target': ['exact'],
- 'name': ['exact', 'icontains'],
- 'version': ['exact', 'icontains'],
+ fields = {
+ "target": ["exact"],
+ "name": ["exact", "icontains"],
+ "version": ["exact", "icontains"],
}
-class InputVulnerabilityFilter(rest_framework.FilterSet):
- '''FilterSet to filter and sort input Vulnerability entities.'''
+class InputVulnerabilityFilter(FilterSet):
+ """FilterSet to filter and sort input Vulnerability entities."""
- o = OrderingFilter(fields=('target', 'cve')) # Ordering fields
+ project = ModelChoiceFilter(
+ queryset=Project.objects.all(), field_name="target__project"
+ )
class Meta:
- '''FilterSet metadata.'''
-
model = InputVulnerability
- fields = { # Filter fields
- 'target': ['exact'],
- 'target__target': ['exact'],
- 'cve': ['exact']
+ fields = {
+ "target": ["exact"],
+ "cve": ["exact"],
}
diff --git a/src/backend/parameters/migrations/0001_initial.py b/src/backend/parameters/migrations/0001_initial.py
deleted file mode 100644
index 762467ee3..000000000
--- a/src/backend/parameters/migrations/0001_initial.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Generated by Django 3.2.16 on 2023-01-08 12:56
-
-from django.db import migrations, models
-import django.db.models.deletion
-import input_types.base
-import security.input_validation
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- ('targets', '0002_auto_20230108_1356'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='InputVulnerability',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('cve', models.TextField(max_length=20, validators=[security.input_validation.validate_cve])),
- ('target', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='input_vulnerabilities', to='targets.target')),
- ],
- bases=(models.Model, input_types.base.BaseInput),
- ),
- migrations.CreateModel(
- name='InputTechnology',
- fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('name', models.TextField(max_length=100, validators=[security.input_validation.validate_name])),
- ('version', models.TextField(blank=True, max_length=100, null=True, validators=[security.input_validation.validate_name])),
- ('target', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='input_technologies', to='targets.target')),
- ],
- bases=(models.Model, input_types.base.BaseInput),
- ),
- migrations.AddConstraint(
- model_name='inputvulnerability',
- constraint=models.UniqueConstraint(fields=('target', 'cve'), name='unique input vulnerability'),
- ),
- migrations.AddConstraint(
- model_name='inputtechnology',
- constraint=models.UniqueConstraint(fields=('target', 'name', 'version'), name='unique input technology'),
- ),
- ]
diff --git a/src/backend/parameters/models.py b/src/backend/parameters/models.py
index fb736770d..5fc8fe24e 100644
--- a/src/backend/parameters/models.py
+++ b/src/backend/parameters/models.py
@@ -1,132 +1,111 @@
-from typing import Any, Dict, cast
+from typing import Any, Dict
from django.db import models
-from findings.enums import Severity
-from input_types.enums import InputKeyword
-from input_types.models import BaseInput
-from projects.models import Project
-from security.input_validation import validate_cve, validate_name
+from framework.enums import InputKeyword
+from framework.models import BaseInput
+from security.input_validator import Regex, Validator
from targets.models import Target
-from tools.models import Input
# Create your models here.
-class InputTechnology(models.Model, BaseInput):
- '''Input technology model.'''
+class InputTechnology(BaseInput):
+ """Input technology model."""
- target = models.ForeignKey(Target, related_name='input_technologies', on_delete=models.CASCADE) # Related target
- name = models.TextField(max_length=100, validators=[validate_name]) # Technology name
- version = models.TextField(max_length=100, validators=[validate_name], blank=True, null=True) # Technology version
+ target = models.ForeignKey(
+ Target, related_name="input_technologies", on_delete=models.CASCADE
+ )
+ name = models.TextField(
+ max_length=100, validators=[Validator(Regex.NAME.value, code="name")]
+ )
+ version = models.TextField(
+ max_length=100,
+ validators=[Validator(Regex.NAME.value, code="version")],
+ blank=True,
+ null=True,
+ )
- class Meta:
- '''Model metadata.'''
+ filters = [BaseInput.Filter(type=str, field="name", contains=True)]
+ class Meta:
constraints = [
- # Unique constraint by: Target, Technology and Version
- models.UniqueConstraint(fields=['target', 'name', 'version'], name='unique input technology')
+ models.UniqueConstraint(
+ fields=["target", "name", "version"], name="unique_input_technology"
+ )
]
- def filter(self, input: Input) -> bool:
- '''Check if this instance is valid based on input filter.
-
- Args:
- input (Input): Tool input whose filter will be applied
-
- Returns:
- bool: Indicate if this instance match the input filter or not
- '''
- return not input.filter or input.filter.lower() in self.name.lower()
-
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
+ """Get useful information from this instance to be used in tool execution as argument.
Args:
accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
Returns:
Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
- output = self.target.parse()
+ """
+ output = self.target.parse(accumulated)
output[InputKeyword.TECHNOLOGY.name.lower()] = self.name
if self.version:
output[InputKeyword.VERSION.name.lower()] = self.version
return output
def __str__(self) -> str:
- '''Instance representation in text format.
+ """Instance representation in text format.
Returns:
str: String value that identifies this instance
- '''
- base = f'{self.target.__str__()} - {self.name}'
- return f'{base} - {self.version}' if self.version else base
+ """
+ return f"{self.target.__str__()} - {self.name}{f' - {self.version}' if self.version else ''}"
- def get_project(self) -> Project:
- '''Get the related project for the instance. This will be used for authorization purposes.
+ @classmethod
+ def get_project_field(cls) -> str:
+ return "target__project"
- Returns:
- Project: Related project entity
- '''
- return self.target.project
+class InputVulnerability(BaseInput):
+ """Input vulnerability model."""
-class InputVulnerability(models.Model, BaseInput):
- '''Input vulnerability model.'''
+ target = models.ForeignKey(
+ Target, related_name="input_vulnerabilities", on_delete=models.CASCADE
+ )
+ cve = models.TextField(
+ max_length=20, validators=[Validator(Regex.CVE.value, code="cve")]
+ )
- target = models.ForeignKey(Target, related_name='input_vulnerabilities', on_delete=models.CASCADE) # Related target
- cve = models.TextField(max_length=20, validators=[validate_cve]) # CVE
+ filters = [
+ BaseInput.Filter(type=str, field="cve", processor=lambda v: "cve"),
+ BaseInput.Filter(type=str, field="cve", processor=lambda v: v.lower()),
+ ]
class Meta:
- '''Model metadata.'''
-
constraints = [
- # Unique constraint by: Target and CVE
- models.UniqueConstraint(fields=['target', 'cve'], name='unique input vulnerability')
+ models.UniqueConstraint(
+ fields=["target", "cve"], name="unique_input_vulnerability"
+ )
]
- def filter(self, input: Input) -> bool:
- '''Check if this instance is valid based on input filter.
-
- Args:
- input (Input): Tool input whose filter will be applied
-
- Returns:
- bool: Indicate if this instance match the input filter or not
- '''
- return (
- not input.filter or
- input.filter.capitalize() in cast(models.TextChoices, Severity) or
- input.filter.lower().startswith('cwe-') or
- input.filter.lower() == 'cve' or
- (input.filter.lower().startswith('cve-') and input.filter.lower() == self.cve.lower())
- )
-
def parse(self, accumulated: Dict[str, Any] = {}) -> Dict[str, Any]:
- '''Get useful information from this instance to be used in tool execution as argument.
+ """Get useful information from this instance to be used in tool execution as argument.
Args:
accumulated (Dict[str, Any], optional): Information from other instances of the same type. Defaults to {}.
Returns:
Dict[str, Any]: Useful information for tool executions, including accumulated if setted
- '''
- output = self.target.parse()
- output[InputKeyword.CVE.name.lower()] = self.cve
- return output
+ """
+ return {
+ **self.target.parse(accumulated),
+ InputKeyword.CVE.name.lower(): self.cve,
+ }
def __str__(self) -> str:
- '''Instance representation in text format.
+ """Instance representation in text format.
Returns:
str: String value that identifies this instance
- '''
- return f'{self.target.__str__()} - {self.cve}'
+ """
+ return f"{self.target.__str__()} - {self.cve}"
- def get_project(self) -> Project:
- '''Get the related project for the instance. This will be used for authorization purposes.
-
- Returns:
- Project: Related project entity
- '''
- return self.target.project
+ @classmethod
+ def get_project_field(cls) -> str:
+ return "target__project"
diff --git a/src/backend/parameters/serializers.py b/src/backend/parameters/serializers.py
index 4255e090d..21fce3f7a 100644
--- a/src/backend/parameters/serializers.py
+++ b/src/backend/parameters/serializers.py
@@ -1,69 +1,18 @@
-from typing import Any, Dict
-
-from django.forms import ValidationError
-from rest_framework import serializers
-
from parameters.models import InputTechnology, InputVulnerability
+from rest_framework.serializers import ModelSerializer
-class InputTechnologySerializer(serializers.ModelSerializer):
- '''Serializer to manage input technologies via API.'''
+class InputTechnologySerializer(ModelSerializer):
+ """Serializer to manage input technologies via API."""
class Meta:
- '''Serializer metadata.'''
-
model = InputTechnology
- # Input technology fields exposed via API
- fields = ('id', 'target', 'name', 'version')
-
- def validate(self, attrs: Dict[str, Any]) -> Dict[str, Any]:
- '''Validate the provided data before use it.
-
- Args:
- attrs (Dict[str, Any]): Provided data
-
- Raises:
- ValidationError: Raised if provided data is invalid
+ fields = ("id", "target", "name", "version")
- Returns:
- Dict[str, Any]: Data after validation process
- '''
- attrs = super().validate(attrs)
- if InputTechnology.objects.filter(
- target=attrs['target'],
- name=attrs['name'],
- version=attrs['version']
- ).exists():
- raise ValidationError({
- 'name': 'This name already exists in this target',
- 'version': 'This version already exists for this technology in this target'
- })
- return attrs
-
-class InputVulnerabilitySerializer(serializers.ModelSerializer):
- '''Serializer to manage input vulnerabilities via API.'''
+class InputVulnerabilitySerializer(ModelSerializer):
+ """Serializer to manage input vulnerabilities via API."""
class Meta:
- '''Serializer metadata.'''
-
model = InputVulnerability
- # Input vulnerabilities fields exposed via API
- fields = ('id', 'target', 'cve')
-
- def validate(self, attrs: Dict[str, Any]) -> Dict[str, Any]:
- '''Validate the provided data before use it.
-
- Args:
- attrs (Dict[str, Any]): Provided data
-
- Raises:
- ValidationError: Raised if provided data is invalid
-
- Returns:
- Dict[str, Any]: Data after validation process
- '''
- attrs = super().validate(attrs)
- if InputVulnerability.objects.filter(target=attrs['target'], cve=attrs['cve']).exists():
- raise ValidationError({'cve': 'This CVE already exists in this target'})
- return attrs
+ fields = ("id", "target", "cve")
diff --git a/src/backend/parameters/views.py b/src/backend/parameters/views.py
index dc172b331..830b48bfa 100644
--- a/src/backend/parameters/views.py
+++ b/src/backend/parameters/views.py
@@ -1,48 +1,56 @@
-from api.views import CreateViewSet, GetViewSet
-from rest_framework.mixins import (CreateModelMixin, DestroyModelMixin,
- ListModelMixin, RetrieveModelMixin)
-
+from framework.views import BaseViewSet
from parameters.filters import InputTechnologyFilter, InputVulnerabilityFilter
from parameters.models import InputTechnology, InputVulnerability
-from parameters.serializers import (InputTechnologySerializer,
- InputVulnerabilitySerializer)
+from parameters.serializers import (
+ InputTechnologySerializer,
+ InputVulnerabilitySerializer,
+)
+from rest_framework.permissions import IsAuthenticated
+from security.authorization.permissions import (
+ ProjectMemberPermission,
+ RekonoModelPermission,
+)
# Create your views here.
-class InputTechnologyViewSet(
- GetViewSet,
- CreateViewSet,
- CreateModelMixin,
- ListModelMixin,
- RetrieveModelMixin,
- DestroyModelMixin
-):
- '''InputTechnology ViewSet that includes: get, retrieve, create, and delete features.'''
+class InputTechnologyViewSet(BaseViewSet):
+ """InputTechnology ViewSet that includes: get, retrieve, create, and delete features."""
- queryset = InputTechnology.objects.all().order_by('-id')
+ queryset = InputTechnology.objects.all()
serializer_class = InputTechnologySerializer
filterset_class = InputTechnologyFilter
+ permission_classes = [
+ IsAuthenticated,
+ RekonoModelPermission,
+ ProjectMemberPermission,
+ ]
# Fields used to search input technologies
- search_fields = ['name', 'version']
- # Project members field used for authorization purposes
- members_field = 'target__project__members'
+ search_fields = ["name", "version"]
+ ordering_fields = ["id", "target", "name"]
+ http_method_names = [
+ "get",
+ "post",
+ "delete",
+ ]
-class InputVulnerabilityViewSet(
- GetViewSet,
- CreateViewSet,
- CreateModelMixin,
- ListModelMixin,
- RetrieveModelMixin,
- DestroyModelMixin
-):
- '''InputVulnerability ViewSet that includes: get, retrieve, create, and delete features.'''
+class InputVulnerabilityViewSet(BaseViewSet):
+ """InputVulnerability ViewSet that includes: get, retrieve, create, and delete features."""
- queryset = InputVulnerability.objects.all().order_by('-id')
+ queryset = InputVulnerability.objects.all()
serializer_class = InputVulnerabilitySerializer
filterset_class = InputVulnerabilityFilter
+ permission_classes = [
+ IsAuthenticated,
+ RekonoModelPermission,
+ ProjectMemberPermission,
+ ]
# Fields used to search input vulnerabilities
- search_fields = ['cve']
- # Project members field used for authorization purposes
- members_field = 'target__project__members'
+ search_fields = ["cve"]
+ ordering_fields = ["id", "target", "cve"]
+ http_method_names = [
+ "get",
+ "post",
+ "delete",
+ ]
diff --git a/src/backend/input_types/migrations/__init__.py b/src/backend/platforms/__init__.py
similarity index 100%
rename from src/backend/input_types/migrations/__init__.py
rename to src/backend/platforms/__init__.py
diff --git a/src/backend/parameters/migrations/__init__.py b/src/backend/platforms/defect_dojo/__init__.py
similarity index 100%
rename from src/backend/parameters/migrations/__init__.py
rename to src/backend/platforms/defect_dojo/__init__.py
diff --git a/src/backend/platforms/defect_dojo/admin.py b/src/backend/platforms/defect_dojo/admin.py
new file mode 100644
index 000000000..056fc8b02
--- /dev/null
+++ b/src/backend/platforms/defect_dojo/admin.py
@@ -0,0 +1,12 @@
+from django.contrib import admin
+from platforms.defect_dojo.models import (
+ DefectDojoSettings,
+ DefectDojoSync,
+ DefectDojoTargetSync,
+)
+
+# Register your models here.
+
+admin.register(DefectDojoSettings)
+admin.register(DefectDojoSync)
+admin.register(DefectDojoTargetSync)
diff --git a/src/backend/platforms/defect_dojo/apps.py b/src/backend/platforms/defect_dojo/apps.py
new file mode 100644
index 000000000..ba000f846
--- /dev/null
+++ b/src/backend/platforms/defect_dojo/apps.py
@@ -0,0 +1,16 @@
+from pathlib import Path
+from typing import Any, List
+
+from django.apps import AppConfig
+from framework.apps import BaseApp
+
+
+class DefectDojoConfig(BaseApp, AppConfig):
+ name = "platforms.defect_dojo"
+ fixtures_path = Path(__file__).resolve().parent / "fixtures"
+ skip_if_model_exists = True
+
+ def _get_models(self) -> List[Any]:
+ from platforms.defect_dojo.models import DefectDojoSettings
+
+ return [DefectDojoSettings]
diff --git a/src/backend/platforms/defect_dojo/fixtures/1_default.json b/src/backend/platforms/defect_dojo/fixtures/1_default.json
new file mode 100644
index 000000000..9bb7b1f0d
--- /dev/null
+++ b/src/backend/platforms/defect_dojo/fixtures/1_default.json
@@ -0,0 +1,17 @@
+[
+ {
+ "model": "defect_dojo.defectdojosettings",
+ "pk": 1,
+ "fields": {
+ "server": null,
+ "_api_token": null,
+ "tls_validation": true,
+ "tag": "rekono",
+ "test_type_id": null,
+ "test_type": "Rekono Findings Import",
+ "test": "Rekono Execution",
+ "date_format": "%Y-%m-%d",
+ "datetime_format": "%Y-%m-%dT%H:%M"
+ }
+ }
+]
\ No newline at end of file
diff --git a/src/backend/platforms/defect_dojo/integrations.py b/src/backend/platforms/defect_dojo/integrations.py
new file mode 100644
index 000000000..5846b115b
--- /dev/null
+++ b/src/backend/platforms/defect_dojo/integrations.py
@@ -0,0 +1,244 @@
+from datetime import timedelta
+from pathlib import Path as PathFile
+from typing import Any, Dict, List
+
+import requests
+from django.utils import timezone
+from executions.models import Execution
+from findings.enums import PathType, Severity
+from findings.framework.models import Finding
+from findings.models import Path
+from framework.platforms import BaseIntegration
+from platforms.defect_dojo.models import (
+ DefectDojoSettings,
+ DefectDojoSync,
+ DefectDojoTargetSync,
+)
+from requests.exceptions import HTTPError
+from targets.models import Target
+
+
+class DefectDojo(BaseIntegration):
+ def __init__(self) -> None:
+ self.settings = DefectDojoSettings.objects.first()
+ self.url = self.settings.server
+ super().__init__()
+ self.severity_mapping = {
+ Severity.INFO: "S0",
+ Severity.LOW: "S1",
+ Severity.MEDIUM: "S3",
+ Severity.HIGH: "S4",
+ Severity.CRITICAL: "S5",
+ }
+
+ def _request(
+ self, method: callable, url: str, json: bool = True, **kwargs: Any
+ ) -> Any:
+ return super()._request(
+ method,
+ f"{self.settings.server}/api/v2{url}",
+ json,
+ **{
+ **kwargs,
+ "headers": {
+ "User-Agent": "Rekono",
+ "Authorization": f"Token {self.settings.secret}",
+ },
+ "verify": self.settings.tls_validation,
+ },
+ )
+
+ def is_available(self) -> bool:
+ if not self.settings.server or not self.settings.secret:
+ return False
+ if "/api/v2" in self.settings.server:
+ self.settings.server = self.settings.server.replace("/api/v2", "")
+ if self.settings.server[-1] == "/":
+ self.settings.server = self.settings.server[:-1]
+ self.settings.save(update_fields=["server"])
+ try:
+ self._request(requests.get, "/test_types/", timeout=5)
+ return True
+ except:
+ return False
+
+ def exists(self, entity_name: str, id: int) -> bool:
+ try:
+ self._request(self.session.get, f"/{entity_name}/{id}/")
+ return True
+ except:
+ return False
+
+ def create_product_type(self, name: str, description: str) -> Dict[str, Any]:
+ return self._request(
+ self.session.post,
+ "/product_types/",
+ data={"name": name, "description": description},
+ )
+
+ def create_product(
+ self, product_type: int, name: str, description: str, tags: List[str]
+ ) -> Dict[str, Any]:
+ return self._request(
+ self.session.post,
+ "/products/",
+ data={
+ "tags": tags,
+ "name": name,
+ "description": description,
+ "prod_type": product_type,
+ },
+ )
+
+ def create_engagement(
+ self, product: int, name: str, description: str, tags: List[str]
+ ) -> Dict[str, Any]:
+ start = timezone.now()
+ end = start + timedelta(days=7)
+ return self._request(
+ self.session.post,
+ "/engagements/",
+ data={
+ "name": name,
+ "description": description,
+ "tags": tags,
+ "product": product,
+ "status": "In Progress",
+ "engagement_type": "Interactive",
+ "target_start": start.strftime(self.settings.date_format),
+ "target_end": end.strftime(self.settings.date_format),
+ },
+ )
+
+ def _create_test_type(self, name: str, tags: List[str]) -> Dict[str, Any]:
+ return self._request(
+ self.session.post,
+ "/test_types/",
+ data={"name": name, "tags": tags, "dynamic_tool": True},
+ )
+
+ def _create_test(
+ self, test_type: int, engagement: int, title: str, description: str
+ ) -> Dict[str, Any]:
+ return self._request(
+ self.session.post,
+ "/tests/",
+ data={
+ "engagement": engagement,
+ "test_type": test_type,
+ "title": title,
+ "description": description,
+ "target_start": timezone.now().strftime(self.settings.datetime_format),
+ "target_end": timezone.now().strftime(self.settings.datetime_format),
+ },
+ )
+
+ def _create_endpoint(
+ self, product: int, endpoint: Path, target: Target
+ ) -> Dict[str, Any]:
+ try:
+ return self._request(
+ self.session.post,
+ "/endpoints/",
+ data={**endpoint.defect_dojo_endpoint(target), "product": product},
+ )
+ except HTTPError:
+ return None
+
+ def _create_finding(self, test: int, finding: Finding) -> Dict[str, Any]:
+ data = finding.defect_dojo()
+ return self._request(
+ self.session.post,
+ "/findings/",
+ data={
+ **data,
+ "test": test,
+ "numerical_severity": self.severity_mapping[data.get("severity")],
+ "active": True,
+ },
+ )
+
+ def _import_scan(
+ self, engagement: int, execution: Execution, tags: List[str]
+ ) -> Dict[str, Any]:
+ with open(execution.output_file, "r") as report:
+ return self._request(
+ self.session.post,
+ "/import-scan/",
+ data={
+ "scan_type": execution.configuration.tool.defect_dojo_scan_type,
+ "engagement": engagement,
+ "tags": tags,
+ },
+ files={"file": report},
+ )
+
+ def process_findings(self, execution: Execution, findings: List[Finding]) -> None:
+ super().process_findings(execution, findings)
+ target_sync = DefectDojoTargetSync.objects.filter(target=execution.task.target)
+ if target_sync.exists():
+ sync = target_sync.first()
+ engagement_id = sync.engagement_id
+ product_id = sync.defect_dojo_sync.product_id
+ else:
+ project_sync = DefectDojoSync.objects.filter(
+ project=execution.task.target.project
+ )
+ if project_sync.exists():
+ sync = project_sync.first()
+ product_id = sync.product_id
+ if sync.engagement_id:
+ engagement_id = sync.engagement_id
+ else:
+ new_engagement = self.create_engagement(
+ product_id,
+ execution.task.target.target,
+ f"Rekono assessment for {execution.task.target.target}",
+ [self.settings.tag],
+ )
+ new_sync = DefectDojoTargetSync.objects.create(
+ defect_dojo_sync=sync,
+ target=execution.task.target,
+ engagement_id=new_engagement.get("id"),
+ )
+ engagement_id = new_sync.engagement_id
+ else:
+ return
+ if (
+ execution.configuration.tool.defect_dojo_scan_type
+ and execution.output_file is not None
+ and PathFile(execution.output_file).is_file()
+ ):
+ new_import = self._import_scan(
+ engagement_id, execution, [self.settings.tag]
+ )
+ execution.defect_dojo_test_id = new_import.get("test_id")
+ execution.save(update_fields=["defect_dojo_test_id"])
+ else:
+ test_id = None
+ for finding in findings:
+ if isinstance(finding, Path) and finding.type == PathType.ENDPOINT:
+ if finding.defect_dojo_id is None:
+ new_endpoint = self._create_endpoint(
+ product_id, finding, execution.task.target
+ )
+ if new_endpoint is not None:
+ finding.defect_dojo_id = new_endpoint.get("id")
+ else:
+ if not test_id:
+ if not self.settings.test_type_id:
+ new_test_type = self._create_test_type(
+ self.settings.test_type, [self.settings.tag]
+ )
+ self.settings.test_type_id = new_test_type.get("id")
+ self.settings.save(update_fields=["test_type_id"])
+ new_test = self._create_test(
+ self.settings.test_type_id,
+ engagement_id,
+ self.settings.test,
+ self.settings.test,
+ )
+ test_id = new_test.get("id")
+ new_finding = self._create_finding(test_id, finding)
+ finding.defect_dojo_id = new_finding.get("id")
+ finding.save(update_fields=["defect_dojo_id"])
diff --git a/src/backend/platforms/defect_dojo/models.py b/src/backend/platforms/defect_dojo/models.py
new file mode 100644
index 000000000..39c3bc4c2
--- /dev/null
+++ b/src/backend/platforms/defect_dojo/models.py
@@ -0,0 +1,90 @@
+from django.core.validators import MaxValueValidator, MinValueValidator
+from django.db import models
+from framework.models import BaseEncrypted, BaseModel
+from projects.models import Project
+from security.input_validator import Regex, Validator
+from targets.models import Target
+
+# Create your models here.
+
+
+class DefectDojoSettings(BaseEncrypted):
+ server = models.TextField(
+ max_length=100,
+ validators=[Validator(Regex.TARGET.value)],
+ blank=True,
+ null=True,
+ )
+ _api_token = models.TextField(
+ max_length=40,
+ validators=[Validator(Regex.SECRET.value, code="api_token")],
+ null=True,
+ blank=True,
+ db_column="api_token",
+ )
+ tls_validation = models.BooleanField(default=True)
+ tag = models.TextField(
+ max_length=200, validators=[Validator(Regex.NAME.value, code="tag")]
+ )
+ # Stores Test Type ID to avoid duplicated creation
+ test_type_id = models.IntegerField(
+ validators=[MinValueValidator(1), MaxValueValidator(999999999)],
+ blank=True,
+ null=True,
+ )
+ test_type = models.TextField(
+ max_length=200, validators=[Validator(Regex.NAME.value, code="test_type")]
+ )
+ test = models.TextField(
+ max_length=200, validators=[Validator(Regex.NAME.value, code="test")]
+ )
+ date_format = models.TextField(max_length=15)
+ datetime_format = models.TextField(max_length=15)
+
+ _encrypted_field = "_api_token"
+
+ def __str__(self) -> str:
+ return self.server if self.server else super().__str__()
+
+
+class DefectDojoSync(BaseModel):
+ project = models.OneToOneField(
+ Project, related_name="defect_dojo_sync", on_delete=models.CASCADE
+ )
+ product_type_id = models.IntegerField(
+ validators=[MinValueValidator(1), MaxValueValidator(999999999)],
+ )
+ product_id = models.IntegerField(
+ validators=[MinValueValidator(1), MaxValueValidator(999999999)],
+ )
+ engagement_id = models.IntegerField(
+ validators=[MinValueValidator(1), MaxValueValidator(999999999)],
+ blank=True,
+ null=True,
+ )
+
+ def __str__(self) -> str:
+ return f"{self.project.__str__()} - {self.product_type_id} - {self.product_id}{f' - {self.engagement_id}' if self.engagement_id else ''}"
+
+ @classmethod
+ def get_project_field(cls) -> str:
+ return "project"
+
+
+class DefectDojoTargetSync(BaseModel):
+ defect_dojo_sync = models.ForeignKey(
+ DefectDojoSync, related_name="target_syncs", on_delete=models.CASCADE
+ )
+ target = models.OneToOneField(
+ Target, related_name="defect_dojo_sync", on_delete=models.CASCADE
+ )
+ engagement_id = models.IntegerField(
+ validators=[MinValueValidator(1), MaxValueValidator(999999999)]
+ )
+
+ def __str__(self) -> str:
+ return f"{self.defect_dojo_sync.__str__()} - {self.target.target} - {self.engagement_id}"
+
+ @classmethod
+ def get_project_field(cls) -> str:
+ return "defect_dojo_sync__project" # pragma: no cover
diff --git a/src/backend/platforms/defect_dojo/serializers.py b/src/backend/platforms/defect_dojo/serializers.py
new file mode 100644
index 000000000..8e6ffc9fd
--- /dev/null
+++ b/src/backend/platforms/defect_dojo/serializers.py
@@ -0,0 +1,195 @@
+from typing import Any, Dict
+
+from django.core.validators import MaxValueValidator, MinValueValidator
+from django.forms import ValidationError
+from django.shortcuts import get_object_or_404
+from framework.fields import ProtectedSecretField
+from platforms.defect_dojo.integrations import DefectDojo
+from platforms.defect_dojo.models import (
+ DefectDojoSettings,
+ DefectDojoSync,
+ DefectDojoTargetSync,
+)
+from projects.models import Project
+from rest_framework.serializers import (
+ CharField,
+ IntegerField,
+ ModelSerializer,
+ PrimaryKeyRelatedField,
+ Serializer,
+ SerializerMethodField,
+)
+from security.input_validator import Regex, Validator
+
+
+class DefectDojoSettingsSerializer(ModelSerializer):
+ api_token = ProtectedSecretField(
+ Validator(Regex.SECRET.value, code="api_token").__call__,
+ required=False,
+ allow_null=True,
+ source="secret",
+ )
+ is_available = SerializerMethodField(read_only=True)
+
+ class Meta:
+ model = DefectDojoSettings
+ fields = (
+ "id",
+ "server",
+ "api_token",
+ "tls_validation",
+ "tag",
+ "test_type",
+ "test",
+ "is_available",
+ )
+
+ def get_is_available(self, instance: DefectDojoSettings) -> bool:
+ return DefectDojo().is_available()
+
+
+class BaseDefectDojoSerializer:
+ client = None
+
+ def _get_client(self) -> DefectDojo:
+ if not self.client:
+ self.client = DefectDojo()
+ return self.client
+
+ def validate(self, attrs: Dict[str, Any]) -> Dict[str, Any]:
+ if not self._get_client().is_available():
+ raise ValidationError(
+ "Defect-Dojo integration hasn't been configured properly",
+ code="defect-dojo",
+ )
+ attrs = super().validate(attrs)
+ for entity in ["product_type", "product", "engagement"]:
+ value = attrs.get(f"{entity}_id") or attrs.get(entity)
+ if value:
+ if not self._get_client().exists(f"{entity}s", value):
+ raise ValidationError(f"Entity {value} doesn't exist", code=entity)
+ return attrs
+
+
+class DefectDojoSyncSerializer(BaseDefectDojoSerializer, ModelSerializer):
+ class Meta:
+ model = DefectDojoSync
+ fields = (
+ "id",
+ "project",
+ "product_type_id",
+ "product_id",
+ "engagement_id",
+ )
+
+
+class DefectDojoTargetSyncSerializer(ModelSerializer):
+ class Meta:
+ model = DefectDojoTargetSync
+ fields = (
+ "id",
+ "defect_dojo_sync",
+ "target",
+ "engagement_id",
+ )
+
+
+class DefectDojoProductTypeSerializer(BaseDefectDojoSerializer, Serializer):
+ id = IntegerField(read_only=True)
+ name = CharField(
+ required=True,
+ allow_blank=False,
+ max_length=100,
+ validators=[Validator(Regex.NAME.value, code="name")],
+ write_only=True,
+ )
+ description = CharField(
+ required=True,
+ allow_blank=False,
+ max_length=500,
+ validators=[Validator(Regex.TEXT.value, code="description")],
+ write_only=True,
+ )
+
+ def create(self, validated_data: Dict[str, Any]) -> Dict[str, Any]:
+ return self._get_client().create_product_type(
+ validated_data["name"], validated_data["description"]
+ )
+
+
+class DefectDojoProductSerializer(BaseDefectDojoSerializer, Serializer):
+ id = IntegerField(read_only=True)
+ product_type = IntegerField(
+ required=True,
+ validators=[MinValueValidator(1), MaxValueValidator(999999999)],
+ write_only=True,
+ )
+ name = CharField(
+ required=True,
+ allow_blank=False,
+ max_length=100,
+ validators=[Validator(Regex.NAME.value, code="name")],
+ write_only=True,
+ )
+ description = CharField(
+ required=True,
+ allow_blank=False,
+ max_length=500,
+ validators=[Validator(Regex.TEXT.value, code="description")],
+ write_only=True,
+ )
+ # Needed to add project tags to Defect-Dojo product
+ project_id = PrimaryKeyRelatedField(
+ required=True,
+ queryset=Project.objects.all(),
+ write_only=True,
+ )
+
+ def validate(self, attrs: Dict[str, Any]) -> Dict[str, Any]:
+ attrs = super().validate(attrs)
+ attrs["project"] = get_object_or_404(
+ Project,
+ id=attrs.get("project_id").id,
+ members=self.context.get("request").user.id,
+ )
+ return attrs
+
+ def create(self, validated_data: Dict[str, Any]) -> Dict[str, Any]:
+ return self._get_client().create_product(
+ validated_data["product_type"],
+ validated_data["name"],
+ validated_data["description"],
+ [self._get_client().settings.tag]
+ + list(validated_data["project"].tags.all().values_list("slug", flat=True)),
+ )
+
+
+class DefectDojoEngagementSerializer(BaseDefectDojoSerializer, Serializer):
+ id = IntegerField(read_only=True)
+ product = IntegerField(
+ required=True,
+ validators=[MinValueValidator(1), MaxValueValidator(999999999)],
+ write_only=True,
+ )
+ name = CharField(
+ required=True,
+ allow_blank=False,
+ max_length=100,
+ validators=[Validator(Regex.NAME.value, code="name")],
+ write_only=True,
+ )
+ description = CharField(
+ required=True,
+ allow_blank=False,
+ max_length=500,
+ validators=[Validator(Regex.TEXT.value, code="description")],
+ write_only=True,
+ )
+
+ def create(self, validated_data: Dict[str, Any]) -> Dict[str, Any]:
+ return self._get_client().create_engagement(
+ validated_data["product"],
+ validated_data["name"],
+ validated_data["description"],
+ [self._get_client().settings.tag],
+ )
diff --git a/src/backend/platforms/defect_dojo/urls.py b/src/backend/platforms/defect_dojo/urls.py
new file mode 100644
index 000000000..a5ca31bcf
--- /dev/null
+++ b/src/backend/platforms/defect_dojo/urls.py
@@ -0,0 +1,29 @@
+from platforms.defect_dojo.views import (
+ DefectDojoEngagementViewSet,
+ DefectDojoProductTypeViewSet,
+ DefectDojoProductViewSet,
+ DefectDojoSettingsViewSet,
+ DefectDojoSyncViewSet,
+)
+from rest_framework.routers import SimpleRouter
+
+# Register your views here.
+
+router = SimpleRouter()
+router.register("defect-dojo/settings", DefectDojoSettingsViewSet)
+router.register("defect-dojo/sync", DefectDojoSyncViewSet)
+router.register(
+ "defect-dojo/product-types",
+ DefectDojoProductTypeViewSet,
+ basename="defect-dojo_product-type",
+)
+router.register(
+ "defect-dojo/products", DefectDojoProductViewSet, basename="defect-dojo_product"
+)
+router.register(
+ "defect-dojo/engagements",
+ DefectDojoEngagementViewSet,
+ basename="defect-dojo_engagement",
+)
+
+urlpatterns = router.urls
diff --git a/src/backend/platforms/defect_dojo/views.py b/src/backend/platforms/defect_dojo/views.py
new file mode 100644
index 000000000..9b2105f3e
--- /dev/null
+++ b/src/backend/platforms/defect_dojo/views.py
@@ -0,0 +1,75 @@
+from framework.views import BaseViewSet
+from platforms.defect_dojo.models import DefectDojoSettings, DefectDojoSync
+from platforms.defect_dojo.serializers import (
+ DefectDojoEngagementSerializer,
+ DefectDojoProductSerializer,
+ DefectDojoProductTypeSerializer,
+ DefectDojoSettingsSerializer,
+ DefectDojoSyncSerializer,
+)
+from rest_framework import status
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.request import Request
+from rest_framework.response import Response
+from security.authorization.permissions import (
+ IsAuditor,
+ ProjectMemberPermission,
+ RekonoModelPermission,
+)
+
+# Create your views here.
+
+
+class DefectDojoSettingsViewSet(BaseViewSet):
+ queryset = DefectDojoSettings.objects.all()
+ serializer_class = DefectDojoSettingsSerializer
+ permission_classes = [IsAuthenticated, RekonoModelPermission]
+ http_method_names = [
+ "get",
+ "put",
+ ]
+
+
+class DefectDojoSyncViewSet(BaseViewSet):
+ queryset = DefectDojoSync.objects.all()
+ serializer_class = DefectDojoSyncSerializer
+ permission_classes = [
+ IsAuthenticated,
+ RekonoModelPermission,
+ ProjectMemberPermission,
+ ]
+ http_method_names = [
+ "post",
+ "delete",
+ ]
+
+
+class DefectDojoEntityViewSet(BaseViewSet):
+ http_method_names = ["post"]
+ permission_classes = [IsAuthenticated, IsAuditor]
+
+ def create(self, request: Request) -> Response:
+ serializer = self.get_serializer_class()(
+ data=request.data, context={"request": request}
+ )
+ serializer.is_valid(raise_exception=True)
+ try:
+ response = serializer.create(serializer.validated_data)
+ return Response({"id": response.get("id")}, status=status.HTTP_201_CREATED)
+ except:
+ return Response(
+ {"defect-dojo": "Error creating instance on Defect-Dojo"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+
+class DefectDojoProductTypeViewSet(DefectDojoEntityViewSet):
+ serializer_class = DefectDojoProductTypeSerializer
+
+
+class DefectDojoProductViewSet(DefectDojoEntityViewSet):
+ serializer_class = DefectDojoProductSerializer
+
+
+class DefectDojoEngagementViewSet(DefectDojoEntityViewSet):
+ serializer_class = DefectDojoEngagementSerializer
diff --git a/src/backend/processes/migrations/__init__.py b/src/backend/platforms/mail/__init__.py
similarity index 100%
rename from src/backend/processes/migrations/__init__.py
rename to src/backend/platforms/mail/__init__.py
diff --git a/src/backend/platforms/mail/admin.py b/src/backend/platforms/mail/admin.py
new file mode 100644
index 000000000..e6545f57a
--- /dev/null
+++ b/src/backend/platforms/mail/admin.py
@@ -0,0 +1,6 @@
+from django.contrib import admin
+from platforms.mail.models import SMTPSettings
+
+# Register your models here.
+
+admin.site.register(SMTPSettings)
diff --git a/src/backend/platforms/mail/apps.py b/src/backend/platforms/mail/apps.py
new file mode 100644
index 000000000..f674e6376
--- /dev/null
+++ b/src/backend/platforms/mail/apps.py
@@ -0,0 +1,16 @@
+from pathlib import Path
+from typing import Any, List
+
+from django.apps import AppConfig
+from framework.apps import BaseApp
+
+
+class MailConfig(BaseApp, AppConfig):
+ name = "platforms.mail"
+ fixtures_path = Path(__file__).resolve().parent / "fixtures"
+ skip_if_model_exists = True
+
+ def _get_models(self) -> List[Any]:
+ from platforms.mail.models import SMTPSettings
+
+ return [SMTPSettings]
diff --git a/src/backend/platforms/mail/fixtures/1_default.json b/src/backend/platforms/mail/fixtures/1_default.json
new file mode 100644
index 000000000..7ef190ec1
--- /dev/null
+++ b/src/backend/platforms/mail/fixtures/1_default.json
@@ -0,0 +1,13 @@
+[
+ {
+ "model": "mail.smtpsettings",
+ "pk": 1,
+ "fields": {
+ "host": null,
+ "port": 587,
+ "username": null,
+ "_password": null,
+ "tls": true
+ }
+ }
+]
\ No newline at end of file
diff --git a/src/backend/platforms/mail/models.py b/src/backend/platforms/mail/models.py
new file mode 100644
index 000000000..961122713
--- /dev/null
+++ b/src/backend/platforms/mail/models.py
@@ -0,0 +1,40 @@
+from django.core.validators import MaxValueValidator, MinValueValidator
+from django.db import models
+from framework.models import BaseEncrypted
+from security.input_validator import Regex, Validator
+
+# Create your models here.
+
+
+class SMTPSettings(BaseEncrypted):
+ host = models.TextField(
+ max_length=100,
+ validators=[Validator(Regex.TARGET.value)],
+ blank=True,
+ null=True,
+ )
+ port = models.IntegerField(
+ validators=[MinValueValidator(0), MaxValueValidator(65535)],
+ blank=True,
+ null=True,
+ default=587,
+ )
+ username = models.TextField(
+ max_length=100,
+ validators=[Validator(Regex.NAME.value, code="name")],
+ null=True,
+ blank=True,
+ )
+ _password = models.TextField(
+ max_length=200,
+ validators=[Validator(Regex.SECRET.value, code="api_token")],
+ null=True,
+ blank=True,
+ db_column="password",
+ )
+ tls = models.BooleanField(default=True)
+
+ _encrypted_field = "_password"
+
+ def __str__(self) -> str:
+ return f"{self.host}:{self.port}"
diff --git a/src/backend/platforms/mail/notifications.py b/src/backend/platforms/mail/notifications.py
new file mode 100644
index 000000000..cf86f4a05
--- /dev/null
+++ b/src/backend/platforms/mail/notifications.py
@@ -0,0 +1,129 @@
+import logging
+import os
+import threading
+from typing import Any, Dict, List
+
+import certifi
+from django.core.mail import EmailMultiAlternatives
+from django.core.mail.backends.smtp import EmailBackend
+from django.template.loader import get_template
+from django.utils import timezone
+from executions.models import Execution
+from findings.framework.models import Finding
+from framework.platforms import BaseNotification
+from platforms.mail.models import SMTPSettings
+from rekono.settings import CONFIG
+
+logger = logging.getLogger()
+
+
+class SMTP(BaseNotification):
+ enable_field = "email_notifications"
+
+ def __init__(self) -> None:
+ self.settings = SMTPSettings.objects.first()
+ self.backend = (
+ EmailBackend(
+ host=self.settings.host,
+ port=self.settings.port,
+ username=self.settings.username,
+ password=self.settings.secret,
+ use_tls=self.settings.tls,
+ timeout=5,
+ )
+ if self.settings
+ else None
+ )
+ self.datetime_format = "%Y-%m-%d %H:%M %Z"
+ # The trusted certificates must be defined
+ os.environ["SSL_CERT_FILE"] = certifi.where()
+
+ def is_available(self) -> bool:
+ if not self.settings or not self.settings.host or not self.settings.port:
+ return False
+ try:
+ self.backend.open()
+ self.backend.close()
+ return True
+ except:
+ return False
+
+ def _send_messages_in_background(
+ self, users: List[Any], subject: str, template: str, data: Dict[str, Any]
+ ) -> None:
+ threading.Thread(
+ target=self._send_messages, args=(users, subject, template, data)
+ ).start()
+
+ def _send_messages(
+ self, users: List[Any], subject: str, template: str, data: Dict[str, Any]
+ ) -> None:
+ try:
+ message = EmailMultiAlternatives(
+ subject, "", "Rekono Tool Configuration {{ configuration.name }} {{ execution.configuration.name }} Your Rekono user has been enabled. Please, follow this link to establish your password.{{ execution.task.target.project.name }}
{{ execution.task.target.project.name }}
- {% for e in port %}
+ {% for p in port %}
- {% if e.host %}
-
{% endfor %}
@@ -150,19 +150,19 @@ {{ e.host.address }}
+ {% if p.host %}
+ {{ p.host.address }}
{% else %}
{% endif %}
- {{ e.port }}
- {{ e.status }}
- {{ e.protocol }}
- {{ e.service }}
+ {{ p.port }}
+ {{ p.status }}
+ {{ p.protocol }}
+ {{ p.service }}
{{ execution.task.target.project.name }}
- {% for e in path %}
+ {% for p in path %}
- {% if e.port and e.port.host %}
-
{% endfor %}
@@ -177,6 +177,7 @@ {{ e.port.host.address }} - {{ e.port.port }}
- {% elif e.port %}
- {{ e.port.port }}
+ {% if p.port and p.port.host %}
+ {{ p.port.host.address }} - {{ p.port.port }}
+ {% elif p.port %}
+ {{ p.port.port }}
{% else %}
{% endif %}
- {{ e.type }}
- {{ e.path }}
- {{ e.status }}
- {{ e.extra }}
+ {{ p.type }}
+ {{ p.path }}
+ {{ p.status }}
+ {{ p.extra }}
{{ execution.task.target.project.name }}
Port
Name
Version
+ Reference
@@ -191,6 +192,12 @@ {{ execution.task.target.project.name }}
{% endif %}
{{ t.name }}
{{ t.version }}
+ {% if v.reference %}
+
+ Link
+ {% else %}
+
+ {% endif %}
{% endfor %}
@@ -202,6 +209,7 @@ {{ execution.task.target.project.name }}
+
{% for c in credential %}
Technology
Email
Username
Secret
@@ -211,6 +219,11 @@ {{ execution.task.target.project.name }}
+ {% if c.technology %}
+
@@ -252,6 +266,7 @@ {{ c.technology.name }}
+ {% else %}
+
+ {% endif %}
{{ c.email }}
{{ c.username }}
{{ c.secret }}
@@ -231,6 +244,7 @@ {{ execution.task.target.project.name }}
Name
Severity
CVE
+ CWE
Reference
{{ execution.task.target.project.name }}
{{ v.name }}
{{ v.severity }}
{{ v.cve }}
+ {{ v.cwe }}
{% if v.reference %}
Link
@@ -280,17 +295,27 @@ {{ execution.task.target.project.name }}
{% if e.vulnerability %}
{% endfor %}
diff --git a/src/backend/email_notifications/templates/user_enable_account.html b/src/backend/platforms/mail/templates/user_enable_account.html
similarity index 95%
rename from src/backend/email_notifications/templates/user_enable_account.html
rename to src/backend/platforms/mail/templates/user_enable_account.html
index e1523d664..c5a3ea11c 100644
--- a/src/backend/email_notifications/templates/user_enable_account.html
+++ b/src/backend/platforms/mail/templates/user_enable_account.html
@@ -19,7 +19,7 @@ {{ e.vulnerability.name }}
- {% else %}
+ {% if e.vulnerability.technology %}
+ {{ e.vulnerability.technology.name }}
+ {% elif e.technology %}
+ {{ e.technology.name }}
+ {% else %}
+
+ {% endif %}
+ {% elif e.technology %}
- {% endif %}
- {% if e.technology %}
{{ e.technology.name }}
{% else %}
+
{% endif %}
{{ e.title }}
-
- {{ e.edb_id }}
+ {% if v.reference %}
+
+ {{ e.edb_id }}
+ {% else %}
+
+ {% endif %}
Welcome {{ user.username }}!
{% endif %}