Skip to content

Commit

Permalink
fixed: linting
Browse files Browse the repository at this point in the history
  • Loading branch information
AronBuzogany committed Feb 24, 2024
1 parent a56c4a4 commit ae34ff6
Show file tree
Hide file tree
Showing 5 changed files with 102 additions and 19 deletions.
63 changes: 54 additions & 9 deletions backend/project/utils/submissions/evaluator.py
Original file line number Diff line number Diff line change
@@ -1,42 +1,87 @@
from project.models.projects import Projects
"""
This module is responsible for evaluating the submission.
It uses docker to run the evaluator in a container.
The image used for the container is determined by the evaluator argument.
If the evaluator is not found in the
DOCKER_IMAGE_MAPPER, the project test path is used as the image.
The evaluator is run in the container and the
exit code is returned. The output of the evaluator is written to a log file
in the submission output folder.
"""
from os import path
import docker
from project.models.projects import Projects
from project.models.submissions import Submissions
from project.utils.submissions.file_handling import create_submission_subfolders
import docker

DOCKER_IMAGE_MAPPER = {
"python": path.join(path.dirname(__file__), "evaluators", "python"),
}

def evaluate(submission: Submissions, project: Projects, evaluator: str) -> int:
    """
    Evaluate a submission using the evaluator.

    Args:
        submission (Submissions): The submission to evaluate.
        project (Projects): The project the submission is for.
        evaluator (str): The evaluator to use.

    Returns:
        int: The exit code of the evaluator.

    Raises:
        ValueError: If the evaluator is not found in the DOCKER_IMAGE_MAPPER
            and the project test path does not exist.
    """
    project_path = project.test_path

    # Fall back to the project's own test path when no pre-built image is
    # registered for the requested evaluator.
    docker_image = DOCKER_IMAGE_MAPPER.get(evaluator, None)
    if docker_image is None:
        docker_image = project_path
        if not path.exists(docker_image):
            # NOTE: the original message used backslash line-continuations
            # inside the f-string, which baked source indentation into the
            # error text; implicit concatenation keeps it well-formed.
            raise ValueError(
                f"Test path: {docker_image}, not found and the provided "
                f"evaluator: {evaluator} is not associated with any image.")

    submission_path = submission.submission_path
    submission_solution_path = path.join(submission_path, "submission")

    container = create_and_run_evaluator(docker_image,
                                         submission.submission_id,
                                         project_path,
                                         submission_solution_path)

    submission_output_path = create_submission_subfolders(submission_path)
    test_output_path = path.join(submission_output_path, "test_output.log")

    # Block until the evaluator container exits; wait() returns a dict
    # containing the container's status code.
    exit_code = container.wait()

    # Persist the evaluator's combined output for later inspection.
    with open(test_output_path, "w", encoding='utf-8') as output_file:
        output_file.write(container.logs().decode('utf-8'))

    container.remove()

    return exit_code['StatusCode']

def create_and_run_evaluator(docker_image: str, submission_id: int, project_path: str, submission_solution_path: str):
def create_and_run_evaluator(docker_image: str,
submission_id: int,
project_path: str,
submission_solution_path: str):
"""
Create and run the evaluator container.
Args:
docker_image (str): The path to the docker image.
submission_id (int): The id of the submission.
project_path (str): The path to the project.
submission_solution_path (str): The path to the submission solution.
Returns:
docker.models.containers.Container: The container that is running the evaluator.
"""
client = docker.from_env()
image, build_logs = client.images.build(path=docker_image, tag=f"submission_{submission_id}")
image, _ = client.images.build(path=docker_image, tag=f"submission_{submission_id}")

container = client.containers.run(
image.id,
Expand Down
26 changes: 23 additions & 3 deletions backend/project/utils/submissions/file_handling.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
"""
This module contains functions for handling files and folders for submissions.
"""

from os import path, makedirs, getenv

def create_submission_subfolders(submission_path: str):
"""
Create the output and artifacts folder for a submission.
"""
submission_output_path = path.join(submission_path, "output")
artifacts_path = path.join(submission_output_path, "artifacts")

Expand All @@ -12,8 +19,21 @@ def create_submission_subfolders(submission_path: str):

return submission_output_path

def create_submission_folder(submission_id, project_id):
submission_path = path.join(getenv("SUBMISSIONS_ROOT_PATH"), str(project_id), str(submission_id))
def create_submission_folder(submission_id: int, project_id: int):
"""
Create the submission folder and the submission
solution folder that will contain a students solution.
Args:
submission_id (int): The id of the submission.
project_id (int): The id of the project.
Returns:
str: The path to the submission folder.
"""
submission_path = path.join(getenv("SUBMISSIONS_ROOT_PATH"),
str(project_id),
str(submission_id))
submission_solution_path = path.join(submission_path, "submission")

if not path.exists(submission_path):
Expand All @@ -22,4 +42,4 @@ def create_submission_folder(submission_id, project_id):
if not path.exists(submission_solution_path):
makedirs(submission_solution_path)

return submission_path
return submission_path
Original file line number Diff line number Diff line change
@@ -1,13 +1,19 @@
from project.utils.submissions.file_handling import create_submission_folder
"""
This file contains tests for functions that are applicable to all evaluators.
"""
from os import path
from shutil import rmtree
from project.utils.submissions.file_handling import create_submission_folder

def test_create_submission_folder_creates(submission_root):
    """
    Verify that create_submission_folder builds the expected directory
    tree (root/project/submission plus the nested "submission" folder).
    """
    sub_id, proj_id = 1, 1
    created_path = create_submission_folder(sub_id, proj_id)
    expected_path = path.join(submission_root, str(proj_id), str(sub_id))
    assert created_path == expected_path
    assert path.exists(created_path)
    assert path.exists(path.join(created_path, "submission"))
    # Clean up so repeated runs start from an empty root.
    rmtree(created_path)
11 changes: 9 additions & 2 deletions backend/tests/utils/submission_evaluators/conftest.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,18 @@
import pytest
"""
This file contains the global fixtures for the submission evaluators tests.
"""
from shutil import rmtree
from os import environ, mkdir, path
import pytest

@pytest.fixture
def submission_root():
    """
    Yield a temporary submissions-root folder for the tests and export it
    via SUBMISSIONS_ROOT_PATH; remove it recursively on teardown.
    """
    root = path.join(path.dirname(__file__), "submissions-root")
    environ["SUBMISSIONS_ROOT_PATH"] = root
    mkdir(root)
    yield root
    rmtree(root)
11 changes: 8 additions & 3 deletions backend/tests/utils/submission_evaluators/python_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,18 @@
This file contains tests for the python submission evaluator.
"""

from os import path
import pytest
from project.utils.submissions.evaluator import evaluate
from project.utils.submissions.file_handling import create_submission_folder
from project.models.submissions import Submissions
from project.models.projects import Projects
import pytest
from os import path

@pytest.fixture
# NOTE(review): the fixture name keeps the original "succes" spelling;
# renaming it would break every test that requests this fixture by name.
def project_path_succes():
    """
    Return the path to a project with a successful test case.
    """
    return path.join(path.dirname(__file__), "resources", "python", "tc_1")

@pytest.fixture
Expand Down Expand Up @@ -40,5 +43,7 @@ def test_makes_log_file(evaluate_python):
def test_logs_output(evaluate_python):
    """Test whether the evaluator logs the output of the script."""
    _, submission_path = evaluate_python
    log_path = path.join(submission_path, "output", "test_output.log")
    with open(log_path, "r", encoding="utf-8") as output_file:
        assert "Hello, World!" in output_file.read()

0 comments on commit ae34ff6

Please sign in to comment.