Skip to content

Commit

Permalink
Better submission status viewing (#69)
Browse files Browse the repository at this point in the history
Co-authored-by: Prateek Kumar <[email protected]>
Co-authored-by: vishwakftw <[email protected]>
  • Loading branch information
vishwakftw and prateekkumarweb authored Jun 7, 2019
1 parent 8c3037f commit cf1e26a
Show file tree
Hide file tree
Showing 14 changed files with 173 additions and 101 deletions.
2 changes: 1 addition & 1 deletion .flake8
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
[flake8]
max-line-length = 100
exclude = judge/migrations
exclude = judge/migrations,content/**/*
2 changes: 1 addition & 1 deletion content/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -57,4 +57,4 @@ ENV SUB_ID=-1
WORKDIR /app

# Run the meta script
CMD python compile_and_test.py --submission_config tmp/sub_run_${SUB_ID}.txt
CMD python3.6 compile_and_test.py --submission_config tmp/sub_run_${SUB_ID}.txt
10 changes: 7 additions & 3 deletions content/compile_and_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,13 +29,17 @@
'submission_{}{}'.format(sub_info[1], sub_info[2])],
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e: # If compilation fails, end this script here
error_msg = e.output.decode('utf-8').replace('\n', '\\n')
error_msg = str(e.output.decode('utf-8'))
with open(args.submission_config, "a") as stat_file:
for testcase_id in sub_info[5:]:
log_file_name = 'sub_run_{}_{}.log'.format(sub_info[1], testcase_id)

with open('tmp/' + log_file_name, "w") as log_file:
log_file.write(error_msg)

stat_file.write("{} {} 0 0 {}\n"
.format(testcase_id,
'CE' if e.returncode == 1 else 'NA',
error_msg))
'CE' if e.returncode == 1 else 'NA', log_file_name))
else:
subprocess.call(['./main_tester.sh'] + sub_info[0:2] + sub_info[3:]) # run tests
subprocess.call(['rm', 'submissions/submission_{}'.format(sub_info[1])]) # remove executable
5 changes: 2 additions & 3 deletions content/main_tester.sh
Original file line number Diff line number Diff line change
Expand Up @@ -80,14 +80,13 @@ run_submission() {
# This is then checked normally using a diff
# The status is appended to the verdict_string along with the memory and time consumed
VERDICT=""
ERR_MSG=""
if [ "$TIMEOUT" = true ] ; then
VERDICT=$(error_code_to_string $TLE ${TID})
elif [ "$MEMOUT" = true ] ; then
VERDICT=$(error_code_to_string $OOM ${TID})
else
clean_generated_output ${SID} ${TID} # Delete the generated file to prevent any mismatch
ERR_MSG=$({ ${SUB_FDR}/submission_${SID} < ${TEST_FDR}/inputfile_${TID}.txt > ${TMP}/sub_output_${SID}_${TID}.txt; } 2>&1)
${SUB_FDR}/submission_${SID} < ${TEST_FDR}/inputfile_${TID}.txt > ${TMP}/sub_output_${SID}_${TID}.txt 2> ${TMP}/sub_run_${SID}_${TID}.log

case "$?" in
"0")
Expand All @@ -102,7 +101,7 @@ run_submission() {
;;
esac
fi
VERDICT="${VERDICT} ${WCTIME} ${MAXVM} ${ERR_MSG}"
VERDICT="${VERDICT} ${WCTIME} ${MAXVM} sub_run_${SID}_${TID}.log"
echo ${VERDICT}
}

Expand Down
1 change: 1 addition & 0 deletions judge/default/compilation_script.sh
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ compile_c() {
return $SUCCESS
else
return $FAILURE
fi
}

# This is the function to compile .cpp files using g++
Expand Down
93 changes: 58 additions & 35 deletions judge/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,17 +3,25 @@

from re import compile
from io import StringIO
from logging import error as log_error
from traceback import print_exc
from csv import writer as csvwriter
from shutil import rmtree, copyfile
from logging import error as log_error
from datetime import timedelta, datetime
from typing import Tuple, Optional, Dict, Any, List, Union

from django.utils import timezone
from django.core.files.uploadedfile import InMemoryUploadedFile

from . import models


def _check_and_remove(*fullpaths):
for fullpath in fullpaths:
if os.path.exists(fullpath):
os.remove(fullpath)


def process_contest(contest_name: str, contest_start: datetime, contest_soft_end: datetime,
contest_hard_end: datetime, penalty: float, is_public: bool,
enable_linter_score: bool, enable_poster_score: bool) -> Tuple[bool, str]:
Expand Down Expand Up @@ -74,60 +82,77 @@ def delete_contest(contest_id: int) -> Tuple[bool, Optional[str]]:
return (False, 'Contest could not be deleted')


def process_problem(code: str, contest: int, name: str, statement: str, input_format: str,
output_format: str, difficulty: int, time_limit: int, memory_limit: int,
file_format: str, starting_code, max_score: int,
compilation_script, test_script) -> Tuple[bool, Optional[str]]:
def process_problem(
contest: int,
**kwargs: Union[str, int, Optional[InMemoryUploadedFile]]) -> Tuple[bool, Optional[str]]:
"""
Function to process a new :class:`~judge.models.Problem`.
:param code: Problem code
:param contest: Contest ID to which the problem belongs
:attr:`**kwargs` includes the following keyword arguments, which are directly passed
to construct a :class:`~judge.models.Problem` object.
:param code: Problem code
:type code: str
:param name: Problem name
:type name: str
:param statement: Problem statement
:type statement: str
:param input_format: Problem input format
:type input_format: str
:param output_format: Problem output format
:type output_format: str
:param difficulty: Problem difficulty
:type difficulty: int
:param time_limit: Problem execution time limit
:type time_limit: int
:param memory_limit: Problem virtual memory limit
:param file_format: Accepted file format for submissions
:type memory_limit: int
:param file_exts: Accepted file format for submissions
:type file_exts: str
:param starting_code: Starting code for the problem
:type starting_code: Optional[InMemoryUploadedFile]
:param max_score: Maximum judge score per test case for the problem
:type max_score: int
:param compilation_script: Compilation script for the submissions
:type compilation_script: Optional[InMemoryUploadedFile]
:param test_script: Test script for the submissions
:type test_script: Optional[InMemoryUploadedFile]
:returns: A 2-tuple - 1st element indicating whether the processing has succeeded, and
2nd element providing an error message if processing is unsuccessful.
"""
# Check if the Problem Code has already been taken
code = kwargs.get('code')
try:
models.Problem.objects.get(pk=code)
return (False, '{} already a used Question code.'.format(code))
except models.Problem.DoesNotExist:
pass

statement = 'The problem statement is empty.' if statement is None else statement
input_format = 'No input format specified.' if input_format is None else input_format
output_format = 'No output format specified.' if output_format is None else output_format
# Quill replaces empty input with this
NO_INPUT_QUILL = '{"ops":[{"insert":"\\n"}]}'
if kwargs.get('statement') == NO_INPUT_QUILL:
kwargs['statement'] = 'The problem statement is empty.'
if kwargs.get('input_format') == NO_INPUT_QUILL:
kwargs['input_format'] = 'No input format specified.'
if kwargs.get('output_format') == NO_INPUT_QUILL:
kwargs['output_format'] = 'No output format specified.'

# if either one of compilation_script or test_script is None,
# we create a Problem with the default compilation script and/or test_script
# and then we copy a compilation script and/or test_script to the right location
# and update the link after creation
no_comp_script, no_test_script = compilation_script is None, test_script is None
no_comp_script = kwargs.get('compilation_script') is None
no_test_script = kwargs.get('test_script') is None
if no_comp_script:
compilation_script = './default/compilation_script.sh'
kwargs['compilation_script'] = './default/compilation_script.sh'
if no_test_script:
test_script = './default/test_script.sh'
kwargs['test_script'] = './default/test_script.sh'

try:
c = models.Contest.objects.get(pk=contest)
p = models.Problem.objects.create(
code=code, contest=c, name=name, statement=statement,
input_format=input_format, output_format=output_format,
difficulty=difficulty, time_limit=time_limit, memory_limit=memory_limit,
file_format=file_format, start_code=starting_code, max_score=max_score,
compilation_script=compilation_script,
test_script=test_script)
p = models.Problem.objects.create(contest=c, **kwargs)

if not os.path.exists(os.path.join('content', 'problems', p.code)):
# Create the problem directory explicitly if not yet created
Expand Down Expand Up @@ -210,17 +235,15 @@ def delete_problem(problem_id: str) -> Tuple[bool, Optional[str]]:
'content', 'testcase', 'inputfile_{}.txt'.format(testcase.pk))
outputfile_path = os.path.join(
'content', 'testcase', 'outputfile_{}.txt'.format(testcase.pk))
if os.path.exists(inputfile_path):
os.remove(inputfile_path)
if os.path.exists(outputfile_path):
os.remove(outputfile_path)
_check_and_remove(inputfile_path, outputfile_path)

submissions = models.Submission.objects.filter(problem=problem)
for submission in submissions:
submission_path = os.path.join(
'content', 'submissions',
'submission_{}{}'.format(submission.pk, submission.file_type))
if os.path.exists(submission_path):
os.remove(submission_path)
_check_and_remove(submission_path)

rmtree(os.path.join('content', 'problems', problem_id))

models.Problem.objects.filter(pk=problem_id).delete()
Expand Down Expand Up @@ -254,7 +277,8 @@ def process_person(email: str, rank: int = 0) -> Tuple[bool, Optional[str]]:


def process_testcase(problem_id: str, test_type: str,
input_file, output_file) -> Tuple[bool, Optional[str]]:
input_file: InMemoryUploadedFile,
output_file: InMemoryUploadedFile) -> Tuple[bool, Optional[str]]:
"""
Function to process a new :class:`~judge.models.TestCase` for a problem.
Expand Down Expand Up @@ -300,10 +324,8 @@ def delete_testcase(testcase_id: str) -> Tuple[bool, Optional[str]]:
'content', 'testcase', 'inputfile_{}.txt'.format(testcase_id))
outputfile_path = os.path.join(
'content', 'testcase', 'outputfile_{}.txt'.format(testcase_id))
if os.path.exists(inputfile_path):
os.remove(inputfile_path)
if os.path.exists(outputfile_path):
os.remove(outputfile_path)
_check_and_remove(inputfile_path, outputfile_path)

models.TestCase.objects.filter(pk=testcase_id).delete()
return (True, None)
except Exception as e:
Expand All @@ -312,7 +334,8 @@ def delete_testcase(testcase_id: str) -> Tuple[bool, Optional[str]]:


def process_submission(problem_id: str, participant: str, file_type: str,
submission_file, timestamp: str) -> Tuple[bool, Optional[str]]:
submission_file: InMemoryUploadedFile,
timestamp: str) -> Tuple[bool, Optional[str]]:
"""
Function to process a new :class:`~judge.models.Submission` for a problem by a participant.
Expand All @@ -326,9 +349,9 @@ def process_submission(problem_id: str, participant: str, file_type: str,
"""
try:
problem = models.Problem.objects.get(pk=problem_id)
if file_type not in problem.file_format.split(','):
if file_type not in problem.file_exts.split(','):
return (False, 'Accepted file types: \"{}\"'
.format(', '.join(problem.file_format.split(','))))
.format(', '.join(problem.file_exts.split(','))))
participant = models.Person.objects.get(email=participant)
s = problem.submission_set.create(participant=participant, file_type=file_type,
submission_file=submission_file, timestamp=timestamp)
Expand Down Expand Up @@ -658,7 +681,7 @@ def get_personcontest_score(person: str, contest: int) -> Tuple[bool, Union[floa


def get_submissions(problem_id: str, person_id: Optional[str]) \
-> Tuple[bool, Union[Dict[str, List[Any]], str]]:
-> Tuple[bool, Union[Dict[str, List[Any]], str]]:
"""
Function to retrieve all submissions made by everyone or a specific person for this
problem.
Expand Down
10 changes: 5 additions & 5 deletions judge/migrations/0001_initial.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Generated by Django 2.2.1 on 2019-06-02 07:16
# Generated by Django 2.2.1 on 2019-06-05 06:21

import datetime
from django.db import migrations, models
Expand Down Expand Up @@ -47,10 +47,10 @@ class Migration(migrations.Migration):
('input_format', models.TextField(default='No input format specified.')),
('output_format', models.TextField(default='No output format specified.')),
('difficulty', models.PositiveSmallIntegerField(default=0)),
('time_limit', models.DurationField(default=datetime.timedelta(seconds=10))),
('time_limit', models.DurationField(default=datetime.timedelta(0, 10))),
('memory_limit', models.PositiveIntegerField(default=200000)),
('file_format', models.CharField(default='.py,.cpp', max_length=100)),
('start_code', models.FileField(null=True, upload_to=judge.models.start_code_name)),
('file_exts', models.CharField(default='.py,.cpp', max_length=100)),
('starting_code', models.FileField(null=True, upload_to=judge.models.starting_code_name)),
('max_score', models.PositiveSmallIntegerField(default=0)),
('compilation_script', models.FileField(default='./default/compilation_script.sh', upload_to=functools.partial(judge.models.compilation_test_upload_location, *(), **{'is_compilation': True}))),
('test_script', models.FileField(default='./default/test_script.sh', upload_to=functools.partial(judge.models.compilation_test_upload_location, *(), **{'is_compilation': False}))),
Expand Down Expand Up @@ -97,7 +97,7 @@ class Migration(migrations.Migration):
name='SubmissionTestCase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('verdict', models.CharField(choices=[('F', 'Fail'), ('P', 'Pass'), ('R', 'Running'), ('TE', 'TLE'), ('ME', 'OOM'), ('CE', 'COMPILATION_ERROR'), ('RE', 'RUNTIME_ERROR'), ('NA', 'NOT_AVAILABLE')], default='NA', max_length=2)),
('verdict', models.CharField(choices=[('F', 'Failed'), ('P', 'Passed'), ('R', 'Running'), ('TE', 'Time Limit Exceeded'), ('ME', 'Out Of Memory'), ('CE', 'Compilation Error'), ('RE', 'Runtime Error'), ('NA', 'Internal Failure')], default='NA', max_length=2)),
('memory_taken', models.PositiveIntegerField()),
('time_taken', models.DurationField()),
('message', models.TextField(default='')),
Expand Down
22 changes: 11 additions & 11 deletions judge/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from django.utils import timezone


def start_code_name(instance, filename):
def starting_code_name(instance, filename):
return 'content/problems/{}/start_code{}'.format(instance.code, splitext(filename)[1])


Expand Down Expand Up @@ -105,10 +105,10 @@ class Problem(models.Model):
"""Problem memory limit"""

# Support upto 30 file formats
file_format = models.CharField(max_length=100, default='.py,.cpp')
"""Accepted file formats for submissions to problem"""
file_exts = models.CharField(max_length=100, default='.py,.cpp')
"""Accepted file extensions for submissions to problem"""

start_code = models.FileField(upload_to=start_code_name, null=True)
starting_code = models.FileField(upload_to=starting_code_name, null=True)
"""Problem starting code"""

max_score = models.PositiveSmallIntegerField(default=0)
Expand Down Expand Up @@ -246,14 +246,14 @@ class SubmissionTestCase(models.Model):

# Possible Verdicts
VERDICT = (
('F', 'Fail'),
('P', 'Pass'),
('F', 'Failed'),
('P', 'Passed'),
('R', 'Running'),
('TE', 'TLE'),
('ME', 'OOM'),
('CE', 'COMPILATION_ERROR'),
('RE', 'RUNTIME_ERROR'),
('NA', 'NOT_AVAILABLE'))
('TE', 'Time Limit Exceeded'),
('ME', 'Out Of Memory'),
('CE', 'Compilation Error'),
('RE', 'Runtime Error'),
('NA', 'Internal Failure'))

submission = models.ForeignKey(Submission, on_delete=models.CASCADE)
"""Foreign key to submission"""
Expand Down
4 changes: 2 additions & 2 deletions judge/templates/judge/base.html
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<title>{% block title %}Home{% endblock %} | PDP</title>
<title>{% block title %}Home{% endblock %} | AUTOJUDGE</title>

<!-- Fonts -->
<link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700" rel="stylesheet">
Expand All @@ -30,7 +30,7 @@
{% url 'judge:index' as homepage %}
<nav class="navbar navbar-expand-lg navbar-dark bg-default">
<div class="container-fluid">
<a class="navbar-brand" href="{{ homepage }}">PDP</a>
<a class="navbar-brand" href="{{ homepage }}">AUTOJUDGE</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbar-default"
aria-controls="navbar-default" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
Expand Down
4 changes: 2 additions & 2 deletions judge/templates/judge/problem_detail.html
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ <h4>Output Format</h4>
</tr>
<tr>
<th>Allowed file extensions</th>
<td>{{ problem.file_format }}</td>
<td>{{ problem.file_exts }}</td>
</tr>
</tbody>
</table>
Expand Down Expand Up @@ -228,7 +228,7 @@ <h6>Output</h6>
</div>
{% endif %}
<div class="col-12 mb-4">
{% if problem.start_code %}
{% if problem.starting_code %}
<a href="{% url 'judge:problem_starting_code' problem.pk %}" class="btn btn-default">Download starting code</a>
{% endif %}
{% if type == 'Poster' %}
Expand Down
Loading

0 comments on commit cf1e26a

Please sign in to comment.