def put_result(request):
    '''
    Accept a grading result POSTed back by an external (pull) grader,
    relay it to the LMS, and track delivery failures so that stale
    submissions are eventually auto-retired.
    '''
    if request.method != 'POST':
        return HttpResponse(compose_reply(False, "'put_result' must use HTTP POST"))

    (reply_is_valid, submission_id, submission_key, grader_reply) = _is_valid_reply(request.POST)

    if not reply_is_valid:
        log.error("Invalid reply from pull-grader: grader_id: {0} request.POST: {1}".format(
            get_request_ip(request),
            request.POST,
        ))
        return HttpResponse(compose_reply(False, 'Incorrect reply format'))

    # Lock the row so concurrent replies for the same submission serialize
    try:
        submission = Submission.objects.select_for_update().get(id=submission_id)
    except Submission.DoesNotExist:
        log.error("Grader submission_id refers to nonexistent entry in Submission DB: grader: {0}, submission_id: {1}, submission_key: {2}, grader_reply: {3}".format(
            get_request_ip(request),
            submission_id,
            submission_key,
            grader_reply
        ))
        return HttpResponse(compose_reply(False, 'Submission does not exist'))

    # The key handed out when the grader pulled the submission must match
    if not submission.pullkey or submission_key != submission.pullkey:
        return HttpResponse(compose_reply(False, 'Incorrect key for submission'))

    submission.return_time = timezone.now()
    submission.grader_reply = grader_reply

    # Deliver grading results to LMS
    lms_ack = submission_queue.consumer.post_grade_to_lms(submission.xqueue_header, grader_reply)
    submission.lms_ack = lms_ack

    # Keep track of how many times we've failed to return a grade for this
    # submission to the LMS.
    if not lms_ack:
        submission.num_failures += 1

    # Auto-retire a submission if it fails to make it back to the LMS enough
    # times. This can be because it's an old submission and the course changed
    # structure (causing a 404) or because the LMS is throwing errors. The
    # combination of MAX_NUMBER_OF_FAILURES and SUBMISSION_PROCESSING_DELAY
    # tells you how long a period of time a submission can be graded over
    # before it's auto-retired.
    submission.retired = True if submission.num_failures > settings.MAX_NUMBER_OF_FAILURES else submission.lms_ack

    submission.save()
    return HttpResponse(compose_reply(success=True, content=''))
def get_queuelen(request):
    '''
    Report how many submissions are waiting in the queue named by
    GET['queue_name']. A missing parameter or an unknown queue name
    yields an error reply that lists the valid queue names.
    '''
    queue_name = request.GET.get('queue_name')
    if queue_name is None:
        return HttpResponse(compose_reply(False, "'get_queuelen' must provide parameter 'queue_name'"))

    if queue_name not in settings.XQUEUES:
        return HttpResponse(compose_reply(False, 'Valid queue names are: ' + ', '.join(list(settings.XQUEUES.keys()))))

    job_count = Submission.objects.get_queue_length(queue_name)
    return HttpResponse(compose_reply(True, job_count))
def submit(request):
    '''
    Handle submissions to Xqueue from the LMS

    Expects an HTTP POST whose body (validated by _is_valid_request) carries
    the LMS callback URL, target queue name, and serialized xqueue
    header/body.  Uploaded files are stashed externally and their keys/URLs
    recorded on the Submission row.  Replies with compose_reply(); on
    success the content is the current queue length.
    NOTE(review): the explicit transaction.commit() calls suggest this view
    runs under manual transaction management — confirm the decorator/middleware
    on this view before reordering anything here.
    '''
    if request.method != 'POST':
        transaction.commit()
        return HttpResponse(
            compose_reply(False, 'Queue requests should use HTTP POST'))
    else:
        # queue_name, xqueue_header, xqueue_body are all serialized
        (request_is_valid, lms_callback_url, queue_name, xqueue_header, xqueue_body) = _is_valid_request(request.POST)

        if not request_is_valid:
            log.error(
                "Invalid queue submission from LMS: lms ip: {0}, request.POST: {1}"
                .format(
                    get_request_ip(request),
                    request.POST,
                ))
            transaction.commit()
            return HttpResponse(
                compose_reply(False, 'Queue request has invalid format'))
        else:
            if queue_name not in settings.XQUEUES:
                transaction.commit()
                return HttpResponse(
                    compose_reply(False, "Queue '%s' not found" % queue_name))
            else:
                # Limit DOS attacks by invalidating prior submissions from the
                # same (user, module-id) pair as encoded in the lms_callback_url
                _invalidate_prior_submissions(lms_callback_url)

                # Check for file uploads
                keys = dict()  # For internal Xqueue use
                urls = dict()  # For external grader use
                for filename in request.FILES.keys():
                    # Content-addressed key: derived from the xqueue header + filename
                    key = make_hashkey(xqueue_header + filename)
                    url = _upload(request.FILES[filename], queue_name, key)
                    keys.update({filename: key})
                    urls.update({filename: url})
                urls_json = json.dumps(urls)
                keys_json = json.dumps(keys)

                # If the serialized URL map won't fit in the DB column, upload
                # the whole dict externally and store a single indirection
                # key/URL pair instead.
                if len(urls_json) > CHARFIELD_LEN_LARGE:
                    key = make_hashkey(xqueue_header + json.dumps(list(request.FILES.keys())))
                    url = _upload_file_dict(urls, keys, queue_name, key)
                    keys = {"KEY_FOR_EXTERNAL_DICTS": key}
                    urls = {"URL_FOR_EXTERNAL_DICTS": url}
                    urls_json = json.dumps(urls)
                    keys_json = json.dumps(keys)

                # Track the submission in the Submission database
                # NOTE(review): lms_callback_url is truncated to 128 chars,
                # presumably the column width — confirm against the model.
                submission = Submission(
                    requester_id=get_request_ip(request),
                    lms_callback_url=lms_callback_url[:128],
                    queue_name=queue_name,
                    xqueue_header=xqueue_header,
                    xqueue_body=xqueue_body,
                    s3_urls=urls_json,
                    s3_keys=keys_json)
                submission.save()
                transaction.commit()  # Explicit commit to DB before inserting submission.id into queue

                qcount = Submission.objects.get_queue_length(queue_name)

                # For a successful submission, return the count of prior items
                return HttpResponse(
                    compose_reply(success=True, content="%d" % qcount))
def get_submission(request):
    '''
    Retrieve a single submission from queue named by GET['queue_name'].

    Marks the pulled submission with the grader's IP, the pull time, and a
    one-time pull key (which the grader must echo back in put_result).
    When the submission's uploaded files were stored behind an external
    dict URL, the file manifest is fetched over HTTP before replying.
    '''
    try:
        queue_name = request.GET['queue_name']
    except KeyError:
        return HttpResponse(
            compose_reply(
                False,
                "'get_submission' must provide parameter 'queue_name'"))

    if queue_name not in settings.XQUEUES:
        return HttpResponse(
            compose_reply(False, "Queue '%s' not found" % queue_name))
    else:
        # Try to pull a single item from named queue
        (got_submission, submission) = Submission.objects.get_single_unretired_submission(queue_name)

        if not got_submission:
            return HttpResponse(
                compose_reply(False, "Queue '%s' is empty" % queue_name))
        else:
            # Collect info on pull event
            grader_id = get_request_ip(request)
            pull_time = timezone.now()

            # Pull key ties this checkout to the eventual put_result reply
            pullkey = make_hashkey(str(pull_time) + str(submission.id))

            submission.grader_id = grader_id
            submission.pull_time = pull_time
            submission.pullkey = pullkey
            submission.save()

            # Prepare payload to external grader
            ext_header = {
                'submission_id': submission.id,
                'submission_key': pullkey
            }
            urls = json.loads(submission.urls) if submission.urls else {}

            # Because this code assumes there is a URL to fetch (traditionally out of S3)
            # it doesn't play well for ContentFile users in tests or local use.
            # ContentFile handles uploads well, but hands along file paths in /tmp rather than
            # URLs, see lms_interface.
            # NOTE(review): this tests the sentinel as a substring of the raw
            # JSON string rather than as a key of the parsed `urls` dict —
            # equivalent in practice, but worth confirming.
            if "URL_FOR_EXTERNAL_DICTS" in submission.urls:
                url = urls["URL_FOR_EXTERNAL_DICTS"]
                timeout = 2  # seconds; requests.get aborts after this
                try:
                    r = requests.get(url, timeout=timeout)
                    success = True
                except (ConnectionError, Timeout):
                    success = False
                    log.error(
                        'Could not fetch uploaded files at %s in timeout=%f' % (url, timeout))
                    return HttpResponse(
                        compose_reply(
                            False,
                            "Error fetching submission for %s. Please try again." % queue_name))

                # NOTE(review): the except branch above returns, so `success`
                # is always True here and the `not success` term is dead.
                if (r.status_code not in [200]) or (not success):
                    log.error(
                        'Could not fetch uploaded files at %s. Status code: %d' % (url, r.status_code))
                    return HttpResponse(
                        compose_reply(
                            False,
                            "Error fetching submission for %s. Please try again."
                            % queue_name))

                # The external dict body carries a "files" manifest to forward
                xqueue_files = json.dumps(json.loads(r.text)["files"])
            else:
                # Files stored inline: pass the stored URL map through as-is
                xqueue_files = submission.urls

            payload = {
                'xqueue_header': json.dumps(ext_header),
                'xqueue_body': submission.xqueue_body,
                'xqueue_files': xqueue_files
            }
            return HttpResponse(
                compose_reply(True, content=json.dumps(payload)))