Example #1
    def test_submit_files(self):
        """
        Submitted files should be uploaded to the storage backend.
        """
        payload = self.valid_payload.copy()
        upload = ContentFile("TESTING", name="test")
        upload.seek(0)
        payload["upload"] = upload
        response = self._submit(payload)
        self.assertEqual(response["return_code"], 0)  # success

        # Check that the file was actually uploaded
        _, files = default_storage.listdir("tmp/")
        key = make_hashkey(payload["xqueue_header"] + "upload")
        self.assertIn(key, files)
Example #2
    def test_submit_many_files(self):
        '''
        Submitted files should be uploaded to the storage backend.
        '''
        payload = self.valid_payload.copy()
        upload = ContentFile('TESTING', name='test')
        upload.seek(0)
        payload['upload'] = upload
        response = self._submit(payload)
        self.assertEqual(response['return_code'], 0)  # success

        # Check that the file was actually uploaded
        _, files = default_storage.listdir('tmp/')
        key = make_hashkey(payload['xqueue_header'] + 'upload')
        self.assertIn(key, files)
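
The test methods above (and the one in Example #3 below) rely on a `_submit` helper and a `valid_payload` fixture that are not part of this listing. A minimal sketch of what such a helper could look like, assuming Django's test client and a URL named 'submit'; both names here are illustrative, not the project's actual test code:

# Hypothetical sketch of the _submit helper the tests above rely on.
# The URL name 'submit' and the JSON reply envelope are assumptions based
# on the views shown later in this listing.
import json

from django.test import TestCase
from django.urls import reverse


class SubmitTestMixin(TestCase):
    def _submit(self, payload):
        # POST the payload (including any file uploads) to the submit view
        # and decode the JSON reply produced by compose_reply().
        response = self.client.post(reverse('submit'), payload)
        return json.loads(response.content.decode('utf-8'))
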
Example #3
    def test_submit_files(self):
        '''
        Submitted files should be uploaded to the storage backend.
        '''

        # 0) filename contains ascii characters only
        # 1) filename contains utf-8 characters
        files = [
            ContentFile('TESTING', name='test'),
            ContentFile(u'测试', name=u'测试')
        ]

        for upload in files:
            payload = self.valid_payload.copy()
            upload.seek(0)
            payload['upload'] = upload
            response = self._submit(payload)
            self.assertEqual(response['return_code'], 0)  # success

            # Check that the file was actually uploaded
            _, stored_files = default_storage.listdir('tmp/')
            key = make_hashkey(payload['xqueue_header'] + 'upload')
            self.assertIn(key, stored_files)
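
The key asserted in these tests comes from `make_hashkey`, which is not shown here. A minimal sketch of such a helper, assuming it produces a hex digest of the concatenated seed string; the actual hash algorithm used by xqueue may differ:

# Hypothetical sketch of a make_hashkey helper consistent with the asserts
# above: a deterministic hex digest of an arbitrary seed string.
import hashlib


def make_hashkey(seed):
    hasher = hashlib.md5()
    hasher.update(str(seed).encode('utf-8'))
    return hasher.hexdigest()
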
Example #4
def get_submission(request):
    '''
    Retrieve a single submission from queue named by GET['queue_name'].
    '''
    try:
        queue_name = request.GET['queue_name']
    except KeyError:
        return HttpResponse(
            compose_reply(
                False, "'get_submission' must provide parameter 'queue_name'"))

    if queue_name not in settings.XQUEUES:
        return HttpResponse(
            compose_reply(False, "Queue '%s' not found" % queue_name))
    else:
        # Try to pull a single item from named queue
        got_submission, submission = \
            Submission.objects.get_single_unretired_submission(queue_name)

        if not got_submission:
            return HttpResponse(
                compose_reply(False, "Queue '%s' is empty" % queue_name))
        else:
            # Collect info on pull event
            grader_id = get_request_ip(request)
            pull_time = timezone.now()

            pullkey = make_hashkey(str(pull_time) + str(submission.id))

            submission.grader_id = grader_id
            submission.pull_time = pull_time
            submission.pullkey = pullkey

            submission.save()

            # Prepare payload to external grader
            ext_header = {
                'submission_id': submission.id,
                'submission_key': pullkey
            }
            urls = json.loads(submission.urls) if submission.urls else {}

            # Because this code assumes there is a URL to fetch (traditionally out of S3)
            # it doesn't play well for ContentFile users in tests or local use.
            # ContentFile handles uploads well, but hands along file paths in /tmp rather than
            # URLs, see lms_interface.
            if "URL_FOR_EXTERNAL_DICTS" in submission.urls:
                url = urls["URL_FOR_EXTERNAL_DICTS"]
                timeout = 2
                try:
                    r = requests.get(url, timeout=timeout)
                    success = True
                except (ConnectionError, Timeout):
                    success = False
                    log.error(
                        'Could not fetch uploaded files at %s in timeout=%f' %
                        (url, timeout))
                    return HttpResponse(
                        compose_reply(
                            False,
                            "Error fetching submission for %s. Please try again."
                            % queue_name))

                if (r.status_code not in [200]) or (not success):
                    log.error(
                        'Could not fetch uploaded files at %s. Status code: %d'
                        % (url, r.status_code))
                    return HttpResponse(
                        compose_reply(
                            False,
                            "Error fetching submission for %s. Please try again."
                            % queue_name))

                xqueue_files = json.dumps(json.loads(r.text)["files"])
            else:
                xqueue_files = submission.urls

            payload = {
                'xqueue_header': json.dumps(ext_header),
                'xqueue_body': submission.xqueue_body,
                'xqueue_files': xqueue_files
            }

            return HttpResponse(
                compose_reply(True, content=json.dumps(payload)))
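
For context, an external grader consumes this view by polling it over HTTP and unpacking the nested JSON payload. A hedged sketch of such a client, assuming a return_code/content reply envelope and an /xqueue/get_submission/ path; the base URL, the path, and the absence of authentication are placeholders:

# Hypothetical polling client for the get_submission view above.
# XQUEUE_URL, the endpoint path, and the missing authentication are
# illustrative assumptions, not the documented grader protocol.
import json

import requests

XQUEUE_URL = 'http://localhost:18040'


def pull_submission(queue_name):
    reply = requests.get(
        XQUEUE_URL + '/xqueue/get_submission/',
        params={'queue_name': queue_name},
        timeout=5,
    ).json()
    if reply['return_code'] != 0:
        return None  # queue empty, unknown queue, or fetch error
    payload = json.loads(reply['content'])
    header = json.loads(payload['xqueue_header'])
    return header['submission_id'], header['submission_key'], payload['xqueue_body']
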
Example #5
def get_submission(request):
    '''
    Retrieve a single submission from queue named by GET['queue_name'].
    '''
    try:
        queue_name = request.GET['queue_name']
    except KeyError:
        return HttpResponse(compose_reply(False, "'get_submission' must provide parameter 'queue_name'"))

    if queue_name not in settings.XQUEUES:
        return HttpResponse(compose_reply(False, "Queue '%s' not found" % queue_name))
    else:
        # Try to pull a single item from named queue
        (got_submission, submission) = Submission.objects.get_single_unretired_submission(queue_name)

        if not got_submission:
            return HttpResponse(compose_reply(False, "Queue '%s' is empty" % queue_name))
        else:
            # Collect info on pull event
            grader_id = get_request_ip(request)
            pull_time = timezone.now()

            pullkey = make_hashkey(str(pull_time)+str(submission.id))

            submission.grader_id = grader_id
            submission.pull_time = pull_time
            submission.pullkey = pullkey

            submission.save()

            # Prepare payload to external grader
            ext_header = {'submission_id': submission.id, 'submission_key': pullkey}
            urls = json.loads(submission.urls) if submission.urls else {}

            # Because this code assumes there is a URL to fetch (traditionally out of S3)
            # it doesn't play well for ContentFile users in tests or local use.
            # ContentFile handles uploads well, but hands along file paths in /tmp rather than
            # URLs, see lms_interface.
            if "URL_FOR_EXTERNAL_DICTS" in submission.urls:
                url = urls["URL_FOR_EXTERNAL_DICTS"]
                timeout = 2
                try:
                    r = requests.get(url, timeout=timeout)
                    success = True
                except (ConnectionError, Timeout):
                    success = False
                    log.error('Could not fetch uploaded files at %s in timeout=%f' % (url, timeout))
                    return HttpResponse(
                        compose_reply(False, "Error fetching submission for %s. Please try again." % queue_name)
                    )

                if (r.status_code not in [200]) or (not success):
                    log.error('Could not fetch uploaded files at %s. Status code: %d' % (url, r.status_code))
                    return HttpResponse(
                        compose_reply(False, "Error fetching submission for %s. Please try again." % queue_name)
                    )

                xqueue_files = json.dumps(json.loads(r.text)["files"])
            else:
                xqueue_files = submission.urls

            payload = {'xqueue_header': json.dumps(ext_header),
                       'xqueue_body': submission.xqueue_body,
                       'xqueue_files': xqueue_files}

            return HttpResponse(compose_reply(True, content=json.dumps(payload)))
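
Both views wrap their responses in `compose_reply`, which is not included in this listing. A minimal sketch consistent with the tests above, which treat a `return_code` of 0 as success; the envelope field names beyond that are assumptions:

# Hypothetical sketch of compose_reply: a JSON envelope with a numeric
# return code (0 = success, 1 = failure) and a free-form content field.
import json


def compose_reply(success, content=''):
    return json.dumps({
        'return_code': 0 if success else 1,
        'content': content,
    })
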
Example #6
def submit(request):
    '''
    Handle submissions to Xqueue from the LMS
    '''
    if request.method != 'POST':
        transaction.commit()
        return HttpResponse(
            compose_reply(False, 'Queue requests should use HTTP POST'))
    else:
        # queue_name, xqueue_header, xqueue_body are all serialized
        (request_is_valid, lms_callback_url, queue_name, xqueue_header,
         xqueue_body) = _is_valid_request(request.POST)

        if not request_is_valid:
            log.error(
                "Invalid queue submission from LMS: lms ip: {0}, request.POST: {1}"
                .format(
                    get_request_ip(request),
                    request.POST,
                ))
            transaction.commit()
            return HttpResponse(
                compose_reply(False, 'Queue request has invalid format'))
        else:
            if queue_name not in settings.XQUEUES:
                transaction.commit()
                return HttpResponse(
                    compose_reply(False, "Queue '%s' not found" % queue_name))
            else:
                # Limit DOS attacks by invalidating prior submissions from the
                #   same (user, module-id) pair as encoded in the lms_callback_url
                _invalidate_prior_submissions(lms_callback_url)

                # Check for file uploads
                keys = dict()  # For internal Xqueue use
                urls = dict()  # For external grader use
                for filename in request.FILES.keys():
                    key = make_hashkey(xqueue_header + filename)
                    url = _upload(request.FILES[filename], queue_name, key)
                    keys.update({filename: key})
                    urls.update({filename: url})

                urls_json = json.dumps(urls)
                keys_json = json.dumps(keys)

                if len(urls_json) > CHARFIELD_LEN_LARGE:
                    key = make_hashkey(xqueue_header +
                                       json.dumps(list(request.FILES.keys())))
                    url = _upload_file_dict(urls, keys, queue_name, key)
                    keys = {"KEY_FOR_EXTERNAL_DICTS": key}
                    urls = {"URL_FOR_EXTERNAL_DICTS": url}
                    urls_json = json.dumps(urls)
                    keys_json = json.dumps(keys)

                # Track the submission in the Submission database
                submission = Submission(
                    requester_id=get_request_ip(request),
                    lms_callback_url=lms_callback_url[:128],
                    queue_name=queue_name,
                    xqueue_header=xqueue_header,
                    xqueue_body=xqueue_body,
                    s3_urls=urls_json,
                    s3_keys=keys_json)
                submission.save()
                # Explicit commit to DB before inserting submission.id into queue
                transaction.commit()

                qcount = Submission.objects.get_queue_length(queue_name)

                # For a successful submission, return the count of prior items
                return HttpResponse(
                    compose_reply(success=True, content="%d" % qcount))
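
On the LMS side, a submission to this view is a plain HTTP POST carrying the serialized header and body plus any file uploads. A hedged sketch of such a call, assuming the header fields conventionally checked by `_is_valid_request`; the endpoint URL, callback URL, and key are placeholders:

# Hypothetical LMS-side POST to the submit view above. The endpoint path,
# header fields, and callback values are assumptions for illustration.
import json

import requests

data = {
    'xqueue_header': json.dumps({
        'lms_callback_url': 'http://localhost:8000/grade_callback',
        'lms_key': 'example-key',
        'queue_name': 'test_queue',
    }),
    'xqueue_body': json.dumps({'student_response': 'print("hello")'}),
}
files = {'upload': ('solution.py', b'print("hello")')}

reply = requests.post(
    'http://localhost:18040/xqueue/submit/',
    data=data,
    files=files,
    timeout=5,
).json()
print(reply['return_code'], reply['content'])  # 0 and the queue length on success
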
Example #7
def submit(request):
    '''
    Handle submissions to Xqueue from the LMS
    '''
    if request.method != 'POST':
        transaction.commit()
        return HttpResponse(compose_reply(False, 'Queue requests should use HTTP POST'))
    else:
        # queue_name, xqueue_header, xqueue_body are all serialized
        (request_is_valid, lms_callback_url, queue_name, xqueue_header, xqueue_body) = _is_valid_request(request.POST)

        if not request_is_valid:
            log.error("Invalid queue submission from LMS: lms ip: {0}, request.POST: {1}".format(
                get_request_ip(request),
                request.POST,
            ))
            transaction.commit()
            return HttpResponse(compose_reply(False, 'Queue request has invalid format'))
        else:
            if queue_name not in settings.XQUEUES:
                transaction.commit()
                return HttpResponse(compose_reply(False, "Queue '%s' not found" % queue_name))
            else:
                # Limit DOS attacks by invalidating prior submissions from the
                #   same (user, module-id) pair as encoded in the lms_callback_url
                _invalidate_prior_submissions(lms_callback_url)

                # Check for file uploads
                keys = dict()  # For internal Xqueue use
                urls = dict()  # For external grader use
                for filename in request.FILES.keys():
                    key = make_hashkey(xqueue_header + filename)
                    url = _upload(request.FILES[filename], queue_name, key)
                    keys.update({filename: key})
                    urls.update({filename: url})

                urls_json = json.dumps(urls)
                keys_json = json.dumps(keys)

                if len(urls_json) > CHARFIELD_LEN_LARGE:
                    key = make_hashkey(xqueue_header + json.dumps(list(request.FILES.keys())))
                    url = _upload_file_dict(urls, keys, queue_name, key)
                    keys = {"KEY_FOR_EXTERNAL_DICTS": key}
                    urls = {"URL_FOR_EXTERNAL_DICTS": url}
                    urls_json = json.dumps(urls)
                    keys_json = json.dumps(keys)

                # Track the submission in the Submission database
                submission = Submission(requester_id=get_request_ip(request),
                                        lms_callback_url=lms_callback_url[:128],
                                        queue_name=queue_name,
                                        xqueue_header=xqueue_header,
                                        xqueue_body=xqueue_body,
                                        s3_urls=urls_json,
                                        s3_keys=keys_json)
                submission.save()
                transaction.commit()  # Explicit commit to DB before inserting submission.id into queue

                qcount = Submission.objects.get_queue_length(queue_name)

                # For a successful submission, return the count of prior items
                return HttpResponse(compose_reply(success=True, content="%d" % qcount))
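
The `_upload` helper used above is not part of this listing; it stores each uploaded file in the configured storage backend and returns a location the grader can fetch. A minimal sketch under that assumption; the path layout and return value are guesses consistent with the earlier tests:

# Hypothetical sketch of an _upload helper: save the file under a
# per-queue path in Django's default storage backend and return its URL.
import os

from django.core.files.storage import default_storage


def _upload(file_to_upload, path, name):
    full_path = os.path.join(path, name)
    saved_name = default_storage.save(full_path, file_to_upload)
    return default_storage.url(saved_name)
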