def test_json_base64_identity(self):
    self.assertEquals(self._ob, _json_base64_decode(
        _json_base64_encode(self._ob)))
    self.assertEquals(self._ob_json, _json_base64_encode(
        _json_base64_decode(self._ob_json)))
    for j in self._jsons:
        self.assertEquals(j, _json_base64_encode(_json_base64_decode(j)))
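# A minimal sketch of what the helpers exercised above are assumed to do:
# ``_json_base64_encode`` dumps an object to JSON with every string value
# base64-encoded, and ``_json_base64_decode`` reverses that (optionally
# wrapping decoded values in ``Base64String``). Names mirror the tests;
# the real implementations in oioioi.zeus may differ in detail.
import base64
import json


class Base64String(str):
    """Marks a string value that travels base64-encoded inside JSON."""
    pass


def _json_base64_encode(obj):
    # Base64-encode string leaves (keys stay plain), then dump to JSON.
    def encode(o):
        if isinstance(o, str):
            return base64.b64encode(o.encode('utf-8')).decode('ascii')
        if isinstance(o, dict):
            return {k: encode(v) for k, v in o.items()}
        if isinstance(o, list):
            return [encode(v) for v in o]
        return o
    return json.dumps(encode(obj), sort_keys=True)


def _json_base64_decode(data, wrap=False):
    # Parse JSON and base64-decode string leaves back to plain strings.
    def decode(o):
        if isinstance(o, str):
            raw = base64.b64decode(o).decode('utf-8')
            return Base64String(raw) if wrap else raw
        if isinstance(o, dict):
            return {k: decode(v) for k, v in o.items()}
        if isinstance(o, list):
            return [decode(v) for v in o]
        return o
    return decode(json.loads(data))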
def push_grade(request, saved_environ_id, signature):
    # TODO: might use some kind of url signing decorator and skip
    # arguments from url
    if not verify_zeus_url_signature(saved_environ_id, signature):
        raise PermissionDenied

    # This message may be useful for debugging in case decoding fails.
    logger.info('BEFORE DECODING BODY')
    body = _json_base64_decode(request.body)
    logger.info(' >>>> ')
    logger.info(body)
    logger.info(' <<<< ')

    # Create a small ``env`` that will be used to resume the job. Actual
    # results processing is done in oioioi.zeus.handlers.restore_job.
    env = {'saved_environ_id': saved_environ_id}
    if 'compilation_output' in body:
        env['compilation_result'] = 'CE'
        env['reports'] = []
    else:
        env['compilation_result'] = 'OK'
        env['reports'] = list(_get_key(body, 'tests_info'))
    env['compilation_message'] = body.get('compilation_output', '')

    delay_environ(env)
    return HttpResponse('Recorded!')
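# Both push_grade variants gate on ``verify_zeus_url_signature``. A minimal
# sketch of one way such a check could work, assuming an HMAC-SHA1 over the
# id keyed with Django's SECRET_KEY; the real helper in oioioi.zeus may
# compute its signature differently.
import hmac
from hashlib import sha1

from django.conf import settings


def verify_zeus_url_signature(saved_environ_id, signature):
    expected = hmac.new(
        settings.SECRET_KEY.encode('utf-8'),
        str(saved_environ_id).encode('utf-8'),
        sha1,
    ).hexdigest()
    # Constant-time comparison to avoid leaking the signature via timing.
    return hmac.compare_digest(expected, signature)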
def push_grade(request, check_uid, signature):
    # TODO: might use some kind of url signing decorator and skip
    # arguments from url
    if not verify_zeus_url_signature(check_uid, signature):
        raise PermissionDenied

    # This message may be useful for debugging in case decoding fails.
    logger.info('BEFORE DECODING BODY')
    body = _json_base64_decode(request.body)
    logger.info(' >>>> ')
    logger.info(body)
    logger.info(' <<<< ')

    if 'compilation_output' in body:
        compilation_result = 'CE'
    else:
        compilation_result = 'OK'

    if compilation_result == 'OK':
        reports = _get_key(body, 'tests_info')
    else:
        reports = []

    try:
        async_job, created = ZeusAsyncJob.objects.select_for_update() \
                .get_or_create(check_uid=check_uid)
    except IntegrityError:
        # This should never happen.
        logger.error("IntegrityError while saving results for %s",
                     check_uid, exc_info=True)
        logger.error("Received reports:\n%s", reports)
        return HttpResponse("Recorded!")

    if async_job.resumed:
        logger.debug("Got results for %s again, ignoring", check_uid)
        return HttpResponse("Recorded!")

    if not created:
        logger.info("Resuming job %s", check_uid)
        env = json.loads(async_job.environ)
        env.setdefault('zeus_results', [])
        env['compilation_result'] = compilation_result
        env['compilation_message'] = body.get('compilation_output', '')
        env['zeus_results'].extend(list(reports))
        postpone(env)
        async_job.environ = json.dumps(env)
        async_job.resumed = True
        async_job.save()
    else:
        # The code below solves a race condition in case Zeus
        # does the callback before ZeusAsyncJob is created in handlers.
        async_job.environ = json.dumps({'zeus_results': list(reports)})
        async_job.save()

    # TODO: return a brief text response in case of a failure
    # (Internal Server Error or Permission Denied).
    # Currently we respond with the default human-readable HTML.
    return HttpResponse("Recorded!")
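# The view above keys its bookkeeping on ``ZeusAsyncJob`` rows. A minimal
# sketch of such a model, inferred from the fields used there
# (``check_uid``, ``environ``, ``resumed``); the field types are assumptions
# and the actual definition in oioioi.zeus.models may differ.
from django.db import models


class ZeusAsyncJob(models.Model):
    # Unique id of the Zeus check; get_or_create() above relies on its
    # uniqueness so that concurrent callbacks hit the same row.
    check_uid = models.IntegerField(primary_key=True)
    # JSON-serialized evaluation environment saved by the handlers.
    environ = models.TextField(blank=True)
    # Set once the saved environ has been resumed, so a repeated callback
    # from Zeus is ignored.
    resumed = models.BooleanField(default=False)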
def setUp(self):
    self._ob = {
        u'dict': {u'key': Base64String('somestring')},
        u'key': Base64String('string'),
    }
    self._ob_json = \
        '{"dict": {"key": "c29tZXN0cmluZw=="}, "key": "c3RyaW5n"}'
    with open('oioioi/zeus/fixtures/test_zeus_data.json') as f:
        self._jsons = [case['result'] for case in json.load(f)]
    self._json_base64_decode = lambda v: _json_base64_decode(v, wrap=True)
def test_json_base64_decode(self):
    self.assertEquals(self._ob, _json_base64_decode(self._ob_json))
    for j in self._jsons:
        _json_base64_decode(j)