async def receive(self, text_data=None, bytes_data=None):
    user_pk = self.scope['url_route']['kwargs']['user_pk']
    submission_id = self.scope['url_route']['kwargs']['submission_id']

    try:
        sub = Submission.objects.get(pk=submission_id)
    except Submission.DoesNotExist:
        return await self.close()

    if sub.phase.hide_output and not sub.phase.competition.user_has_admin_permission(user_pk):
        return

    submission_output_path = os.path.join(settings.TEMP_SUBMISSION_STORAGE, f"{submission_id}.txt")
    os.makedirs(os.path.dirname(submission_output_path), exist_ok=True)

    data = json.loads(text_data)
    if data['kind'] == 'detailed_result_update':
        # Reuse the submission fetched above rather than querying a second time
        data['result_url'] = make_url_sassy(sub.detailed_result.name)
        # Update text_data to include the newly added SAS URL for retrieval on page refresh
        text_data = json.dumps(data)

    async with aiofiles.open(submission_output_path, 'a+') as f:
        await f.write(f'{text_data}\n')

    await self.channel_layer.group_send(
        f"submission_listening_{user_pk}",
        {
            'type': 'submission.message',
            'text': data,
            'submission_id': submission_id,
        }
    )
def create_detailed_output_file(detail_name, submission):
    # Detail logs like stdout/etc.
    new_details = SubmissionDetails.objects.create(submission=submission, name=detail_name)
    # Must encode here (bytes, not str) for GCS
    new_details.data_file.save(f'{detail_name}.txt', ContentFile(''.encode()))
    return make_url_sassy(new_details.data_file.name, permission="w")
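Nothing writes through Django here; the caller PUTs bytes straight to the write-signed URL. A minimal worker-side sketch, assuming the `requests` library is available; `push_detail_log` is an illustrative helper, not part of this codebase:

import requests

def push_detail_log(signed_url, text):
    # Hypothetical helper: upload captured stdout/stderr to the write-signed
    # URL returned by create_detailed_output_file(). Azure blob endpoints
    # also require 'x-ms-blob-type: BlockBlob'; GCS signed URLs accept a
    # bare PUT and ignore the header.
    resp = requests.put(
        signed_url,
        data=text.encode('utf-8'),
        headers={'x-ms-blob-type': 'BlockBlob'},
    )
    resp.raise_for_status()
    return resp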
def get_files(self, request, pk):
    qs_helper = Competition.objects.select_related('created_by').prefetch_related('dumps__dataset')
    competition = qs_helper.get(id=pk)

    if request.user != competition.created_by and request.user not in competition.collaborators.all() and not request.user.is_superuser:
        raise PermissionDenied("You don't have access to the competition files")

    bundle = competition.bundle_dataset
    files = {'dumps': []}
    if bundle:
        files['bundle'] = {
            'name': bundle.name,
            'url': make_url_sassy(bundle.data_file.name),
        }
    for dump in competition.dumps.all():
        files['dumps'].append({
            'name': dump.dataset.name,
            'url': make_url_sassy(dump.dataset.data_file.name),
        })
    return Response(files)
def create(self, request, *args, **kwargs):
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    # request_sassy_file_name is temporarily set via this serializer
    new_dataset = serializer.save()
    headers = self.get_success_headers(serializer.data)

    # Make an empty placeholder so we can sign a URL allowing us to upload to it
    sassy_file_name = os.path.basename(new_dataset.request_sassy_file_name)
    # Encoding here helps GCS do the upload; it complains with
    # `TypeError: ('`data` must be bytes, received', <class 'str'>)` otherwise
    new_dataset.data_file.save(sassy_file_name, ContentFile(''.encode()))

    context = {
        "key": new_dataset.key,
        "sassy_url": make_url_sassy(new_dataset.data_file.name, 'w'),
    }
    return Response(context, status=status.HTTP_201_CREATED, headers=headers)
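For illustration, the client half of this exchange might look like the sketch below; the endpoint path, payload fields, and auth scheme are assumptions for the sketch, not the project's documented API:

import requests

def upload_dataset(api_base, token, file_path):
    # Step 1 (assumed endpoint): create the dataset record; the response
    # carries the dataset key and a write-signed `sassy_url`
    resp = requests.post(
        f"{api_base}/datasets/",
        headers={"Authorization": f"Token {token}"},
        json={"request_sassy_file_name": file_path},
    )
    resp.raise_for_status()
    context = resp.json()

    # Step 2: PUT the real file contents over the empty placeholder
    with open(file_path, "rb") as f:
        put_resp = requests.put(context["sassy_url"], data=f)
    put_resp.raise_for_status()
    return context["key"]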
def handle(self, *args, **options):
    backup_file_name = options['backup_path']
    backup_path = os.path.join("/app/backups", backup_file_name)

    # Upload it
    upload_url = make_url_sassy(f'backups/{backup_file_name}', permission='w', content_type=None)
    print(f"Uploading backup '{backup_path}' to '{upload_url}'")
    resp = put_blob(upload_url, backup_path)
    if resp.status_code == 200:
        print("Success!")
    else:
        print(f"FAILED TO SEND! Result ({resp.status_code}):\n{resp.content}")

    # Clean up
    print(f"Removing local dump file '{backup_path}'")
    os.remove(backup_path)
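`put_blob` is a project helper defined elsewhere; a plausible minimal implementation, offered as an assumption rather than the actual code:

import requests

def put_blob(url, file_path):
    # Assumed implementation: stream the local file to the write-signed URL.
    # The 'x-ms-blob-type' header is required by Azure blob storage and
    # harmless elsewhere.
    with open(file_path, 'rb') as f:
        return requests.put(url, data=f, headers={'x-ms-blob-type': 'BlockBlob'})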
def _send_submission(submission, task, is_scoring, run_args):
    # Create empty placeholder files so write URLs can be signed for them.
    # Content must be encoded here (bytes, not str) for GCS.
    if not submission.detailed_result.name and submission.phase.competition.enable_detailed_results:
        submission.detailed_result.save('detailed_results.html', ContentFile(''.encode()))
        submission.save(update_fields=['detailed_result'])
    if not submission.prediction_result.name:
        submission.prediction_result.save('prediction_result.zip', ContentFile(''.encode()))
        submission.save(update_fields=['prediction_result'])
    if not submission.scoring_result.name:
        submission.scoring_result.save('scoring_result.zip', ContentFile(''.encode()))
        submission.save(update_fields=['scoring_result'])

    # Re-fetch so the freshly saved file fields are up to date
    submission = Submission.objects.get(id=submission.id)

    if not is_scoring:
        run_args['prediction_result'] = make_url_sassy(
            path=submission.prediction_result.name,
            permission='w'
        )
    else:
        if submission.phase.competition.enable_detailed_results:
            run_args['detailed_results_url'] = make_url_sassy(
                path=submission.detailed_result.name,
                permission='w',
                content_type=''
            )
        run_args['prediction_result'] = make_url_sassy(
            path=submission.prediction_result.name,
            permission='r'
        )
        run_args['scoring_result'] = make_url_sassy(
            path=submission.scoring_result.name,
            permission='w'
        )

    if task.ingestion_program:
        if (task.ingestion_only_during_scoring and is_scoring) or (not task.ingestion_only_during_scoring and not is_scoring):
            run_args['ingestion_program'] = make_url_sassy(task.ingestion_program.data_file.name)

    if task.input_data and (not is_scoring or task.ingestion_only_during_scoring):
        run_args['input_data'] = make_url_sassy(task.input_data.data_file.name)

    if is_scoring and task.reference_data:
        run_args['reference_data'] = make_url_sassy(task.reference_data.data_file.name)

    run_args['ingestion_only_during_scoring'] = task.ingestion_only_during_scoring
    run_args['program_data'] = make_url_sassy(
        path=submission.data.data_file.name if not is_scoring else task.scoring_program.data_file.name
    )

    if not is_scoring:
        detail_names = SubmissionDetails.DETAILED_OUTPUT_NAMES_PREDICTION
    else:
        detail_names = SubmissionDetails.DETAILED_OUTPUT_NAMES_SCORING

    for detail_name in detail_names:
        run_args[detail_name] = create_detailed_output_file(detail_name, submission)

    logger.info(f"Task data for submission id = {submission.id}")
    logger.info(run_args)

    # Pad the time limit so the worker has time to clean up
    time_padding = 60 * 20  # 20 minutes
    time_limit = submission.phase.execution_time_limit + time_padding

    if submission.phase.competition.queue:
        submission.queue_name = submission.phase.competition.queue.name or ''
        submission.save()

        # Send to the competition's dedicated queue. Named `celery_app` here
        # to avoid shadowing the imported `app` variable above.
        celery_app = app_or_default()
        with celery_app.connection() as new_connection:
            new_connection.virtual_host = str(submission.phase.competition.queue.vhost)
            task = celery_app.send_task(
                'compute_worker_run',
                args=(run_args,),
                queue='compute-worker',
                soft_time_limit=time_limit,
                connection=new_connection
            )
    else:
        task = app.send_task(
            'compute_worker_run',
            args=(run_args,),
            queue='compute-worker',
            soft_time_limit=time_limit
        )

    submission.celery_task_id = task.id
    submission.status = Submission.SUBMITTED
    submission.save()
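On the other side of the queue, the compute worker (a separate codebase) consumes these URLs over plain HTTP; the sketch below is an assumption about that flow, using the read-signed 'program_data' entry as an example:

import requests

def fetch_program_data(run_args, dest_path):
    # Hypothetical worker-side download: 'program_data' was signed with
    # the default read permission, so a bare GET retrieves the archive
    resp = requests.get(run_args['program_data'], stream=True)
    resp.raise_for_status()
    with open(dest_path, 'wb') as f:
        for chunk in resp.iter_content(chunk_size=8192):
            f.write(chunk)
    return dest_path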
def download(request, key):
    data = get_object_or_404(Data, key=key)
    return HttpResponseRedirect(make_url_sassy(data.data_file.name))
def get_scoring_result(self, instance):
    if instance.scoring_result.name:
        if instance.phase.hide_output and not instance.phase.competition.user_has_admin_permission(self.context['request'].user):
            return None
        return make_url_sassy(instance.scoring_result.name)
def get_detailed_result(self, instance):
    if instance.detailed_result.name:
        return make_url_sassy(instance.detailed_result.name)
def get_data_file(self, instance):
    return make_url_sassy(instance.data.data_file.name)