class TaskViewSet(auth.TaskGetQuerySetMixin, viewsets.ModelViewSet):
    queryset = Task.objects.all().prefetch_related(
        "label_set__attributespec_set",
        "segment_set__job_set",
    ).order_by('-id')
    serializer_class = TaskSerializer
    search_fields = ("name", "owner__username", "mode", "status")
    filterset_class = TaskFilter
    ordering_fields = ("id", "name", "owner", "status", "assignee")

    def get_permissions(self):
        http_method = self.request.method
        permissions = [IsAuthenticated]

        if http_method in SAFE_METHODS:
            permissions.append(auth.TaskAccessPermission)
        elif http_method in ["POST"]:
            permissions.append(auth.TaskCreatePermission)
        elif self.action == 'annotations' or http_method in ["PATCH", "PUT"]:
            permissions.append(auth.TaskChangePermission)
        elif http_method in ["DELETE"]:
            permissions.append(auth.TaskDeletePermission)
        else:
            permissions.append(auth.AdminRolePermission)

        return [perm() for perm in permissions]

    def perform_create(self, serializer):
        def validate_task_limit(owner):
            admin_perm = auth.AdminRolePermission()
            is_admin = admin_perm.has_permission(self.request, self)
            if not is_admin and settings.RESTRICTIONS['task_limit'] is not None and \
                Task.objects.filter(owner=owner).count() >= settings.RESTRICTIONS['task_limit']:
                raise serializers.ValidationError(
                    'The user has the maximum number of tasks')

        owner = self.request.data.get('owner', None)
        if owner:
            validate_task_limit(owner)
            serializer.save()
        else:
            validate_task_limit(self.request.user)
            serializer.save(owner=self.request.user)

    def perform_destroy(self, instance):
        task_dirname = instance.get_task_dirname()
        super().perform_destroy(instance)
        shutil.rmtree(task_dirname, ignore_errors=True)
        if instance.data and not instance.data.tasks.all():
            shutil.rmtree(instance.data.get_data_dirname(), ignore_errors=True)
            instance.data.delete()

    @swagger_auto_schema(method='get',
        operation_summary='Returns a list of jobs for a specific task',
        responses={'200': JobSerializer(many=True)})
    @action(detail=True, methods=['GET'], serializer_class=JobSerializer)
    def jobs(self, request, pk):
        self.get_object()  # force to call check_object_permissions
        queryset = Job.objects.filter(segment__task_id=pk)
        serializer = JobSerializer(queryset, many=True,
            context={"request": request})

        return Response(serializer.data)

    @swagger_auto_schema(method='post',
        operation_summary='Method permanently attaches images or video to a task',
        request_body=DataSerializer,
    )
    @swagger_auto_schema(method='get',
        operation_summary='Method returns data for a specific task',
        manual_parameters=[
            openapi.Parameter('type', in_=openapi.IN_QUERY, required=True,
                type=openapi.TYPE_STRING, enum=['chunk', 'frame', 'preview'],
                description="Specifies the type of the requested data"),
            openapi.Parameter('quality', in_=openapi.IN_QUERY, required=True,
                type=openapi.TYPE_STRING, enum=['compressed', 'original'],
                description="Specifies the quality level of the requested data, doesn't matter for 'preview' type"),
            openapi.Parameter('number', in_=openapi.IN_QUERY, required=True,
                type=openapi.TYPE_NUMBER,
                description="A unique number value identifying chunk or frame, doesn't matter for 'preview' type"),
        ])
    @action(detail=True, methods=['POST', 'GET'])
    def data(self, request, pk):
        if request.method == 'POST':
            db_task = self.get_object()  # call check_object_permissions as well
            serializer = DataSerializer(data=request.data)
            serializer.is_valid(raise_exception=True)
            db_data = serializer.save()
            db_task.data = db_data
            db_task.save()
            data = {k: v for k, v in serializer.data.items()}
            data['use_zip_chunks'] = serializer.validated_data['use_zip_chunks']
            data['use_cache'] = serializer.validated_data['use_cache']
            if data['use_cache']:
                db_task.data.storage_method = StorageMethodChoice.CACHE
                db_task.data.save(update_fields=['storage_method'])
            # If the value of stop_frame is 0, inside the function we cannot tell
            # whether it was specified by the user or is the default from the database.
            if 'stop_frame' not in serializer.validated_data:
                data['stop_frame'] = None
            task.create(db_task.id, data)
            return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
        else:
            data_type = request.query_params.get('type', None)
            data_id = request.query_params.get('number', None)
            data_quality = request.query_params.get('quality', 'compressed')

            possible_data_type_values = ('chunk', 'frame', 'preview')
            possible_quality_values = ('compressed', 'original')

            if not data_type or data_type not in possible_data_type_values:
                return Response(data='data type not specified or has wrong value',
                    status=status.HTTP_400_BAD_REQUEST)
            elif data_type == 'chunk' or data_type == 'frame':
                if not data_id:
                    return Response(data='number not specified',
                        status=status.HTTP_400_BAD_REQUEST)
                elif data_quality not in possible_quality_values:
                    return Response(data='wrong quality value',
                        status=status.HTTP_400_BAD_REQUEST)

            try:
                db_task = self.get_object()
                db_data = db_task.data
                frame_provider = FrameProvider(db_task.data)

                if data_type == 'chunk':
                    data_id = int(data_id)
                    data_quality = FrameProvider.Quality.COMPRESSED \
                        if data_quality == 'compressed' else FrameProvider.Quality.ORIGINAL
                    # TODO: av.FFmpegError processing
                    if settings.USE_CACHE and db_data.storage_method == StorageMethodChoice.CACHE:
                        buff, mime_type = frame_provider.get_chunk(data_id, data_quality)
                        return HttpResponse(buff.getvalue(), content_type=mime_type)

                    # Follow symbolic links if the chunk is a link to a real image;
                    # otherwise mimetype detection inside sendfile will work incorrectly.
                    path = os.path.realpath(frame_provider.get_chunk(data_id, data_quality))
                    return sendfile(request, path)
                elif data_type == 'frame':
                    data_id = int(data_id)
                    data_quality = FrameProvider.Quality.COMPRESSED \
                        if data_quality == 'compressed' else FrameProvider.Quality.ORIGINAL
                    buf, mime = frame_provider.get_frame(data_id, data_quality)
                    return HttpResponse(buf.getvalue(), content_type=mime)
                elif data_type == 'preview':
                    return sendfile(request, frame_provider.get_preview())
                else:
                    return Response(data='unknown data type {}.'.format(data_type),
                        status=status.HTTP_400_BAD_REQUEST)
            except APIException as e:
                return Response(data=e.default_detail, status=e.status_code)
            except Exception as e:
                msg = 'cannot get requested data type: {}, number: {}, quality: {}'.format(
                    data_type, data_id, data_quality)
                slogger.task[pk].error(msg, exc_info=True)
                return Response(data=msg + '\n' + str(e),
                    status=status.HTTP_400_BAD_REQUEST)

    @swagger_auto_schema(method='get',
        operation_summary='Method allows to download task annotations',
        manual_parameters=[
            openapi.Parameter('format', openapi.IN_QUERY,
                description="Desired output format name\nYou can get the list of supported formats at:\n/server/annotation/formats",
                type=openapi.TYPE_STRING, required=False),
            openapi.Parameter('filename', openapi.IN_QUERY,
                description="Desired output file name",
                type=openapi.TYPE_STRING, required=False),
            openapi.Parameter('action', in_=openapi.IN_QUERY,
                description='Used to start downloading process after annotation file had been created',
                type=openapi.TYPE_STRING, required=False, enum=['download']),
        ],
        responses={
            '202': openapi.Response(description='Dump of annotations has been started'),
            '201': openapi.Response(description='Annotations file is ready to download'),
            '200': openapi.Response(description='Download of file started'),
            '405': openapi.Response(description='Format is not available'),
        })
    @swagger_auto_schema(method='put',
        operation_summary='Method allows to upload task annotations',
        manual_parameters=[
            openapi.Parameter('format', openapi.IN_QUERY,
                description="Input format name\nYou can get the list of supported formats at:\n/server/annotation/formats",
                type=openapi.TYPE_STRING, required=False),
        ],
        responses={
            '202': openapi.Response(description='Uploading has been started'),
            '201': openapi.Response(description='Uploading has finished'),
            '405': openapi.Response(description='Format is not available'),
        })
    @swagger_auto_schema(method='patch',
        operation_summary='Method performs a partial update of annotations in a specific task',
        manual_parameters=[
            openapi.Parameter('action', in_=openapi.IN_QUERY, required=True,
                type=openapi.TYPE_STRING, enum=['create', 'update', 'delete']),
        ])
    @swagger_auto_schema(method='delete',
        operation_summary='Method deletes all annotations for a specific task')
    @action(detail=True, methods=['GET', 'DELETE', 'PUT', 'PATCH'],
        serializer_class=LabeledDataSerializer)
    def annotations(self, request, pk):
        db_task = self.get_object()  # force to call check_object_permissions
        if request.method == 'GET':
            format_name = request.query_params.get('format')
            if format_name:
                return _export_annotations(db_task=db_task,
                    rq_id="/api/v1/tasks/{}/annotations/{}".format(pk, format_name),
                    request=request,
                    action=request.query_params.get("action", "").lower(),
                    callback=dm.views.export_task_annotations,
                    format_name=format_name,
                    filename=request.query_params.get("filename", "").lower(),
                )
            else:
                data = dm.task.get_task_data(pk)
                serializer = LabeledDataSerializer(data=data)
                if serializer.is_valid(raise_exception=True):
                    return Response(serializer.data)
        elif request.method == 'PUT':
            format_name = request.query_params.get('format')
            if format_name:
                return _import_annotations(
                    request=request,
                    rq_id="{}@/api/v1/tasks/{}/annotations/upload".format(request.user, pk),
                    rq_func=dm.task.import_task_annotations,
                    pk=pk,
                    format_name=format_name,
                )
            else:
                serializer = LabeledDataSerializer(data=request.data)
                if serializer.is_valid(raise_exception=True):
                    data = dm.task.put_task_data(pk, serializer.data)
                    return Response(data)
        elif request.method == 'DELETE':
            dm.task.delete_task_data(pk)
            return Response(status=status.HTTP_204_NO_CONTENT)
        elif request.method == 'PATCH':
            action = self.request.query_params.get("action", None)
            if action not in dm.task.PatchAction.values():
                raise serializers.ValidationError(
                    "Please specify a correct 'action' for the request")
            serializer = LabeledDataSerializer(data=request.data)
            if serializer.is_valid(raise_exception=True):
                try:
                    data = dm.task.patch_task_data(pk, serializer.data, action)
                except (AttributeError, IntegrityError) as e:
                    return Response(data=str(e), status=status.HTTP_400_BAD_REQUEST)
                return Response(data)

    @swagger_auto_schema(method='get',
        operation_summary='When task is being created the method returns information about a status of the creation process')
    @action(detail=True, methods=['GET'], serializer_class=RqStatusSerializer)
    def status(self, request, pk):
        self.get_object()  # force to call check_object_permissions
        response = self._get_rq_response(queue="default",
            job_id="/api/{}/tasks/{}".format(request.version, pk))
        serializer = RqStatusSerializer(data=response)

        if serializer.is_valid(raise_exception=True):
            return Response(serializer.data)

    @staticmethod
    def _get_rq_response(queue, job_id):
        queue = django_rq.get_queue(queue)
        job = queue.fetch_job(job_id)
        response = {}
        if job is None or job.is_finished:
            response = {"state": "Finished"}
        elif job.is_queued:
            response = {"state": "Queued"}
        elif job.is_failed:
            response = {"state": "Failed", "message": job.exc_info}
        else:
            response = {"state": "Started"}
            if 'status' in job.meta:
                response['message'] = job.meta['status']

        return response

    @staticmethod
    @swagger_auto_schema(method='get',
        operation_summary='Method provides a meta information about media files which are related with the task',
        responses={'200': DataMetaSerializer()})
    @action(detail=True, methods=['GET'], serializer_class=DataMetaSerializer,
        url_path='data/meta')
    def data_info(request, pk):
        db_task = models.Task.objects.prefetch_related(
            'data__images').select_related('data__video').get(pk=pk)

        if hasattr(db_task.data, 'video'):
            media = [db_task.data.video]
        else:
            media = list(db_task.data.images.order_by('frame'))

        frame_meta = [{
            'width': item.width,
            'height': item.height,
            'name': item.path,
        } for item in media]

        db_data = db_task.data
        db_data.frames = frame_meta

        serializer = DataMetaSerializer(db_data)
        return Response(serializer.data)

    @swagger_auto_schema(method='get',
        operation_summary='Export task as a dataset in a specific format',
        manual_parameters=[
            openapi.Parameter('format', openapi.IN_QUERY,
                description="Desired output format name\nYou can get the list of supported formats at:\n/server/annotation/formats",
                type=openapi.TYPE_STRING, required=True),
            openapi.Parameter('filename', openapi.IN_QUERY,
                description="Desired output file name",
                type=openapi.TYPE_STRING, required=False),
            openapi.Parameter('action', in_=openapi.IN_QUERY,
                description='Used to start downloading process after annotation file had been created',
                type=openapi.TYPE_STRING, required=False, enum=['download']),
        ],
        responses={
            '202': openapi.Response(description='Exporting has been started'),
            '201': openapi.Response(description='Output file is ready for downloading'),
            '200': openapi.Response(description='Download of file started'),
            '405': openapi.Response(description='Format is not available'),
        })
    @action(detail=True, methods=['GET'], serializer_class=None,
        url_path='dataset')
    def dataset_export(self, request, pk):
        db_task = self.get_object()  # force to call check_object_permissions

        format_name = request.query_params.get("format", "")
        return _export_annotations(db_task=db_task,
            rq_id="/api/v1/tasks/{}/dataset/{}".format(pk, format_name),
            request=request,
            action=request.query_params.get("action", "").lower(),
            callback=dm.views.export_task_as_dataset,
            format_name=format_name,
            filename=request.query_params.get("filename", "").lower(),
        )
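
# Illustrative client-side sketch (not part of the viewset): how the asynchronous
# annotation-export protocol above is meant to be consumed. The endpoint answers 202
# while the RQ job is still running, 201 once the file exists, and 200 when called
# again with action=download. The use of `requests`, the server URL, the auth token
# and the helper name are assumptions for illustration only.
import time
import requests

def download_task_annotations(server, task_id, fmt, token, out_path):
    """Poll GET /api/v1/tasks/{id}/annotations?format=... until the dump is ready."""
    url = '{}/api/v1/tasks/{}/annotations'.format(server, task_id)
    headers = {'Authorization': 'Token {}'.format(token)}

    # Start the dump and wait for the worker to finish (202 -> 201).
    while True:
        response = requests.get(url, headers=headers, params={'format': fmt})
        if response.status_code == 201:
            break
        response.raise_for_status()  # e.g. 405 if the format is not available
        time.sleep(1)

    # Ask the server to send the prepared file (action=download -> 200).
    response = requests.get(url, headers=headers,
        params={'format': fmt, 'action': 'download'})
    response.raise_for_status()
    with open(out_path, 'wb') as f:
        f.write(response.content)
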
class TaskViewSet(auth.TaskGetQuerySetMixin, viewsets.ModelViewSet):
    queryset = Task.objects.all().prefetch_related(
        "label_set__attributespec_set",
        "segment_set__job_set",
    ).order_by('-id')
    serializer_class = TaskSerializer
    search_fields = ("name", "owner__username", "mode", "status")
    filterset_class = TaskFilter
    ordering_fields = ("id", "name", "owner", "status", "assignee")

    def get_permissions(self):
        http_method = self.request.method
        permissions = [IsAuthenticated]

        if http_method in SAFE_METHODS:
            permissions.append(auth.TaskAccessPermission)
        elif http_method in ["POST"]:
            permissions.append(auth.TaskCreatePermission)
        elif self.action == 'annotations' or http_method in ["PATCH", "PUT"]:
            permissions.append(auth.TaskChangePermission)
        elif http_method in ["DELETE"]:
            permissions.append(auth.TaskDeletePermission)
        else:
            permissions.append(auth.AdminRolePermission)

        return [perm() for perm in permissions]

    def perform_create(self, serializer):
        if self.request.data.get('owner', None):
            serializer.save()
        else:
            serializer.save(owner=self.request.user)

    def perform_destroy(self, instance):
        task_dirname = instance.get_task_dirname()
        super().perform_destroy(instance)
        shutil.rmtree(task_dirname, ignore_errors=True)
        if instance.data and not instance.data.tasks.all():
            shutil.rmtree(instance.data.get_data_dirname(), ignore_errors=True)
            instance.data.delete()

    @swagger_auto_schema(method='get',
        operation_summary='Returns a list of jobs for a specific task',
        responses={'200': JobSerializer(many=True)})
    @action(detail=True, methods=['GET'], serializer_class=JobSerializer)
    def jobs(self, request, pk):
        self.get_object()  # force to call check_object_permissions
        queryset = Job.objects.filter(segment__task_id=pk)
        serializer = JobSerializer(queryset, many=True,
            context={"request": request})

        return Response(serializer.data)

    @swagger_auto_schema(method='post',
        operation_summary='Method permanently attaches images or video to a task')
    @swagger_auto_schema(method='get',
        operation_summary='Method returns data for a specific task',
        manual_parameters=[
            openapi.Parameter('type', in_=openapi.IN_QUERY, required=True,
                type=openapi.TYPE_STRING, enum=['chunk', 'frame', 'preview'],
                description="Specifies the type of the requested data"),
            openapi.Parameter('quality', in_=openapi.IN_QUERY, required=True,
                type=openapi.TYPE_STRING, enum=['compressed', 'original'],
                description="Specifies the quality level of the requested data, doesn't matter for 'preview' type"),
            openapi.Parameter('number', in_=openapi.IN_QUERY, required=True,
                type=openapi.TYPE_NUMBER,
                description="A unique number value identifying chunk or frame, doesn't matter for 'preview' type"),
        ])
    @action(detail=True, methods=['POST', 'GET'])
    def data(self, request, pk):
        if request.method == 'POST':
            db_task = self.get_object()  # call check_object_permissions as well
            serializer = DataSerializer(data=request.data)
            serializer.is_valid(raise_exception=True)
            db_data = serializer.save()
            db_task.data = db_data
            db_task.save()
            data = {k: v for k, v in serializer.data.items()}
            data['use_zip_chunks'] = serializer.validated_data['use_zip_chunks']
            # If the value of stop_frame is 0, inside the function we cannot tell
            # whether it was specified by the user or is the default from the database.
            if 'stop_frame' not in serializer.validated_data:
                data['stop_frame'] = None
            task.create(db_task.id, data)
            return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
        else:
            data_type = request.query_params.get('type', None)
            data_id = request.query_params.get('number', None)
            data_quality = request.query_params.get('quality', 'compressed')

            possible_data_type_values = ('chunk', 'frame', 'preview')
            possible_quality_values = ('compressed', 'original')

            if not data_type or data_type not in possible_data_type_values:
                return Response(data='data type not specified or has wrong value',
                    status=status.HTTP_400_BAD_REQUEST)
            elif data_type == 'chunk' or data_type == 'frame':
                if not data_id:
                    return Response(data='number not specified',
                        status=status.HTTP_400_BAD_REQUEST)
                elif data_quality not in possible_quality_values:
                    return Response(data='wrong quality value',
                        status=status.HTTP_400_BAD_REQUEST)

            try:
                db_task = self.get_object()
                frame_provider = FrameProvider(db_task.data)

                if data_type == 'chunk':
                    data_id = int(data_id)
                    data_quality = FrameProvider.Quality.COMPRESSED \
                        if data_quality == 'compressed' else FrameProvider.Quality.ORIGINAL
                    path = os.path.realpath(frame_provider.get_chunk(data_id, data_quality))

                    # Follow symbolic links if the chunk is a link to a real image;
                    # otherwise mimetype detection inside sendfile will work incorrectly.
                    return sendfile(request, path)
                elif data_type == 'frame':
                    data_id = int(data_id)
                    data_quality = FrameProvider.Quality.COMPRESSED \
                        if data_quality == 'compressed' else FrameProvider.Quality.ORIGINAL
                    buf, mime = frame_provider.get_frame(data_id, data_quality)
                    return HttpResponse(buf.getvalue(), content_type=mime)
                elif data_type == 'preview':
                    return sendfile(request, frame_provider.get_preview())
                else:
                    return Response(data='unknown data type {}.'.format(data_type),
                        status=status.HTTP_400_BAD_REQUEST)
            except APIException as e:
                return Response(data=e.default_detail, status=e.status_code)
            except Exception as e:
                msg = 'cannot get requested data type: {}, number: {}, quality: {}'.format(
                    data_type, data_id, data_quality)
                slogger.task[pk].error(msg, exc_info=True)
                return Response(data=msg + '\n' + str(e),
                    status=status.HTTP_400_BAD_REQUEST)

    @swagger_auto_schema(method='get',
        operation_summary='Method returns annotations for a specific task')
    @swagger_auto_schema(method='put',
        operation_summary='Method performs an update of all annotations in a specific task')
    @swagger_auto_schema(method='patch',
        operation_summary='Method performs a partial update of annotations in a specific task',
        manual_parameters=[
            openapi.Parameter('action', in_=openapi.IN_QUERY, required=True,
                type=openapi.TYPE_STRING, enum=['create', 'update', 'delete']),
        ])
    @swagger_auto_schema(method='delete',
        operation_summary='Method deletes all annotations for a specific task')
    @action(detail=True, methods=['GET', 'DELETE', 'PUT', 'PATCH'],
        serializer_class=LabeledDataSerializer)
    def annotations(self, request, pk):
        self.get_object()  # force to call check_object_permissions
        if request.method == 'GET':
            data = annotation.get_task_data(pk, request.user)
            serializer = LabeledDataSerializer(data=data)
            if serializer.is_valid(raise_exception=True):
                return Response(serializer.data)
        elif request.method == 'PUT':
            if request.query_params.get("format", ""):
                return load_data_proxy(
                    request=request,
                    rq_id="{}@/api/v1/tasks/{}/annotations/upload".format(request.user, pk),
                    rq_func=annotation.load_task_data,
                    pk=pk,
                )
            else:
                serializer = LabeledDataSerializer(data=request.data)
                if serializer.is_valid(raise_exception=True):
                    data = annotation.put_task_data(pk, request.user, serializer.data)
                    return Response(data)
        elif request.method == 'DELETE':
            annotation.delete_task_data(pk, request.user)
            return Response(status=status.HTTP_204_NO_CONTENT)
        elif request.method == 'PATCH':
            action = self.request.query_params.get("action", None)
            if action not in annotation.PatchAction.values():
                raise serializers.ValidationError(
                    "Please specify a correct 'action' for the request")
            serializer = LabeledDataSerializer(data=request.data)
            if serializer.is_valid(raise_exception=True):
                try:
                    data = annotation.patch_task_data(pk, request.user,
                        serializer.data, action)
                except (AttributeError, IntegrityError) as e:
                    return Response(data=str(e), status=status.HTTP_400_BAD_REQUEST)
                return Response(data)

    @swagger_auto_schema(method='get',
        operation_summary='Method allows to download annotations as a file',
        manual_parameters=[
            openapi.Parameter('filename', openapi.IN_PATH,
                description="A name of a file with annotations",
                type=openapi.TYPE_STRING, required=True),
            openapi.Parameter('format', openapi.IN_QUERY,
                description="A name of a dumper\nYou can get annotation dumpers from this API:\n/server/annotation/formats",
                type=openapi.TYPE_STRING, required=True),
            openapi.Parameter('action', in_=openapi.IN_QUERY,
                description='Used to start downloading process after annotation file had been created',
                required=False, enum=['download'], type=openapi.TYPE_STRING),
        ],
        responses={
            '202': openapi.Response(description='Dump of annotations has been started'),
            '201': openapi.Response(description='Annotations file is ready to download'),
            '200': openapi.Response(description='Download of file started'),
        })
    @action(detail=True, methods=['GET'], serializer_class=None,
        url_path='annotations/(?P<filename>[^/]+)')
    def dump(self, request, pk, filename):
        """
        Dumping annotations is usually a long process that cannot be performed
        within a single request. The first request starts the dumping process.
        When the file is ready (code 201), you can download it with the query
        parameter action=download.
        """
        filename = re.sub(r'[\\/*?:"<>|]', '_', filename)
        username = request.user.username
        db_task = self.get_object()  # call check_object_permissions as well
        timestamp = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
        action = request.query_params.get("action")
        if action not in [None, "download"]:
            raise serializers.ValidationError(
                "Please specify a correct 'action' for the request")

        dump_format = request.query_params.get("format", "")
        try:
            db_dumper = AnnotationDumper.objects.get(display_name=dump_format)
        except ObjectDoesNotExist:
            raise serializers.ValidationError(
                "Please specify a correct 'format' parameter for the request")

        file_path = os.path.join(db_task.get_task_artifacts_dirname(),
            "{}.{}.{}.{}".format(filename, username, timestamp,
                db_dumper.format.lower()))

        queue = django_rq.get_queue("default")
        rq_id = "{}@/api/v1/tasks/{}/annotations/{}/{}".format(username, pk,
            dump_format, filename)
        rq_job = queue.fetch_job(rq_id)

        if rq_job:
            if rq_job.is_finished:
                if not rq_job.meta.get("download"):
                    if action == "download":
                        rq_job.meta[action] = True
                        rq_job.save_meta()
                        return sendfile(request, rq_job.meta["file_path"],
                            attachment=True,
                            attachment_filename="{}.{}".format(
                                filename, db_dumper.format.lower()))
                    else:
                        return Response(status=status.HTTP_201_CREATED)
                else:  # Remove the old dump file
                    try:
                        os.remove(rq_job.meta["file_path"])
                    except OSError:
                        pass
                    finally:
                        rq_job.delete()
            elif rq_job.is_failed:
                exc_info = str(rq_job.exc_info)
                rq_job.delete()
                return Response(data=exc_info,
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                return Response(status=status.HTTP_202_ACCEPTED)

        rq_job = queue.enqueue_call(
            func=annotation.dump_task_data,
            args=(pk, request.user, file_path, db_dumper,
                  request.scheme, request.get_host()),
            job_id=rq_id,
        )
        rq_job.meta["file_path"] = file_path
        rq_job.save_meta()

        return Response(status=status.HTTP_202_ACCEPTED)

    @swagger_auto_schema(method='get',
        operation_summary='When task is being created the method returns information about a status of the creation process')
    @action(detail=True, methods=['GET'], serializer_class=RqStatusSerializer)
    def status(self, request, pk):
        self.get_object()  # force to call check_object_permissions
        response = self._get_rq_response(queue="default",
            job_id="/api/{}/tasks/{}".format(request.version, pk))
        serializer = RqStatusSerializer(data=response)

        if serializer.is_valid(raise_exception=True):
            return Response(serializer.data)

    @staticmethod
    def _get_rq_response(queue, job_id):
        queue = django_rq.get_queue(queue)
        job = queue.fetch_job(job_id)
        response = {}
        if job is None or job.is_finished:
            response = {"state": "Finished"}
        elif job.is_queued:
            response = {"state": "Queued"}
        elif job.is_failed:
            response = {"state": "Failed", "message": job.exc_info}
        else:
            response = {"state": "Started"}
            if 'status' in job.meta:
                response['message'] = job.meta['status']

        return response

    @staticmethod
    @swagger_auto_schema(method='get',
        operation_summary='Method provides a meta information about media files which are related with the task',
        responses={'200': DataMetaSerializer()})
    @action(detail=True, methods=['GET'], serializer_class=DataMetaSerializer,
        url_path='data/meta')
    def data_info(request, pk):
        db_task = models.Task.objects.prefetch_related(
            'data__images').select_related('data__video').get(pk=pk)

        if hasattr(db_task.data, 'video'):
            media = [db_task.data.video]
        else:
            media = list(db_task.data.images.order_by('frame'))

        frame_meta = [{
            'width': item.width,
            'height': item.height,
            'name': item.path,
        } for item in media]

        db_data = db_task.data
        db_data.frames = frame_meta

        serializer = DataMetaSerializer(db_data)
        return Response(serializer.data)

    @swagger_auto_schema(method='get',
        operation_summary='Export task as a dataset in a specific format',
        manual_parameters=[
            openapi.Parameter('action', in_=openapi.IN_QUERY,
                required=False, type=openapi.TYPE_STRING, enum=['download']),
            openapi.Parameter('format', in_=openapi.IN_QUERY,
                required=False, type=openapi.TYPE_STRING),
        ],
        responses={
            '202': openapi.Response(description='Dump of annotations has been started'),
            '201': openapi.Response(description='Annotations file is ready to download'),
            '200': openapi.Response(description='Download of file started'),
        })
    @action(detail=True, methods=['GET'], serializer_class=None,
        url_path='dataset')
    def dataset_export(self, request, pk):
        db_task = self.get_object()

        action = request.query_params.get("action", "")
        action = action.lower()
        if action not in ["", "download"]:
            raise serializers.ValidationError(
                "Unexpected parameter 'action' specified for the request")

        dst_format = request.query_params.get("format", "")
        if not dst_format:
            dst_format = DatumaroTask.DEFAULT_FORMAT
        dst_format = dst_format.lower()
        if dst_format not in [f['tag'] for f in DatumaroTask.get_export_formats()]:
            raise serializers.ValidationError(
                "Unexpected parameter 'format' specified for the request")

        rq_id = "/api/v1/tasks/{}/dataset/{}".format(pk, dst_format)
        queue = django_rq.get_queue("default")

        rq_job = queue.fetch_job(rq_id)
        if rq_job:
            last_task_update_time = timezone.localtime(db_task.updated_date)
            request_time = rq_job.meta.get('request_time', None)
            if request_time is None or request_time < last_task_update_time:
                rq_job.cancel()
                rq_job.delete()
            else:
                if rq_job.is_finished:
                    file_path = rq_job.return_value
                    if action == "download" and osp.exists(file_path):
                        rq_job.delete()

                        timestamp = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
                        filename = "task_{}-{}-{}.zip".format(
                            db_task.name, timestamp, dst_format)
                        return sendfile(request, file_path, attachment=True,
                            attachment_filename=filename.lower())
                    else:
                        if osp.exists(file_path):
                            return Response(status=status.HTTP_201_CREATED)
                elif rq_job.is_failed:
                    exc_info = str(rq_job.exc_info)
                    rq_job.delete()
                    return Response(exc_info,
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
                else:
                    return Response(status=status.HTTP_202_ACCEPTED)

        try:
            server_address = request.get_host()
        except Exception:
            server_address = None

        ttl = DatumaroTask.CACHE_TTL.total_seconds()
        queue.enqueue_call(func=DatumaroTask.export_project,
            args=(pk, request.user, dst_format, server_address), job_id=rq_id,
            meta={'request_time': timezone.localtime()},
            result_ttl=ttl, failure_ttl=ttl)

        return Response(status=status.HTTP_202_ACCEPTED)
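
# Illustrative client-side sketch (not part of the viewset): attaching data to a task
# and waiting for the creation job to finish. POST /api/v1/tasks/{id}/data answers 202
# and enqueues task.create(); GET /api/v1/tasks/{id}/status reports the RQ job state
# ("Queued", "Started", "Finished" or "Failed", optionally with a "message"). The use
# of `requests`, the server URL, the auth token, the `data_spec` payload and the helper
# name are assumptions for illustration only.
import time
import requests

def attach_data_and_wait(server, task_id, data_spec, token, poll_interval=3):
    headers = {'Authorization': 'Token {}'.format(token)}

    # Kick off server-side data preparation (chunking, previews, etc.).
    response = requests.post('{}/api/v1/tasks/{}/data'.format(server, task_id),
        headers=headers, json=data_spec)
    response.raise_for_status()  # expected: 202 Accepted

    # Poll the creation status until the RQ job finishes or fails.
    status_url = '{}/api/v1/tasks/{}/status'.format(server, task_id)
    while True:
        state = requests.get(status_url, headers=headers).json()
        if state['state'] == 'Finished':
            return state
        if state['state'] == 'Failed':
            raise RuntimeError(state.get('message', 'task creation failed'))
        time.sleep(poll_interval)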