def test_get_async_csv_submission_status(self, AsyncResult):
    """A missing job uuid fails immediately; otherwise the mocked celery
    ``AsyncResult`` state drives the status payload returned."""
    # No uuid at all -> failure payload without touching celery.
    self.assertEqual(
        csv_import.get_async_csv_submission_status(None),
        {'error': u'Empty job uuid', 'job_status': 'FAILURE'})

    class _BacklogStub(object):
        def __init__(self):
            self.result = 0

        @property
        def state(self):
            # Reading the state overflows the broker backlog.
            raise BacklogLimitExceeded()

    # A backlog overflow while polling is reported as still PENDING.
    AsyncResult.return_value = _BacklogStub()
    self.assertEqual(
        csv_import.get_async_csv_submission_status('x-y-z'),
        {'job_status': 'PENDING'})

    class _SuccessStub(object):
        def __init__(self):
            self.result = self.state = 'SUCCESS'

    # A completed job echoes its state back as the job_status.
    AsyncResult.return_value = _SuccessStub()
    self.assertEqual(
        csv_import.get_async_csv_submission_status('x-y-z'),
        {'job_status': 'SUCCESS'})

    class _NumericStub(object):
        def __init__(self):
            self.result = self.state = 1

    # A non-string result is passed straight through unchanged.
    AsyncResult.return_value = _NumericStub()
    self.assertEqual(
        csv_import.get_async_csv_submission_status('x-y-z'), 1)
def test_get_async_csv_submission_status(self, AsyncResult):
    """Exercise ``get_async_csv_submission_status`` against mocked celery
    ``AsyncResult`` objects covering the empty-uuid, backlog/pending,
    success, progress and errored job states.
    """
    # No job uuid supplied -> the helper short-circuits with a failure.
    result = csv_import.get_async_csv_submission_status(None)
    self.assertEqual(result, {
        'error': 'Empty job uuid',
        'job_status': 'FAILURE'
    })

    class BacklogLimitExceededMockAsyncResult(object):
        # Mimics a result whose ``state`` read raises BacklogLimitExceeded.
        def __init__(self):
            self.result = 0

        @property
        def state(self):
            raise BacklogLimitExceeded()

    # A backlog overflow while polling is reported as PENDING.
    AsyncResult.return_value = BacklogLimitExceededMockAsyncResult()
    result = csv_import.get_async_csv_submission_status('x-y-z')
    self.assertEqual(result, {'job_status': 'PENDING'})

    class MockAsyncResult(object):
        # Successful job: ``get()`` yields the stored task payload.
        def __init__(self):
            self.result = self.state = 'SUCCESS'

        def get(self):
            return {'job_status': 'SUCCESS'}

    AsyncResult.return_value = MockAsyncResult()
    result = csv_import.get_async_csv_submission_status('x-y-z')
    self.assertEqual(result, {'job_status': 'SUCCESS'})

    class MockAsyncResult2(object):
        # In-flight job: progress metadata is exposed via ``info``.
        def __init__(self):
            self.result = self.state = 'PROGRESS'
            self.info = {
                "info": [],
                "job_status": "PROGRESS",
                "progress": 4000,
                "total": 70605
            }

    AsyncResult.return_value = MockAsyncResult2()
    result = csv_import.get_async_csv_submission_status('x-y-z')
    self.assertEqual(result, {
        'info': [],
        'job_status': 'PROGRESS',
        'progress': 4000,
        'total': 70605
    })

    class MockAsyncResultIOError(object):
        # Failed job: the raised exception instance is stored in ``result``.
        def __init__(self):
            self.result = IOError("File not found!")
            self.state = 'FAILURE'

    AsyncResult.return_value = MockAsyncResultIOError()
    result = csv_import.get_async_csv_submission_status('x-y-z')
    self.assertEqual(result, {
        'error': 'File not found!',
        'job_status': 'FAILURE'
    })
def test_get_async_csv_submission_status(self, AsyncResult):
    """Poll results for every job state using stubbed AsyncResult objects."""
    # Missing uuid -> failure payload, celery never consulted.
    self.assertEqual(
        csv_import.get_async_csv_submission_status(None),
        {'error': 'Empty job uuid', 'job_status': 'FAILURE'})

    class _BacklogStub(object):
        def __init__(self):
            self.result = 0

        @property
        def state(self):
            # Reading the state overflows the broker backlog.
            raise BacklogLimitExceeded()

    # Backlog overflow during polling maps to PENDING.
    AsyncResult.return_value = _BacklogStub()
    self.assertEqual(
        csv_import.get_async_csv_submission_status('x-y-z'),
        {'job_status': 'PENDING'})

    class _SuccessStub(object):
        def __init__(self):
            self.result = self.state = 'SUCCESS'

        def get(self):
            return {'job_status': 'SUCCESS'}

    AsyncResult.return_value = _SuccessStub()
    self.assertEqual(
        csv_import.get_async_csv_submission_status('x-y-z'),
        {'job_status': 'SUCCESS'})

    class _ProgressStub(object):
        def __init__(self):
            self.result = self.state = 'PROGRESS'
            self.info = {
                "info": [],
                "job_status": "PROGRESS",
                "progress": 4000,
                "total": 70605
            }

    # In-flight jobs surface their progress metadata.
    AsyncResult.return_value = _ProgressStub()
    self.assertEqual(
        csv_import.get_async_csv_submission_status('x-y-z'),
        {'info': [], 'job_status': 'PROGRESS',
         'progress': 4000, 'total': 70605})

    class _FailureStub(object):
        def __init__(self):
            self.result = IOError("File not found!")
            self.state = 'FAILURE'

    # A stored exception becomes the error message of a FAILURE payload.
    AsyncResult.return_value = _FailureStub()
    self.assertEqual(
        csv_import.get_async_csv_submission_status('x-y-z'),
        {'error': 'File not found!', 'job_status': 'FAILURE'})
def csv_import(self, request, *args, **kwargs):
    """
    Endpoint for CSV data imports

    Calls :py:func:`onadata.libs.utils.csv_import.submit_csv` for POST
    requests passing the `request.FILES.get('csv_file')` upload for
    import and
    :py:func:onadata.libs.utils.csv_import.get_async_csv_submission_status
    for GET requests passing `job_uuid` query param for job progress
    polling

    Returns a 200 response with the import/status payload, or 400 when
    the payload carries an ``error`` key.
    """
    resp = {}
    if request.method == 'GET':
        # Progress polling: look the celery job up by its uuid.
        resp.update(get_async_csv_submission_status(
            request.QUERY_PARAMS.get('job_uuid')))
    else:
        csv_file = request.FILES.get('csv_file', None)
        if csv_file is None:
            resp.update({u'error': u'csv_file field empty'})
        else:
            # Data rows only: the header line is excluded from the count.
            num_rows = sum(1 for row in csv_file) - 1
            # BUGFIX: counting rows above exhausts the uploaded file
            # object, so the importer would read nothing; rewind to the
            # start before handing the file on (later revisions of this
            # endpoint do this explicitly).
            csv_file.seek(0)
            if num_rows < settings.CSV_ROW_IMPORT_ASYNC_THRESHOLD:
                # Small uploads are imported synchronously in-request.
                resp.update(submit_csv(request.user.username,
                                       self.get_object(), csv_file))
            else:
                # Large uploads go through celery; return the task id so
                # the client can poll for progress via GET.
                resp.update(
                    {u'task_id': submit_csv_async.delay(
                        request.user.username, self.get_object(),
                        csv_file).task_id})

    return Response(
        data=resp,
        status=status.HTTP_200_OK if resp.get('error') is None
        else status.HTTP_400_BAD_REQUEST)
def csv_import(self, request, *args, **kwargs):
    """
    Endpoint for CSV data imports

    Calls :py:func:`onadata.libs.utils.csv_import.submit_csv` for POST
    requests passing the `request.FILES.get('csv_file')` upload for
    import and
    :py:func:onadata.libs.utils.csv_import.get_async_csv_submission_status
    for GET requests passing `job_uuid` query param for job progress
    polling

    Responds 200 with the import/status payload, or 400 when the payload
    contains an ``error`` key.
    """
    resp = {}
    if request.method == 'GET':
        # Progress polling: look the celery job up by its uuid.
        resp.update(
            get_async_csv_submission_status(
                request.QUERY_PARAMS.get('job_uuid')))
    else:
        csv_file = request.FILES.get('csv_file', None)
        if csv_file is None:
            resp.update({u'error': u'csv_file field empty'})
        else:
            # Data rows only: the header line is excluded from the count.
            # NOTE(review): this generator consumes the uploaded file and
            # no seek(0) follows before submit_csv — presumably the
            # importer rewinds internally; confirm, since later revisions
            # of this endpoint added an explicit seek.
            num_rows = sum(1 for row in csv_file) - 1
            if num_rows < settings.CSV_ROW_IMPORT_ASYNC_THRESHOLD:
                # Small uploads are imported synchronously in-request.
                resp.update(
                    submit_csv(request.user.username, self.get_object(),
                               csv_file))
            else:
                # Large uploads go through celery; the task id is
                # returned so the client can poll for progress via GET.
                resp.update({
                    u'task_id':
                    submit_csv_async.delay(request.user.username,
                                           self.get_object(),
                                           csv_file).task_id
                })

    return Response(data=resp,
                    status=status.HTTP_200_OK
                    if resp.get('error') is None else
                    status.HTTP_400_BAD_REQUEST)
def csv_import(self, request, *args, **kwargs):
    """
    Endpoint for CSV data imports

    Calls :py:func:`onadata.libs.utils.csv_import.submit_csv` for POST
    requests passing the `request.FILES.get('csv_file')` upload for
    import and
    :py:func:onadata.libs.utils.csv_import.get_async_csv_submission_status
    for GET requests passing `job_uuid` query param for job progress
    polling

    POST accepts an optional ``overwrite=true`` query param. Responds 200
    with the import/status payload, or 400 when it contains ``error``.
    """
    self.object = self.get_object()
    resp = {}
    if request.method == 'GET':
        # Progress polling: look the celery job up by its uuid.
        try:
            resp.update(
                get_async_csv_submission_status(
                    request.query_params.get('job_uuid')))
            self.last_modified_date = timezone.now()
        except ValueError:
            raise ParseError(('The instance of the result is not a '
                              'basestring; the job_uuid variable might '
                              'be incorrect'))
    else:
        csv_file = request.FILES.get('csv_file', None)
        if csv_file is None:
            resp.update({u'error': u'csv_file field empty'})
        elif csv_file.name.split('.')[-1] != CSV_EXTENSION:
            # Reject uploads whose filename extension is not csv.
            resp.update({u'error': u'csv_file not a csv file'})
        else:
            # Only the literal string 'true' (any case) enables overwrite.
            overwrite = request.query_params.get('overwrite')
            overwrite = True \
                if overwrite and overwrite.lower() == 'true' else False
            size_threshold = settings.CSV_FILESIZE_IMPORT_ASYNC_THRESHOLD
            if csv_file.size < size_threshold:
                # Small file: import synchronously within this request.
                resp.update(
                    submit_csv(request.user.username, self.object,
                               csv_file, overwrite))
            else:
                # Large file: rewind, persist the upload to default
                # storage, then hand the stored path to celery.
                csv_file.seek(0)
                upload_to = os.path.join(request.user.username,
                                         'csv_imports', csv_file.name)
                file_name = default_storage.save(upload_to, csv_file)
                task = submit_csv_async.delay(request.user.username,
                                              self.object.pk, file_name,
                                              overwrite)
                if task is None:
                    raise ParseError('Task not found')
                else:
                    # Task id allows the client to poll progress via GET.
                    resp.update({u'task_id': task.task_id})

    return Response(data=resp,
                    status=status.HTTP_200_OK
                    if resp.get('error') is None else
                    status.HTTP_400_BAD_REQUEST)
def csv_import(self, request, *args, **kwargs):
    """
    Endpoint for CSV data imports

    Calls :py:func:`onadata.libs.utils.csv_import.submit_csv` for POST
    requests passing the `request.FILES.get('csv_file')` upload for
    import and
    :py:func:onadata.libs.utils.csv_import.get_async_csv_submission_status
    for GET requests passing `job_uuid` query param for job progress
    polling

    POST accepts an optional ``overwrite=true`` query param. Responds 200
    with the import/status payload, or 400 when it carries ``error``.
    """
    self.object = self.get_object()
    resp = {}
    if request.method == 'GET':
        # Job-progress polling path.
        try:
            job_status = get_async_csv_submission_status(
                request.query_params.get('job_uuid'))
            resp.update(job_status)
            self.last_modified_date = timezone.now()
        except ValueError:
            raise ParseError(('The instance of the result is not a '
                              'basestring; the job_uuid variable might '
                              'be incorrect'))
    else:
        csv_file = request.FILES.get('csv_file', None)
        if csv_file is None:
            resp.update({u'error': u'csv_file field empty'})
        elif csv_file.name.split('.')[-1] != CSV_EXTENSION:
            # Filename must end in the csv extension.
            resp.update({u'error': u'csv_file not a csv file'})
        else:
            # Only a literal 'true' (any case) switches overwrite on.
            overwrite_param = request.query_params.get('overwrite')
            overwrite = bool(
                overwrite_param and overwrite_param.lower() == 'true')
            if csv_file.size < settings.CSV_FILESIZE_IMPORT_ASYNC_THRESHOLD:
                # Small file: import inline within this request.
                resp.update(submit_csv(request.user.username, self.object,
                                       csv_file, overwrite))
            else:
                # Large file: rewind, persist to storage, hand off to
                # celery and report the task id for later polling.
                csv_file.seek(0)
                upload_to = os.path.join(request.user.username,
                                         'csv_imports', csv_file.name)
                file_name = default_storage.save(upload_to, csv_file)
                task = submit_csv_async.delay(request.user.username,
                                              self.object.pk, file_name,
                                              overwrite)
                if task is None:
                    raise ParseError('Task not found')
                resp.update({u'task_id': task.task_id})

    ok = resp.get('error') is None
    return Response(
        data=resp,
        status=status.HTTP_200_OK if ok else status.HTTP_400_BAD_REQUEST)
def csv_import(self, request, *args, **kwargs):
    """
    Endpoint for CSV data imports

    Calls :py:func:`onadata.libs.utils.csv_import.submit_csv` for POST
    requests passing the `request.FILES.get('csv_file')` upload for
    import and
    :py:func:onadata.libs.utils.csv_import.get_async_csv_submission_status
    for GET requests passing `job_uuid` query param for job progress
    polling

    Returns a 200 response with the import/status payload, or 400 when
    the payload carries an ``error`` key.
    """
    self.object = self.get_object()
    resp = {}
    if request.method == 'GET':
        # Progress polling: look the celery job up by its uuid.
        try:
            resp.update(
                get_async_csv_submission_status(
                    request.query_params.get('job_uuid')))
            self.last_modified_date = timezone.now()
        except ValueError:
            raise ParseError(('The instance of the result is not a '
                              'basestring; the job_uuid variable might '
                              'be incorrect'))
    else:
        csv_file = request.FILES.get('csv_file', None)
        if csv_file is None:
            resp.update({u'error': u'csv_file field empty'})
        else:
            # Data rows only: the header line is excluded from the count.
            num_rows = sum(1 for row in csv_file) - 1
            # BUGFIX: counting rows above exhausts the uploaded file
            # object; rewind before either import path reads it (mirrors
            # the explicit seek later revisions of this endpoint use).
            csv_file.seek(0)
            if num_rows < settings.CSV_ROW_IMPORT_ASYNC_THRESHOLD:
                # Small uploads are imported synchronously in-request.
                resp.update(
                    submit_csv(request.user.username, self.object,
                               csv_file))
            else:
                # Large uploads: stage the file on disk and hand the
                # path to celery; return the task id for polling.
                tmp_file_path = utils.generate_tmp_path(csv_file)
                task = submit_csv_async.delay(request.user.username,
                                              self.object, tmp_file_path)
                if task is None:
                    raise ParseError('Task not found')
                else:
                    resp.update({u'task_id': task.task_id})

    return Response(data=resp,
                    status=status.HTTP_200_OK
                    if resp.get('error') is None else
                    status.HTTP_400_BAD_REQUEST)