def post(self, request, course_id):
    """
    Kicks off an asynchronous course import and returns an ID to be used to check
    the task's status
    """
    courselike_key = CourseKey.from_string(course_id)
    if not has_course_author_access(request.user, courselike_key):
        return self.make_error_response(
            status_code=status.HTTP_403_FORBIDDEN,
            developer_message='The user requested does not have the required permissions.',
            error_code='user_mismatch'
        )
    try:
        if 'course_data' not in request.FILES:
            return self.make_error_response(
                status_code=status.HTTP_400_BAD_REQUEST,
                developer_message='Missing required parameter',
                error_code='internal_error',
                field_errors={'course_data': '"course_data" parameter is required, and must be a .tar.gz file'}
            )
        filename = request.FILES['course_data'].name
        if not filename.endswith('.tar.gz'):
            return self.make_error_response(
                status_code=status.HTTP_400_BAD_REQUEST,
                developer_message='Parameter in the wrong format',
                error_code='internal_error',
                field_errors={'course_data': '"course_data" parameter is required, and must be a .tar.gz file'}
            )

        # Stage the uploaded .tar.gz on the local filesystem before copying it into import/export storage.
        course_dir = path(settings.GITHUB_REPO_ROOT) / base64.urlsafe_b64encode(repr(courselike_key))
        temp_filepath = course_dir / filename
        if not course_dir.isdir():  # pylint: disable=no-value-for-parameter
            os.mkdir(course_dir)

        log.debug('importing course to {0}'.format(temp_filepath))
        with open(temp_filepath, "wb+") as temp_file:
            for chunk in request.FILES['course_data'].chunks():
                temp_file.write(chunk)

        log.info("Course import %s: Upload complete", courselike_key)
        with open(temp_filepath, 'rb') as local_file:
            django_file = File(local_file)
            storage_path = course_import_export_storage.save(u'olx_import/' + filename, django_file)

        # Hand the import off to the Celery task and report its id back to the caller.
        async_result = import_olx.delay(
            request.user.id, text_type(courselike_key), storage_path, filename, request.LANGUAGE_CODE)
        return Response({
            'task_id': async_result.task_id
        })
    except Exception as e:
        return self.make_error_response(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            developer_message=str(e),
            error_code='internal_error'
        )
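
# --- Usage sketch (not part of the view above) -------------------------------
# A minimal client-side example of exercising this endpoint with the `requests`
# library. The URL path, the Bearer token header, and the helper name are
# assumptions for illustration only; the real route depends on how the view is
# wired up in urls.py.
import requests


def start_course_import(base_url, course_id, tarball_path, token):
    """Upload a .tar.gz and return the task_id handed back by the view."""
    # The uploaded file name must end with .tar.gz or the view rejects it.
    with open(tarball_path, 'rb') as course_data:
        response = requests.post(
            '{base}/api/courses/v0/import/{course}/'.format(base=base_url, course=course_id),
            files={'course_data': course_data},  # key must match request.FILES['course_data']
            headers={'Authorization': 'Bearer {0}'.format(token)},
        )
    response.raise_for_status()
    return response.json()['task_id']  # poll a status endpoint with this ID
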
def post(self, request, course_key):
    """
    Kicks off an asynchronous course import and returns an ID to be used to check
    the task's status
    """
    try:
        if 'course_data' not in request.FILES:
            raise self.api_error(
                status_code=status.HTTP_400_BAD_REQUEST,
                developer_message='Missing required parameter',
                error_code='internal_error',
            )
        filename = request.FILES['course_data'].name
        if not filename.endswith('.tar.gz'):
            raise self.api_error(
                status_code=status.HTTP_400_BAD_REQUEST,
                developer_message='Parameter in the wrong format',
                error_code='internal_error',
            )

        # Stage the uploaded .tar.gz on the local filesystem before copying it into import/export storage.
        course_dir = path(settings.GITHUB_REPO_ROOT) / base64.urlsafe_b64encode(repr(course_key))
        temp_filepath = course_dir / filename
        if not course_dir.isdir():
            os.mkdir(course_dir)

        log.debug(u'importing course to {0}'.format(temp_filepath))
        with open(temp_filepath, "wb+") as temp_file:
            for chunk in request.FILES['course_data'].chunks():
                temp_file.write(chunk)

        log.info(u"Course import %s: Upload complete", course_key)
        with open(temp_filepath, 'rb') as local_file:
            django_file = File(local_file)
            storage_path = course_import_export_storage.save(u'olx_import/' + filename, django_file)

        # Hand the import off to the Celery task and report its id back to the caller.
        async_result = import_olx.delay(
            request.user.id, text_type(course_key), storage_path, filename, request.LANGUAGE_CODE)
        return Response({
            'task_id': async_result.task_id
        })
    except Exception as e:
        log.exception(str(e))
        raise self.api_error(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            developer_message=str(e),
            error_code='internal_error'
        )
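
# --- Status-check sketch (illustration only) ---------------------------------
# The view above only returns a Celery task_id; a companion status endpoint has
# to look the task up again. This is a minimal sketch using the standard
# celery.result.AsyncResult API; the function name is an assumption and the
# actual status view may report progress in a different shape.
from celery.result import AsyncResult


def import_task_state(task_id):
    """Return the Celery state ('PENDING', 'STARTED', 'SUCCESS', 'FAILURE', ...) for a task."""
    result = AsyncResult(task_id)
    return {'task_id': task_id, 'state': result.state}
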
def _write_chunk(request, courselike_key):
    """
    Write the OLX file data chunk from the given request to the local filesystem.
    """
    # Upload .tar.gz to local filesystem for one-server installations not using S3 or Swift
    data_root = path(settings.GITHUB_REPO_ROOT)
    subdir = base64.urlsafe_b64encode(repr(courselike_key))
    course_dir = data_root / subdir
    filename = request.FILES['course-data'].name

    courselike_string = text_type(courselike_key) + filename
    # Do everything in a try-except block to make sure everything is properly cleaned up.
    try:
        # Use sessions to keep info about import progress
        _save_request_status(request, courselike_string, 0)

        if not filename.endswith('.tar.gz'):
            _save_request_status(request, courselike_string, -1)
            return JsonResponse(
                {
                    'ErrMsg': _('We only support uploading a .tar.gz file.'),
                    'Stage': -1
                },
                status=415
            )

        temp_filepath = course_dir / filename
        if not course_dir.isdir():
            os.mkdir(course_dir)

        logging.debug('importing course to {0}'.format(temp_filepath))

        # Get upload chunks byte ranges
        try:
            matches = CONTENT_RE.search(request.META["HTTP_CONTENT_RANGE"])
            content_range = matches.groupdict()
        except KeyError:    # Single chunk
            # no Content-Range header, so make one that will work
            content_range = {'start': 0, 'stop': 1, 'end': 2}

        # stream out the uploaded files in chunks to disk
        if int(content_range['start']) == 0:
            mode = "wb+"
        else:
            mode = "ab+"
            size = os.path.getsize(temp_filepath)
            # Check to make sure we haven't missed a chunk
            # This shouldn't happen, even if different instances are handling
            # the same session, but it's always better to catch errors earlier.
            if size < int(content_range['start']):
                _save_request_status(request, courselike_string, -1)
                log.warning(
                    "Reported range %s does not match size downloaded so far %s",
                    content_range['start'],
                    size
                )
                return JsonResponse(
                    {
                        'ErrMsg': _('File upload corrupted. Please try again'),
                        'Stage': -1
                    },
                    status=409
                )
            # The last request sometimes comes twice. This happens because
            # nginx sends a 499 error code when the response takes too long.
            elif size > int(content_range['stop']) and size == int(content_range['end']):
                return JsonResponse({'ImportStatus': 1})

        with open(temp_filepath, mode) as temp_file:
            for chunk in request.FILES['course-data'].chunks():
                temp_file.write(chunk)

        size = os.path.getsize(temp_filepath)

        if int(content_range['stop']) != int(content_range['end']) - 1:
            # More chunks coming
            return JsonResponse({
                "files": [{
                    "name": filename,
                    "size": size,
                    "deleteUrl": "",
                    "deleteType": "",
                    "url": reverse_course_url('import_handler', courselike_key),
                    "thumbnailUrl": ""
                }]
            })

        log.info("Course import %s: Upload complete", courselike_key)
        with open(temp_filepath, 'rb') as local_file:
            django_file = File(local_file)
            storage_path = course_import_export_storage.save(u'olx_import/' + filename, django_file)
        import_olx.delay(
            request.user.id, text_type(courselike_key), storage_path, filename, request.LANGUAGE_CODE)
    # Send errors to client with stage at which error occurred.
    except Exception as exception:   # pylint: disable=broad-except
        _save_request_status(request, courselike_string, -1)
        if course_dir.isdir():
            shutil.rmtree(course_dir)
            log.info("Course import %s: Temp data cleared", courselike_key)

        log.exception(
            "error importing course"
        )
        return JsonResponse(
            {
                'ErrMsg': str(exception),
                'Stage': -1
            },
            status=400
        )

    return JsonResponse({'ImportStatus': 1})
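
# --- Content-Range parsing sketch (illustration only) -------------------------
# _write_chunk relies on a module-level CONTENT_RE that extracts named groups
# 'start', 'stop', and 'end' from the Content-Range header of a chunked upload
# (e.g. "bytes 1000000-1999999/3000000"). The exact pattern lives elsewhere in
# the module; the regex and helper below are assumptions that match that header
# shape, shown to make the byte-range logic above easier to follow.
import re

CONTENT_RE_SKETCH = re.compile(r"(?P<start>\d+)-(?P<stop>\d+)/(?P<end>\d+)")


def parse_content_range(header):
    """Return {'start': ..., 'stop': ..., 'end': ...} for a Content-Range header."""
    return CONTENT_RE_SKETCH.search(header).groupdict()

# parse_content_range("bytes 1000000-1999999/3000000")
# -> {'start': '1000000', 'stop': '1999999', 'end': '3000000'}
# The final chunk satisfies stop == end - 1, which is why _write_chunk treats
# int(content_range['stop']) != int(content_range['end']) - 1 as "more chunks coming".
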