def write_plupload(request, pub_name):
    """file upload for plupload"""
    logger.info("Starting write plupload")
    pub = models.Publisher.objects.get(name=pub_name)
    logger.debug("%s %s" % (str(type(request.REQUEST['meta'])), request.REQUEST['meta']))
    logger.debug("Publisher Plupload started")

    if request.method == 'POST':
        name = request.REQUEST.get('name', '')
        uploaded_file = request.FILES['file']
        if not name:
            name = uploaded_file.name
        logger.debug("plupload name = '%s'" % name)

        #check to see if a user has uploaded a file before, and if they have
        #not, make them an upload directory
        upload_dir = "/results/referenceLibrary/temp"
        if not os.path.exists(upload_dir):
            return render_to_json({"error": "upload path does not exist"})

        dest_path = os.path.join(upload_dir, name)
        logger.debug("plupload destination = '%s'" % dest_path)

        chunk = request.REQUEST.get('chunk', '0')
        chunks = request.REQUEST.get('chunks', '0')
        logger.debug("plupload chunk %s %s of %s" % (str(type(chunk)), str(chunk), str(chunks)))
        debug = [chunk, chunks]

        #write the first chunk fresh, append every subsequent chunk
        with open(dest_path, ('wb' if chunk == '0' else 'ab')) as f:
            for content in uploaded_file.chunks():
                logger.debug("content chunk = '%d'" % len(content))
                f.write(content)

        my_contentupload_id = None
        if int(chunk) + 1 >= int(chunks):
            #this was the final chunk; hand the file off to the publisher scripts
            try:
                upload = move_upload(pub, dest_path, name, request.REQUEST['meta'])
                async_upload = run_pub_scripts.delay(pub, upload)
                my_contentupload_id = upload.id
            except Exception:
                logger.exception("There was a problem during upload of a file for a publisher.")
            else:
                logger.info("Successfully pluploaded %s" % name)

        logger.debug("plupload done")
        return render_to_json({"chunk posted": debug, "contentupload_id": my_contentupload_id})
    else:
        return render_to_json({"method": "only post here"})
def delete_genome(request, pk):
    """delete a reference genome
    the filesystem file deletions should be done with a method on the model"""
    if request.method == "POST":
        rg = shortcuts.get_object_or_404(models.ReferenceGenome, pk=pk)
        with_dir = request.GET.get('with_dir', False)  #delete dir by default
        try_delete = rg.delete()

        if not try_delete:
            #the file could not be deleted, present the user with an error message.
            return render_to_json({"status": "<strong>Error</strong> <p>Genome could not be deleted.</p> \
                <p>Check the file permissions for the genome on the file system at: </p> \
                <p><strong>" + str(rg.reference_path) + "</strong></p>"})

        return render_to_json({"status": "Genome was deleted successfully"})

    if request.method == "GET":
        return render_to_json({"status": "This must be accessed via post"})
def genome_status(request, pk):
    """Provide a way for the index creator to let us know when
    the index has been created"""
    if request.method == "POST":
        rg = shortcuts.get_object_or_404(models.ReferenceGenome, pk=pk)
        status = request.POST.get('status', False)
        enabled = request.POST.get('enabled', False)
        verbose_error = request.POST.get('verbose_error', "")
        index_version = request.POST.get('index_version', "")

        if not status:
            return render_to_json({"status": "error genome status not given"})

        rg.status = status
        rg.enabled = enabled
        rg.verbose_error = verbose_error
        rg.index_version = index_version
        rg.reference_path = os.path.join(settings.TMAP_DIR, rg.short_name)
        rg.save()
        return render_to_json({"status": "genome status updated", "enabled": enabled})

    if request.method == "GET":
        rg = shortcuts.get_object_or_404(models.ReferenceGenome, pk=pk)
        return render_to_json({"status": rg.status})
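#A minimal sketch of the client side of genome_status above: the external
#index-creation script is assumed to report back by POSTing "status",
#"enabled", "verbose_error" and "index_version" for a given ReferenceGenome pk.
#The helper name, base_url, URL path and example field values below are
#illustrative assumptions, not the actual ionJobServer implementation.
def _example_report_genome_status(base_url, genome_pk):
    """Hypothetical client-side call showing the POST genome_status expects."""
    import urllib
    import urllib2
    data = urllib.urlencode({
        "status": "created",
        "enabled": "true",
        "verbose_error": "",
        "index_version": settings.TMAP_VERSION,
    })
    #e.g. base_url = "http://localhost/rundb/genomestatus" (path is an assumption)
    return urllib2.urlopen("%s/%d/" % (base_url, int(genome_pk)), data)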
def fileUpload(request):
    """file upload for plupload"""
    if request.method == 'POST':
        name = request.REQUEST.get('name', '')
        uploaded_file = request.FILES['file']
        if not name:
            name = uploaded_file.name
        name, ext = os.path.splitext(name)

        #check to see if a user has uploaded a file before, and if they have
        #not, make them an upload directory
        upload_dir = "/results/referenceLibrary/temp/"
        if not os.path.exists(upload_dir):
            return render_to_json({"error": "upload path does not exist"})

        dest_path = os.path.join(upload_dir, name + ext)

        chunk = request.REQUEST.get('chunk', '0')
        chunks = request.REQUEST.get('chunks', '0')
        debug = [chunk, chunks]

        #chunk arrives as a string, so compare against '0' to open the first
        #chunk in write mode and append the rest
        with open(dest_path, ('wb' if chunk == '0' else 'ab')) as f:
            for content in uploaded_file.chunks():
                f.write(content)

        if int(chunk) + 1 >= int(chunks):
            #the upload has finished
            pass

        return render_to_json({"chunk posted": debug})
    else:
        return render_to_json({"method": "only post here"})
def new_genome(request):
    """This is the page to create a new genome.
    The XML-RPC server is ionJobServer.
    """
    if request.method == "POST":
        #parse the data sent in

        #required
        name = request.POST.get('name', False)
        short_name = request.POST.get('short_name', False)
        fasta = request.POST.get('target_file', False)
        version = request.POST.get('version', False)
        notes = request.POST.get('notes', "")

        #optional
        read_sample_size = request.POST.get('read_sample_size', False)
        read_exclude_length = request.POST.get('read_exclude_length', False)

        #URL download
        url = request.POST.get('url', False)

        #if any of the required fields are missing, send back a failed message
        if not all((name, short_name, fasta, version)):
            return render_to_json({"status": "Form validation failed", "error": True})

        if not set(short_name).issubset("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"):
            return render_to_json(
                {"status": "The short name has invalid characters. The valid values are letters, numbers, and underscores.", "error": True}
            )

        #TODO: check to make sure the zip file only has one fasta or fa
        path = "/results/referenceLibrary/temp/"

        if not url:
            #check to ensure the size on the OS is the same as the size reported by the client
            reported_file_size = request.POST.get('reported_file_size', False)
            try:
                uploaded_file_size = str(os.path.getsize(path + fasta))
            except OSError:
                return render_to_json(
                    {"status": "The FASTA temporary file was not found", "error": True}
                )

            if reported_file_size != uploaded_file_size:
                try:
                    os.remove(path + fasta)
                except OSError:
                    return render_to_json(
                        {"status": "The FASTA temporary file did not match the expected size, and could not be deleted.", "error": True}
                    )
                return render_to_json(
                    {"status": "The file you uploaded differs from the expected size. This is due to an error uploading. The temporary file has been removed.",
                     "reported": reported_file_size,
                     "uploaded": uploaded_file_size,
                     "error": True}
                )

        #Make a genome ref object
        if models.ReferenceGenome.objects.filter(short_name=short_name, index_version=settings.TMAP_VERSION):
            #check to see if the genome already exists in the database with the same version
            return render_to_json({"status": "Failed - Genome with this short name and index version already exists.", "error": True})

        rg = models.ReferenceGenome()
        rg.name = name
        rg.short_name = short_name
        rg.version = version
        rg.date = datetime.datetime.now()
        rg.notes = notes
        rg.status = "queued"
        rg.enabled = False
        rg.index_version = settings.TMAP_VERSION

        #before the object is saved we should ping the xml-rpc server to see if it is alive.
        try:
            host = "127.0.0.1"
            conn = client.connect(host, settings.JOBSERVER_PORT)
            #just check uptime to make sure the call does not fail
            conn.uptime()
        except (socket.error, xmlrpclib.Fault):
            return render_to_json(
                {"status": "Unable to connect to ionJobserver process. You may need to restart ionJobserver", "error": True}
            )

        #if the above didn't fail then we can save the object
        #this object must be saved before the tmap call is made
        rg.save()

        #kick off the anaserve tmap xmlrpc call
        import traceback
        try:
            host = "127.0.0.1"
            conn = client.connect(host, settings.JOBSERVER_PORT)
            tmap_bool, tmap_status = conn.tmap(str(rg.id), fasta, short_name, name, version,
                                               read_sample_size, read_exclude_length,
                                               settings.TMAP_VERSION)
        except (socket.error, xmlrpclib.Fault):
            #delete the genome object, because index creation was not successful
            rg.delete()
            return render_to_json(
                {"status": "Error with index creation", "error": traceback.format_exc()}
            )

        if not tmap_bool:
            rg.delete()
            return render_to_json(
                {"status": tmap_status, "error": True}
            )

        return render_to_json({"status": "The genome index is being created. This might take a while, check the status on the references tab. You are being redirected there now.", "error": False})

    elif request.method == "GET":
        ctxd = {}
        ctx = template.RequestContext(request, ctxd)
        #when we get a POST that data should be validated and go to the xmlrpc process
        return shortcuts.render_to_response("rundb/ion_new_genome.html", context_instance=ctx)