def addFile(request, dataset_id):
    """Copy an existing Dataset_File into the requesting user's staging
    area, register the staged copy with dataset ``dataset_id``, then
    re-run MTZ parsing on the dataset.

    Expects ``file_id`` in ``request.POST``.  Returns
    ``HttpResponseNotFound`` when ``file_id`` is absent or does not
    resolve to a ``Dataset_File`` (previously an unknown id raised an
    unhandled ``DoesNotExist`` and produced a 500).
    """
    import shutil
    from tardis.apps.mrtardis.utils import add_staged_file_to_dataset
    if "file_id" not in request.POST:
        return HttpResponseNotFound()
    file_id = request.POST["file_id"]
    try:
        # renamed from 'file' to avoid shadowing the builtin
        datafile = Dataset_File.objects.get(pk=file_id)
    except Dataset_File.DoesNotExist:
        return HttpResponseNotFound()
    shutil.copy(datafile.get_absolute_filepath(),
                get_full_staging_path(request.user.username))
    add_staged_file_to_dataset(datafile.filename, dataset_id,
                               request.user.username, datafile.mimetype)
    return parseMTZfile(request, dataset_id=dataset_id)
def makeJobScripts(self, request):
    """
    create PBS/OGE job submission files, one for each rmsd, pdb file
    and spacegroup

    Each combination of (PDB file, space group, rmsd) gets its own
    jobfile written to the user's staging area, registered with the
    dataset, and recorded as a "jobscript" parameter.
    """
    walltime = "12:0:0"  # requested wall-clock limit (h:m:s)
    pbs_prefix = "#$ "
    pbs_head = "#!/bin/sh\n"
    pbs_head += "%s-m abe\n" % pbs_prefix
    pbs_head += "%s-S /bin/bash\n" % pbs_prefix
    pbs_head += "%s-cwd\n" % pbs_prefix
    pbs_head += "%s-l h_rt=%s\n" % (pbs_prefix, walltime)
    pbs_commands = "\n. /etc/profile\n"
    pbs_commands += "module load phenix\n"
    pbs_commands += ". $PHENIX/build/$PHENIX_MTYPE/setpaths.sh\n"
    # URL the job pings when it finishes, so the web app learns the
    # job id is done
    pingurl = request.build_absolute_uri(
        reverse('tardis.apps.mrtardis.views.jobfinished',
                args=[self.dataset.id]))
    wget_command = "wget -O - %s?jobid=$JOB_ID" % pingurl
    ## ping server ten times with more and more delay
    pbs_footer = "touch jobid-$JOB_ID.finished\n"
    pbs_footer += "I=0; while [[ \"true\" != `%s`" % wget_command
    pbs_footer += "&& $I -lt 10 ]];"
    pbs_footer += "do echo yes; sleep $(($I*2)); I=$(($I+1)); done"
    phaser_command = "phenix.phaser"
    spacegroups = [utils.sgNumNameTrans(number=sgnum)
                   for sgnum in self.get_params("space_group", value=True)]
    if self.get_param("sg_all", value=True) == "True":
        # "ALL" asks phaser to try every space group itself
        spacegroups.append("ALL")
    rmsds = self.get_params("rmsd", value=True)
    for pdbfile in self.get_params("PDBfile", value=True):
        for sg in spacegroups:
            for rmsd in rmsds:
                parameters = self.getPhaserCommands(sg, rmsd, pdbfile)
                output = pbs_head + pbs_commands
                output += "echo -e \"" + parameters + "\"|" + \
                    phaser_command + " \n"
                output += pbs_footer
                jobfilename = pdbfile + "_" + sg + "_" + \
                    str(rmsd) + ".jobfile"
                # 'with' guarantees the handle is closed even if the
                # write raises (was open/write/close before)
                with open(os.path.join(
                        get_full_staging_path(request.user.username),
                        jobfilename), 'w') as ofile:
                    ofile.write(output)
                utils.add_staged_file_to_dataset(
                    jobfilename, self.dataset.id, request.user.username,
                    mimetype="application/x-shellscript")
                self.new_param("jobscript", jobfilename)
def retrieveFromHPC(self, location="msg"):
    """Download finished job output from the HPC host into the owning
    user's staging area and register every fetched file with the
    dataset.

    Skips files originally uploaded by the user, removes the remote
    job directory afterwards, and marks the job "finished".  Returns
    False without doing anything unless the status is
    "readyToRetrieve", True otherwise.
    """
    if self.get_status(value=True) != "readyToRetrieve":
        return False
    remote_user = self.get_param("hpc_username", value=True)
    account = HPCUser.objects.get(hpc_username=remote_user)
    staging_owner = account.user.username
    # don't re-download the user's own input files
    skip_list = self.get_params("uploaded_file", value=True)
    connection = self.connectToHPC(location, remote_user)
    fetched = connection.download(self.get_hpc_dir(),
                                  get_full_staging_path(staging_owner),
                                  excludefiles=skip_list)
    for fname in fetched:
        add_staged_file_to_dataset(fname, self.dataset.id, staging_owner)
    connection.rmtree(self.get_hpc_dir())
    self.set_status("finished")
    return True
def extractPDBzips(self, username):
    """
    Extracts pdb files out of zips, adds them to the dataset and
    removes the zip.

    Only ``.pdb`` members (any letter case) are extracted; macOS
    resource-fork entries under ``__MACOSX`` are ignored.  The zip
    Dataset_File is deleted once its members have been registered.
    """
    zipquery = Dataset_File.objects.filter(dataset=self.dataset,
                                           filename__iendswith=".zip")
    for zipfileobj in zipquery:
        zippath = zipfileobj.get_absolute_filepath()
        thiszip = zipfile.ZipFile(zippath, 'r')
        try:
            # case-insensitive match (was only ".pdb"/".PDB", which
            # missed mixed-case names despite the iendswith query above)
            extractlist = [
                member for member in thiszip.namelist()
                if member.lower().endswith(".pdb")
                and not member.startswith("__MACOSX")]
            thiszip.extractall(get_full_staging_path(username),
                               extractlist)
        finally:
            # guarantee the archive handle is released even if
            # extraction fails
            thiszip.close()
        for pdbfile in extractlist:
            utils.add_staged_file_to_dataset(
                pdbfile, self.dataset.id, username,
                mimetype="chemical/x-pdb")
        zipfileobj.deleteCompletely()