def test_consistency(self):
    # Test the upload and download consistency:
    print "Test consistency of upload and download."
    tempfilepath = self.setting.mirrordir + "/" + tempfilename
    # Copy infile to tempfile in mirror dir:
    shutil.copyfile(infilename, tempfilepath)
    filesize = os.path.getsize(tempfilepath)
    # Upload tempfile:
    metadata = common.FileMetadata(tempfilename, filesize, self.setting.totalnode, self.setting.coding)
    workflow.uploadFile(self.setting, metadata)
    print "Upload finishes."
    # Clean temporary directories:
    clean.cleanAll()
    print "Clean finishes."
    # Download tempfile:
    metadata = common.FileMetadata(tempfilename, 0, self.setting.totalnode, self.setting.coding)
    workflow.downloadFile(self.setting, metadata)
    print "Download finishes."
    # Copy tempfile to outfile:
    shutil.copyfile(tempfilepath, outfilename)
    # Clean data in cloud and temporary directories:
    metadata = common.FileMetadata(tempfilename, 0, self.setting.totalnode, self.setting.coding)
    workflow.deleteFile(self.setting, metadata)
    clean.cleanAll()
    # Check if infile is same as outfile:
    print "test file difference"
    self.assertEqual(filecmp.cmp(infilename, outfilename), 1)
    # Delete outfile:
    os.unlink(outfilename)
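# For reference, the same upload/download round trip as a standalone helper.
# This is a hedged sketch, not project code: it reuses only the calls visible
# above (common.FileMetadata, workflow.uploadFile/downloadFile/deleteFile,
# clean.cleanAll); the roundtrip() name and the assumption that `setting` is
# already configured are mine.
import os
import shutil
import filecmp

import common
import workflow
import clean


def roundtrip(setting, srcpath, name):
    mirror = setting.mirrordir + "/" + name
    # Stage the source file in the mirror directory:
    shutil.copyfile(srcpath, mirror)
    metadata = common.FileMetadata(name, os.path.getsize(mirror),
                                   setting.totalnode, setting.coding)
    # Encode and upload, then drop local temporary data:
    workflow.uploadFile(setting, metadata)
    clean.cleanAll()
    # Download back into the mirror directory:
    metadata = common.FileMetadata(name, 0, setting.totalnode, setting.coding)
    workflow.downloadFile(setting, metadata)
    # Compare the original and the recovered copy byte by byte:
    same = filecmp.cmp(srcpath, mirror, shallow=False)
    # Remove the file from the clouds and clean up:
    workflow.deleteFile(setting, metadata)
    clean.cleanAll()
    return same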
def __init__(self, path, flags, *mode): '''File open. Download file from cloud for read mode.''' self.file = os.fdopen(os.open("." + path, flags, *mode), flag2mode(flags)) self.fd = self.file.fileno() self.path = path print "open file ",path #Set direct_io and keep_cache options as required by fuse: self.direct_io = True self.keep_cache = False #Construct file metadata: filename = path[path.index("/")+1:] filesize = os.path.getsize("." + path) self.metadata = common.FileMetadata(filename,filesize,setting.totalnode,setting.coding) if ("r" in self.file.mode) or ("+" in self.file.mode): #Download file from clouds to mirror dir: workflow.downloadFile(setting, self.metadata)
def __init__(self, path, flags, *mode): '''File open. Download file from cloud for read mode.''' self.file = os.fdopen(os.open("." + path, flags, *mode), flag2mode(flags)) self.fd = self.file.fileno() self.path = path print "open file ", path #Set direct_io and keep_cache options as required by fuse: self.direct_io = True self.keep_cache = False #Construct file metadata: filename = path[path.index("/") + 1:] filesize = os.path.getsize("." + path) self.metadata = common.FileMetadata(filename, filesize, setting.totalnode, setting.coding) if ("r" in self.file.mode) or ("+" in self.file.mode): #Download file from clouds to mirror dir: workflow.downloadFile(setting, self.metadata)