	def onUpload(self, ext, src='', folder='', dst=''):

		lpw = LukePathWalker()
		
		print "fetching files..."
		files = []
		for file in lpw.getDirectoryContent(src+folder):
			if lpw.checkExtention(ext, file):
				files.append(file)
		print len(files), "files fetched. Uploading..."

		n = 0 # number of iterations completed, for progress-printing purposes
		m = 0 # number of files skipped

		for fname in files:

			srcpath = fname
			# prepend the optional destination prefix to the path relative to src
			dstpath = os.path.join(dst, fname[len(src):])
			# size of the file, used to choose between single-part and multipart upload
			fsize = os.path.getsize(srcpath)
			# create a key in the target bucket; its name is the destination path
			k = boto.s3.key.Key(self.bucket)
			k.key = dstpath

			if not self.keyExists(k):

				# choose between a single-part and a multipart upload (for files too big for one request)
				if fsize > self.MAX_SIZE:
					mp = self.bucket.initiate_multipart_upload(dstpath)
					fp = open(srcpath, 'rb')
					fpnum = 0

					while fp.tell() < fsize:
						fpnum += 1
						# upload the next chunk; the last part may be smaller than PART_SIZE
						mp.upload_part_from_file(fp, fpnum, size=min(self.PART_SIZE, fsize - fp.tell()))

					fp.close()
					mp.complete_upload() # tell S3 the multipart upload is complete

				else:
					# small enough: upload the whole file in a single request
					k.set_contents_from_filename(srcpath)
			else:
				m += 1

			printProgress(n, len(files), "uploading on: " + self.bname, pathLeaf(fname))
			n += 1

		# [files uploaded, files skipped, files processed]
		return [n - m, m, n]
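
	# onUpload relies on a keyExists helper that is defined outside this section.
	# The method below is only a minimal sketch of what it presumably does, using
	# boto's Key.exists() (a HEAD request against the bucket); the class's real
	# implementation may differ.
	def keyExists(self, k):
		# True if the key is already present in the destination bucket
		return k.exists()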

			


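# getMD5 is used in the driver below but defined outside this section. The
# helper here is a hypothetical sketch (named getMD5Sketch so it cannot shadow
# the real one), assuming the job is to return the hexadecimal MD5 digest of a
# file while reading it in fixed-size chunks to keep memory use bounded.
import hashlib

def getMD5Sketch(path, chunk_size=65536):
	md5 = hashlib.md5()
	with open(path, 'rb') as f:
		for chunk in iter(lambda: f.read(chunk_size), b''):
			md5.update(chunk)
	return md5.hexdigest()
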
if __name__ == '__main__':
	
	timer = Chrono()
	ncg   = NetcdfGeometry()
	lpw   = LukePathWalker()

	mydir = "/sdiles/ubuntu/sdiles"
	myext = ["nc"] 

	paths = []
	for file in lpw.getDirectoryContent(mydir):
		if lpw.checkExtention(myext, file):
			paths.append(file)


	for i in range(50, len(paths)):

		timer.start()
		md5 = getMD5(paths[i])