def upload_files(bucketname, srcdir): print bucketname, srcdir conn = S3Connection() bucket = conn.get_bucket(bucketname) for path, dir, files in os.walk(srcdir): for file in files: filekey = os.path.relpath(os.path.join(path, file), srcdir).replace('\\', '/') filepath = os.path.normpath(os.path.join(path, file)) #print "filekey: ", filekey #print "filepath: ", filepath key = bucket.lookup(filekey) if key: fingerprint = key.etag.replace('"', '') else: fingerprint = None key = Key(bucket, filekey) fp = str(key.compute_md5(open(filepath, "rb"))[0]) fs = os.path.getsize(filepath) if fingerprint != fp: print "Uploading file %s (%d bytes, %s MD5) .." % (filekey, fs, fp) key.set_contents_from_filename(filepath, cb = percent_cb, num_cb = 100) key.set_acl('public-read') else: print "File %s already on S3 and unchanged." % filekey
def upload_files(bucketname, srcdir): print bucketname, srcdir conn = S3Connection() bucket = conn.get_bucket(bucketname) for path, dir, files in os.walk(srcdir): for file in files: filekey = os.path.relpath(os.path.join(path, file), srcdir).replace('\\', '/') filepath = os.path.normpath(os.path.join(path, file)) #print "filekey: ", filekey #print "filepath: ", filepath key = bucket.lookup(filekey) if key: fingerprint = key.etag.replace('"', '') else: fingerprint = None key = Key(bucket, filekey) fp = str(key.compute_md5(open(filepath, "rb"))[0]) fs = os.path.getsize(filepath) if fingerprint != fp: print "Uploading file %s (%d bytes, %s MD5) .." % (filekey, fs, fp) key.set_contents_from_filename(filepath, cb=percent_cb, num_cb=100) key.set_acl('public-read') else: print "File %s already on S3 and unchanged." % filekey
def _upload(self, srcFile, bucket, key, view_bar=True, is_replace=True):
    """Upload srcFile to bucket/key.

    When is_replace is False, the transfer is skipped if S3 already
    holds an object of the same size that is at least as new as the
    local file.

    Returns a (status, uploaded, info) tuple on the handled paths;
    falls through returning None when boto's set_contents_from_file
    returns None without raising (original behavior, preserved).
    """
    cb = None
    if view_bar:
        cb = self._view_bar
    if not is_replace:
        local_file_size = os.path.getsize(srcFile)
        local_modify_time = os.path.getmtime(srcFile)
        res = self._head(bucket, key)
        if res and local_file_size == res.size:
            # S3 reports Last-Modified in RFC 1123 format
            # (renamed: 'format' shadowed the builtin)
            time_fmt = "%a, %d %b %Y %H:%M:%S GMT"
            s3_last_modify_time = format_unixtime(res.last_modified, time_fmt)
            if s3_last_modify_time >= local_modify_time:
                return (200, False, {"etag": res.etag})
    from boto.s3.key import Key
    fp = open(srcFile, 'rb')
    try:
        b = self.conn.get_bucket(bucket)
        k = Key(b)
        # compute_md5 rewinds fp, so the upload below reads from byte 0
        md5 = k.compute_md5(fp)
        k.key = key
        k.etag = md5[0]
        res = k.set_contents_from_file(fp, cb=cb)
        if res is not None:
            return (200, True, {"etag": k.etag})
    except boto.exception.S3ResponseError as e:
        return (e.status, True, {"reason": e.reason})
    finally:
        fp.close()  # fix: the original leaked the file handle
def checksumsMD5(target, source, env):
    """
    SCons builder for computing a MD5 fingerprint file for artifacts.

    Writes one "MD5 (<name>) = <hexdigest>" line per source node into
    the first target file.
    """
    checksums = {}
    for s in source:
        # the Key is only used for its compute_md5 helper; it is never uploaded
        key = Key(s.name)
        # fix: 'with' closes the artifact handle (the original leaked
        # one handle per source node)
        with open(s.path, "rb") as artifact:
            md5 = key.compute_md5(artifact)[0]
        checksums[s.name] = md5
    ## MD5 (autobahn.js) = 9f26d4774ce6ebafd32e75b68b22a526
    ##
    checksumsString = ''.join(["MD5 (%s) = %s\n" % c for c in checksums.items()])
    with open(target[0].path, 'wb') as f:
        f.write(checksumsString.encode('utf8'))
def md5_cache_file(self):
    """Compute the MD5 of self.obj['cache_file'] using boto's Key helper.

    Stores the hex digest in self.obj['md5'] and the base64 digest in
    self.obj['md5_encoded'].  Raises Exception when cache_file is unset.
    """
    key = Key()
    if self.obj['cache_file']:
        fname = self.obj['cache_file']
        # fix: binary mode — the MD5 must be computed over raw bytes
        # (text mode gives a wrong digest on Windows); 'with' closes the
        # handle even if compute_md5 raises
        with open(fname, 'rb') as f_open:
            self.obj['md5'], self.obj['md5_encoded'] = key.compute_md5(f_open)
    else:
        raise Exception("VALUE_NOT_SET", "cache_file has not been set")
    key.close()
    del key
def synth(self,fileName,upload=False):
    """Render this object's MIDI to MP3 via fluidsynth + lame and
    optionally upload the result to S3.

    Pipeline: write temp.mid -> fluidsynth renders temp.raw ->
    lame encodes tracks/<fileName> -> optional S3 sync.
    Returns False on subprocess failure or KeyboardInterrupt;
    otherwise returns None (original behavior).
    """
    self.toFile('temp.mid')
    # ensure the output directory exists; ignore "already exists"
    try:os.mkdir('tracks')
    except:pass
    print "\tGenerating MP3..."
    try:
        # fluidsynth renders the MIDI into raw PCM audio (temp.raw)
        p=subprocess.Popen('fluidsynth/fluidsynth.exe -nli -r 44100 -g 1 -o synth.cpu-cores=2 -F temp.raw fluidsynth\Scc1t2.sf2 temp.mid'.split(),stdout=subprocess.PIPE,stderr=subprocess.PIPE)
        out,err=p.communicate()
        if p.wait()!=0:
            print err
            return False
    except KeyboardInterrupt:
        print "Interrupted"
        # clean up the partial render before bailing out
        os.remove('temp.raw')
        return False
    os.remove('temp.mid')
    try:
        # lame encodes the raw PCM into tracks/<fileName> (MP3)
        p=subprocess.Popen(['lame/lame.exe','-S','temp.raw','tracks/%s'%fileName], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out,err=p.communicate()
        if p.wait()!=0:
            print err
            return False
    except KeyboardInterrupt:
        print "Interrupted"
        # remove the partial MP3 before bailing out
        os.remove('tracks/%s'%fileName)
        return False
    os.remove('temp.raw')
    if upload:
        def cb(cur,total):
            # 40-character text progress bar for the S3 transfer
            progress = int((float(cur)/total*40))
            print '\r\t|'+'='*progress+'-'*(40-progress)+'| %d%%'%(progress*2.5),
        print "\tUploading file to S3...",
        if not self.population.amusic.s3bucket:
            self.population.amusic.inits3()
        k = Key(self.population.amusic.s3bucket)
        k.key = 'tracks/%s' % fileName
        if k.exists():
            # open_read populates k.etag from the remote object
            k.open_read()
        # remote ETag is quoted, hence the [1:-1] strip
        # NOTE(review): the handle passed to compute_md5 is never closed
        if k.etag and k.compute_md5(open('tracks/'+fileName,'rb'))[0] == k.etag[1:-1]:
            print "up to date"
        else:
            print
            k.set_contents_from_filename('tracks/'+fileName,cb=cb,num_cb=40,replace=True)
            print '\n'
            k.set_acl('public-read')
def upload(credentials, bucket, key, file_path):
    """Upload file_path to bucket/key, skipping the transfer when the
    local MD5 already matches the remote ETag.
    """
    bucket = Bucket(
        connection=S3Connection(*credentials),
        name=bucket
    )
    s3_key = bucket.get_key(key)
    if s3_key:
        # ETag of a plain (non-multipart) S3 object is its quoted MD5
        old_hash = s3_key.etag.strip('"')
    else:
        s3_key = Key(bucket)
        s3_key.key = key
        old_hash = None
    # fix: 'rb' — the MD5 and the uploaded bytes must come from the raw
    # file; text mode corrupts binary content (and the digest) on Windows
    with open(file_path, 'rb') as fh:
        new_hash, _ = s3_key.compute_md5(fh)
        if new_hash == old_hash:
            logger.info("File {key} unchanged", key=key)
        else:
            # compute_md5 rewound fh, so the upload starts at byte 0
            s3_key.set_contents_from_file(fh)
            logger.success('Uploaded: {key}', key=key)
def uploadFileToAmazonS3(inputFullFilename, outputBucketName, outputSubfolderName, outputFilename, AccessID, SecretKey): from boto.s3.key import Key c = boto.connect_s3(AccessID, SecretKey) s3bucket = c.create_bucket(outputBucketName) s3bucket.set_acl('public-read') def cb(cur,total): progress = int((float(cur)/total*40)) print '\r\t|'+'='*progress+'-'*(40-progress)+'| %d%%'%(progress*2.5), print "uploading file %s to S3..." % inputFullFilename, k = Key(s3bucket) k.key = outputSubfolderName + "/" + outputFilename if k.exists(): k.open_read() if k.etag and k.compute_md5(open(inputFullFilename, 'rb'))[0] == k.etag[1:-1]: print "up to date" else: print #todo: maybe skip this if the file is known to be up to date k.set_contents_from_filename(inputFullFilename, cb=cb, num_cb=5, replace=True) print '\n' k.set_acl('public-read')
from boto.s3.connection import S3Connection from boto.s3.key import Key conn = S3Connection() bucket = conn.get_bucket(BUCKET) def percent_cb(complete, total): sys.stdout.write("%d %%\n" % round(100. * float(complete) / float(total))) sys.stdout.flush() for f in FILES: filename = os.path.basename(f) key = bucket.lookup(filename) if key: fingerprint = key.etag.replace('"', '') else: fingerprint = None key = Key(bucket, filename) fp = str(key.compute_md5(open(f, "rb"))[0]) fs = os.path.getsize(f) if fingerprint != fp: print "Uploading file %s (%d bytes, %s MD5) .." % (f, fs, fp) key.set_contents_from_filename(f, cb = percent_cb, num_cb = 100) key.set_acl('public-read') else: print "File %s already on S3 and unchanged." % f
bucket_name = argv[1] key_string = argv[2] b = conn.get_bucket(bucket_name) k = Key(b) k.key = key_string def progress(transferred, total): pct = float(transferred) / total tt = time() - t0 rate = transferred / tt for p in ['B', 'KB', 'MB', 'GB', 'TB']: if rate / 1024 > 1: rate = rate / 1024 else: break print "{percentage:.0%} done. {rate:.2f} {unit}/sec".format(percentage=pct, rate=rate, unit=p) file_name = argv[3] with open(file_name, 'r') as f: k.compute_md5(f) #sum = 'ae5a572dd92448821a3cfb22b03dd323' #sum2ple = k.get_md5_from_hexdigest(sum) t0 = time() k.set_contents_from_filename(file_name, cb=progress, num_cb=10)
def upload_file(filepath, bucket, prefix=None, reduced_redundancy=False):
    """Upload a single file to an S3 bucket under an optional key prefix.

    Spaces in the file name become underscores in the S3 key.  When an
    object with the same size and MD5 already exists, nothing is
    transferred.  Returns the boto Key on success (or when already
    online), 0 for empty files or upload failure.
    """
    global pbar  # the module-level progress_callback updates this bar
    logger = logging.getLogger(__name__)
    if not isinstance(filepath, Path):
        filepath = Path(filepath)
    key = Key(bucket)
    if prefix:
        key.key = '%s/%s' % (prefix, filepath.name.replace(' ', '_'))
    else:
        key.key = '%s' % (filepath.name.replace(' ', '_'))
    if key.exists():
        # if key already exists, compute md5 sum to test if necessary to upload
        if filepath.stat().st_size == key.size:
            # rb very important; fix: 'with' also closes the handle when
            # compute_md5 raises (the original could leak it)
            with filepath.open('rb') as f:
                md5 = key.compute_md5(f)
            # key.etag is the quoted MD5 hex digest
            if '"%s"' % md5[0] == key.etag:
                logger.info('file %s is already online' % key.key)
                return key
    size = filepath.stat().st_size
    if size == 0:
        logger.info('Bad filesize for "%s"', filepath)
        return 0
    widgets = [
        str(filepath), ' ',
        progress_bar.FileTransferSpeed(), ' <<<',
        progress_bar.Bar(), '>>> ',
        progress_bar.Percentage(), ' ',
        progress_bar.ETA()
    ]
    pbar = progress_bar.ProgressBar(widgets=widgets, maxval=size)
    pbar.start()
    try:
        key.set_contents_from_filename(
            str(filepath),
            cb=progress_callback,
            num_cb=20,
            reduced_redundancy=reduced_redundancy,
        )
        key.set_acl('public-read')
    except IOError:
        logger.error('Failed to open file', exc_info=True)
        return 0
    finally:
        # fix: finish the bar on the failure path too, so a failed
        # upload no longer leaves the progress bar dangling
        pbar.finish()
    return key
def synth(self, filename=None, upload=False, generateMP3=False, tempFilename="temp"):
    """Render this object's MIDI via fluidsynth, then either encode it
    to MP3 with lame (generateMP3) or copy the WAV into the static
    tracks folder; optionally sync the result to S3.

    Returns False on subprocess failure or KeyboardInterrupt; otherwise
    returns None (original behavior).
    """
    if filename == None:
        filename = self.filename()
    self.toFile(tempFilename + '.mid')
    print "\tGenerating..."
    # both branches assign the same path; kept as-is to stay faithful
    if os.name == 'nt':
        fileFolder = os.path.join("..", "..", "..", "..", "amusic_files")
    else:
        fileFolder = os.path.join("..", "..", "..", "..", "amusic_files")
    staticFolder = os.path.join("..", "amusicsite", "polls", "static")
    fullFilename = os.path.join(staticFolder, 'tracks', filename)
    # ensure the output directory exists; ignore "already exists"
    try:os.mkdir(os.path.join(staticFolder, 'tracks'))
    except:pass
    if not(os.path.isdir(fileFolder)):
        raise Exception("folder does not exist " + fileFolder)
    #soundFontFile = os.path.join("fluidsynth", "Scc1t2.sf2")
    soundFontFile = os.path.join(fileFolder, "WeedsGM3.sf2")
    #soundFontFile = r"I:\downloads\Saber_5ths_and_3rds\Saber_5ths_and_3rds.sf2"
    #I:\downloads\BassFing
    if not(os.path.isfile(soundFontFile)):
        raise Exception("file does not exist " + soundFontFile)
    try:
        # fluidsynth renders the MIDI into <tempFilename>.wav
        if os.name == 'nt':
            print "using fluidsynth command for Windows"
            #p=subprocess.Popen('fluidsynth/fluidsynth.exe -nli -r 44100 -g 1 -o synth.cpu-cores=2 -F temp.raw fluidsynth\Scc1t2.sf2 temp.mid'.split(),stdout=subprocess.PIPE,stderr=subprocess.PIPE)
            commandLine = ('fluidsynth/fluidsynth.exe -nli -r 44100 -g 1 -o synth.cpu-cores=2 -F ' + tempFilename + '.wav ' + soundFontFile + ' ' + tempFilename + '.mid')
            print commandLine
            p=subprocess.Popen(commandLine.split(),stdout=subprocess.PIPE,stderr=subprocess.PIPE)
        else:
            print "using fluidsynth command for Linux"
            #p=subprocess.Popen('fluidsynth -nli -r 44100 -g 1 -o synth.cpu-cores=2 -F temp.raw fluidsynth/Scc1t2.sf2 temp.mid'.split(),stdout=subprocess.PIPE,stderr=subprocess.PIPE)
            commandLine = ('fluidsynth -nli -r 44100 -g 1 -o synth.cpu-cores=2 -F ' + tempFilename + '.wav ' + soundFontFile + ' ' + tempFilename + '.mid')
            print commandLine
            p=subprocess.Popen(commandLine.split(),stdout=subprocess.PIPE,stderr=subprocess.PIPE)
        print "running fluidsynth"
        out,err=p.communicate()
        if p.wait()!=0:
            print err
            return False
        print "finished fluidsynth"
    except KeyboardInterrupt:
        print "Interrupted"
        #os.remove('temp.raw')
        return False
    #os.remove('temp.mid')
    try:
        print os.getcwd()
        if generateMP3:
            # mp3 file
            print "generating:", fullFilename
            if os.name == 'nt':
                print "using lame (mp3 codec) command for Windows"
                #p=subprocess.Popen(['lame/lame.exe','-S','temp.raw','tracks/%s'%filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                p=subprocess.Popen(['lame/lame.exe','-S',tempFilename + '.wav',fullFilename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            else:
                print "using lame (mp3 codec) command for Linux"
                #p=subprocess.Popen(['lame','-S','temp.raw','tracks/%s'%filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                p=subprocess.Popen(['lame','-S',tempFilename + '.wav',fullFilename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out,err=p.communicate()
            if p.wait()!=0:
                print err
                return False
        else:
            # wave file
            print fullFilename + ".wav"
            shutil.copyfile(tempFilename + ".wav", fullFilename + ".wav")
    except KeyboardInterrupt:
        print "Interrupted"
        # remove the partial output before bailing out
        os.remove(fullFilename)
        return False
    #os.remove('temp.raw')
    if upload:
        def cb(cur,total):
            # 40-character text progress bar for the S3 transfer
            progress = int((float(cur)/total*40))
            print '\r\t|'+'='*progress+'-'*(40-progress)+'| %d%%'%(progress*2.5),
        print "\tUploading file %s to S3..." % filename,
        if not self.population.amusic.s3bucket:
            self.population.amusic.inits3()
        k = Key(self.population.amusic.s3bucket)
        k.key = 'tracks/%s' % filename
        if k.exists():
            # open_read populates k.etag from the remote object
            k.open_read()
        # remote ETag is quoted, hence the [1:-1] strip
        # NOTE(review): the handle passed to compute_md5 is never closed
        if k.etag and k.compute_md5(open('tracks/'+filename,'rb'))[0] == k.etag[1:-1]:
            print "up to date"
        else:
            print
            k.set_contents_from_filename('tracks/'+filename,cb=cb,num_cb=40,replace=True)
            print '\n'
            k.set_acl('public-read')
def s3_uploader(target, source, env):
    """
    SCons builder for Amazon S3 upload.

    Uploads changed source nodes to the bucket named by env['S3_BUCKET']
    (with optional prefix/ACL/max-age settings from env) and writes the
    resulting remote MD5 checksums into the first target file.
    """
    def s3_upload_percent_cb(complete, total):
        # boto progress callback: one "NN %" line per invocation
        if total > 0:
            sys.stdout.write("%d %%\n" % round(100. * float(complete) / float(total)))
            sys.stdout.flush()

    ## the bucket, bucket prefix and object ACLs come from env
    ##
    s3_bucket_name = env['S3_BUCKET']
    s3_object_acl = env.get('S3_OBJECT_ACL', 'public-read')
    s3_maxages = env.get('S3_MAXAGES', None) or {}
    s3_bucket_prefix = env.get('S3_BUCKET_PREFIX', '')
    s3_relpath = env.get('S3_RELPATH', None)

    def rpath(o):
        """
        Convert scons source file object to remote S3 URL path.
        """
        if s3_relpath:
            return (s3_bucket_prefix + os.path.relpath(o.path, s3_relpath)).replace('\\', '/')
        else:
            return (s3_bucket_prefix + o.name).replace('\\', '/')

    ## S3 connection and bucket to upload to
    ##
    s3 = None
    bucket = None
    try:
        s3 = S3Connection()
        bucket = s3.get_bucket(s3_bucket_name)
    except:
        # this an ugly, half-baked patch to make FFM region work
        # (as well as possibly other regions)
        # http://stackoverflow.com/a/29391782/884770
        # https://github.com/boto/boto/issues/2916
        # https://github.com/danilop/yas3fs/issues/101
        # https://github.com/boto/boto/issues/2741
        os.environ['S3_USE_SIGV4'] = 'True'
        s3 = S3Connection(host='s3.eu-central-1.amazonaws.com')
        bucket = s3.get_bucket(s3_bucket_name)

    ## compute MD5s of artifacts to upload
    ##
    checksums = {}
    for s in source:
        key = Key(s.path)
        # fix: 'with' closes the artifact handle (the original leaked
        # one handle per source node)
        with open(s.path, "rb") as artifact:
            md5 = key.compute_md5(artifact)[0]
        checksums[s.path] = md5

    ## determine stuff we need to upload
    ##
    uploads = []
    for s in source:
        key = bucket.lookup(rpath(s))
        # remote ETag (quoted MD5) differing from the local digest
        # means the artifact changed
        if not key or key.etag.replace('"', '') != checksums[s.path]:
            uploads.append(s)
        else:
            print("{0} unchanged versus S3".format(rpath(s)))

    ## actually upload new or changed stuff
    ##
    for u in uploads:
        print("Uploading '{0}' to S3 at '{1}' ..".format(u.path, rpath(u)))
        key = Key(bucket, rpath(u))
        file_ext = os.path.splitext(u.name)[1].lower()
        content_type, content_encoding = mimetypes.guess_type(u.name)

        ## Note that "set_metadata" must be set before uploading!
        ##
        if content_type:
            key.set_metadata('Content-Type', content_type)
        if content_encoding:
            key.set_metadata('Content-Encoding', content_encoding)
        elif file_ext in GZIP_ENCODING_FILE_EXTS:
            key.set_metadata('Content-Encoding', 'gzip')

        def set_maxage(key, days):
            ## HTTP 1.0
            expires = '%s GMT' % (email.Utils.formatdate(time.mktime((datetime.now() + timedelta(days = days)).timetuple())))
            key.set_metadata('Expires', expires)
            ## HTTP 1.1
            max_age = 'max-age=%d, public' % (3600 * 24 * days)
            key.set_metadata('Cache-Control', max_age)

        # max-age can be keyed by file extension or by MIME type
        if file_ext in s3_maxages:
            set_maxage(key, s3_maxages[file_ext])
        if content_type in s3_maxages:
            set_maxage(key, s3_maxages[content_type])

        key.set_contents_from_filename(u.path, cb = s3_upload_percent_cb, num_cb = 100)
        key.set_acl(s3_object_acl)

    ## revisit uploaded stuff and get MD5s
    ##
    checksumsS3 = {}
    for s in source:
        key = bucket.lookup(rpath(s))
        md5 = key.etag.replace('"', '')
        checksumsS3[s.name] = md5
    checksumsS3String = ''.join(["MD5 (%s) = %s\n" % c for c in checksumsS3.items()])

    ## target produced is checksums as they exist on S3
    ##
    with open(target[0].path, "wb") as f:
        f.write(checksumsS3String.encode('utf-8'))
def synth(self, filename=None, upload=False, generateMP3=False, tempFilename="temp"):
    """Render this object's MIDI via fluidsynth (os.system), then either
    encode it to MP3 with lame (generateMP3) or copy the WAV into the
    static tracks folder; optionally sync the result to S3.

    NOTE(review): 'fontFolder' is read but never assigned here — it is
    presumably a module-level global; verify before refactoring.
    Returns False on subprocess failure or KeyboardInterrupt; otherwise
    returns None (original behavior).
    """
    if filename == None:
        filename = self.filename()
    self.toFile(tempFilename + '.mid')
    print "\tGenerating..."
    #if os.name == 'nt':
    #    fileFolder = os.path.join("..", "..", "..", "..", "amusic_files", "sound_fonts")
    #else:
    #    fileFolder = os.path.join("..", "..", "..", "..", "amusic_files", "sound_fonts")
    staticFolder = os.path.join("..", "amusicsite", "polls", "static")
    fullFilename = os.path.join(staticFolder, 'tracks', filename)
    # ensure the output directory exists; ignore "already exists"
    try:os.mkdir(os.path.join(staticFolder, 'tracks'))
    except:pass
    if not(os.path.isdir(fontFolder)):
        raise Exception("font folder does not exist " + fontFolder)
    fontFilename = "WeedsGM3.sf2"
    #fontFilename = random.choice(os.listdir(fontFolder))
    #fontFilename = "1115-Filter_Bass_1.SF2"
    #if " " in fontFilename:
    #    newFilename = string.replace(fontFilename, " ", "_")
    #    os.rename(os.path.join(fontFolder, fontFilename), os.path.join(fontFolder, newFilename))
    #    fontFilename = newFilename
    #soundFontFile = os.path.join("fluidsynth", "Scc1t2.sf2")
    soundFontFile = os.path.join(fontFolder, fontFilename)
    #soundFontFile = r"I:\downloads\Saber_5ths_and_3rds\Saber_5ths_and_3rds.sf2"
    #I:\downloads\BassFing
    if not(os.path.isfile(soundFontFile)):
        raise Exception("file does not exist " + soundFontFile)
    try:
        # fluidsynth renders the MIDI into <tempFilename>.wav; the
        # Popen variant is commented out in favor of os.system below
        if os.name == 'nt':
            print os.getcwd()
            print "using fluidsynth command for Windows"
            #p=subprocess.Popen('fluidsynth/fluidsynth.exe -nli -r 44100 -g 1 -o synth.cpu-cores=2 -F temp.raw fluidsynth\Scc1t2.sf2 temp.mid'.split(),stdout=subprocess.PIPE,stderr=subprocess.PIPE)
            commandLine = (os.path.join("fluidsynth", "fluidsynth.exe") + ' -nli -r 44100 -g 1 -o synth.cpu-cores=2 -F ' + tempFilename + '.wav ' + soundFontFile + ' ' + tempFilename + '.mid')
            print commandLine
            #p=subprocess.Popen(commandLine.split(),stdout=subprocess.PIPE,stderr=subprocess.PIPE)
        else:
            print "using fluidsynth command for Linux"
            #p=subprocess.Popen('fluidsynth -nli -r 44100 -g 1 -o synth.cpu-cores=2 -F temp.raw fluidsynth/Scc1t2.sf2 temp.mid'.split(),stdout=subprocess.PIPE,stderr=subprocess.PIPE)
            commandLine = ('fluidsynth -nli -r 44100 -g 1 -o synth.cpu-cores=2 -F ' + tempFilename + '.wav ' + soundFontFile + ' ' + tempFilename + '.mid')
            print commandLine
            #p=subprocess.Popen(commandLine.split(),stdout=subprocess.PIPE,stderr=subprocess.PIPE)
        print "running fluidsynth"
        # NOTE(review): os.system return code is ignored, so a failed
        # render is not detected here (the Popen error check is commented out)
        os.system(commandLine)
        #out,err=p.communicate()
        #if p.wait()!=0:
        #    print err
        #    return False
        print "finished fluidsynth"
    except KeyboardInterrupt:
        print "Interrupted"
        #os.remove('temp.raw')
        return False
    #os.remove('temp.mid')
    try:
        #print os.getcwd()
        if generateMP3:
            # mp3 file
            print "generating:"
            print os.path.join(os.getcwd(), fullFilename)
            if os.name == 'nt':
                print "using lame (mp3 codec) command for Windows"
                #p=subprocess.Popen(['lame/lame.exe','-S','temp.raw','tracks/%s'%filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                p=subprocess.Popen(['lame/lame.exe','-S',tempFilename + '.wav',fullFilename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            else:
                print "using lame (mp3 codec) command for Linux"
                #p=subprocess.Popen(['lame','-S','temp.raw','tracks/%s'%filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                p=subprocess.Popen(['lame','-S',tempFilename + '.wav',fullFilename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out,err=p.communicate()
            if p.wait()!=0:
                print err
                return False
        else:
            # wave file
            print os.path.join(os.getcwd(), fullFilename + ".wav")
            shutil.copyfile(tempFilename + ".wav", fullFilename + ".wav")
    except KeyboardInterrupt:
        print "Interrupted"
        # remove the partial output before bailing out
        os.remove(fullFilename)
        return False
    #os.remove('temp.raw')
    if upload:
        def cb(cur,total):
            # 40-character text progress bar for the S3 transfer
            progress = int((float(cur)/total*40))
            print '\r\t|'+'='*progress+'-'*(40-progress)+'| %d%%'%(progress*2.5),
        print "\tUploading file %s to S3..." % filename,
        if not self.population.amusic.s3bucket:
            self.population.amusic.inits3()
        k = Key(self.population.amusic.s3bucket)
        k.key = 'tracks/%s' % filename
        if k.exists():
            # open_read populates k.etag from the remote object
            k.open_read()
        # remote ETag is quoted, hence the [1:-1] strip
        # NOTE(review): the handle passed to compute_md5 is never closed
        if k.etag and k.compute_md5(open('tracks/'+filename,'rb'))[0] == k.etag[1:-1]:
            print "up to date"
        else:
            print
            k.set_contents_from_filename('tracks/'+filename,cb=cb,num_cb=40,replace=True)
            print '\n'
            k.set_acl('public-read')
# TODO: provide real AWS credentials aws_access_key = '' aws_secret_key = '' conn = S3Connection(aws_access_key, aws_secret_key) # TODO: provide the bucket name bucket = conn.create_bucket('bucket_name') # TODO: change to real file location on disk file_name = 'on_disk_file_location' file_size = os.stat(file_name).st_size file_pt = file(file_name) # TODO: change s3 Key name k = Key(bucket, 's3_key_name') md5sum = k.compute_md5(file_pt) print 'md5 sum:' print md5sum written_size = k.set_contents_from_filename(file_name, md5=md5sum) print 'actual size:' print file_size print 'written size:' print written_size print 'are they equal?' print (file_size == written_size)
localFileSize = os.path.getsize(localPath) if (localFileSize != s3FileSize): log( "local and S3 file sizes of "+localPath+" do not match." ) log( localPath+" "+ str(localFileSize)+" vs S3 copy at "+str(s3FileSize) ) if (overwriteOK) : log( "overwriting S3 copy" ) needsWrite = True else : log( "exiting with error" ) exit(-1) debug( "calculating local md5 hash..." ) (localHexMD5,localB64MD5) = testKey.compute_md5(fp) debug( "done" ) fp.close() if (s3HexMD5 == localHexMD5): debug( "existing S3 copy of file "+localPath+" verified with correct size and checksum, good" ) elif (overwriteOK) : log( "md5 sums for local and S3 copies of "+localPath+" did not match! Overwriting S3 copy." ) needsWrite = True else: log( "error: md5 sums for local and S3 copies of "+localPath+" did not match! Exiting without overwriting file" ) log( "exiting with error" ) exit(-1) else : # hadoop localFileSize = os.path.getsize(localPath) if (localFileSize == testKey.size):
# TODO: provide real AWS credentials aws_access_key = '' aws_secret_key = '' conn = S3Connection(aws_access_key, aws_secret_key) # TODO: provide the bucket name bucket = conn.create_bucket('bucket_name') # TODO: change to real file location on disk file_name = 'on_disk_file_location' file_size = os.stat(file_name).st_size file_pt = file(file_name) # TODO: change s3 Key name k = Key(bucket, 's3_key_name') md5sum = k.compute_md5(file_pt) print 'md5 sum:' print md5sum written_size = k.set_contents_from_filename(file_name, md5=md5sum) print 'actual size:' print file_size print 'written size:' print written_size print 'are they equal?' print(file_size == written_size)