def sync_s3(remote_dir, local_dir):
    """Mirror *remote_dir* of the S3 bucket into *local_dir*.

    For each remote entry, skip the download when a local copy exists
    whose md5 appears in the entry's hash field; otherwise create any
    missing parent directories and write the remote contents locally.
    """
    for entry in bucket.listdir(remote_dir):
        # Entry layout (from usage): [0] key/path, [2] hash/etag string.
        out = local_dir + "/" + entry[0]
        # Local copy already matches the remote hash -> nothing to do.
        if os.path.exists(out) and entry[2].find(utils.md5sum(out)) >= 0:
            continue
        parent = out.rpartition("/")[0]
        # exist_ok avoids the check-then-create race of the original;
        # guard against an empty parent when the key has no slash.
        if parent:
            os.makedirs(parent, exist_ok=True)
        blob = bucket.get(entry[0])
        # 'with' closes the handle; the original leaked the file object.
        with open(out, "wb") as fh:
            fh.write(blob.read())
def sync_s3(remote_dir, local_dir):
    """Mirror *remote_dir* of the S3 bucket into *local_dir*.

    For each remote entry, skip the download when a local copy exists
    whose md5 appears in the entry's hash field; otherwise create any
    missing parent directories and write the remote contents locally.
    """
    for entry in bucket.listdir(remote_dir):
        # Entry layout (from usage): [0] key/path, [2] hash/etag string.
        out = local_dir + "/" + entry[0]
        # Local copy already matches the remote hash -> nothing to do.
        if os.path.exists(out) and entry[2].find(utils.md5sum(out)) >= 0:
            continue
        parent = out.rpartition("/")[0]
        # exist_ok avoids the check-then-create race of the original;
        # guard against an empty parent when the key has no slash.
        if parent:
            os.makedirs(parent, exist_ok=True)
        blob = bucket.get(entry[0])
        # 'with' closes the handle; the original leaked the file object.
        with open(out, "wb") as fh:
            fh.write(blob.read())
def make_long_name(local, hash):
    """Build the remote key for *local*: "<root>/<stem>-<tag>.<ext>".

    The tag is the file's md5 when *hash* is truthy, otherwise the
    value returned by _get_status().
    """
    stem, _, ext = local.rpartition(".")
    # Compute only the tag we actually use; the original called
    # _get_status() unconditionally and discarded the result whenever
    # *hash* was truthy.
    tag = utils.md5sum(local) if hash else _get_status()
    # NOTE(review): if *local* has no ".", rpartition yields an empty
    # stem and the whole name as ext, producing "<root>/-<tag>.<name>"
    # -- preserved as-is; confirm callers always pass an extension.
    return "%s/%s-%s.%s" % (root, stem, tag, ext)
def make_long_name(local, hash):
    """Build the remote key for *local*: "<root>/<stem>-<tag>.<ext>".

    The tag is the file's md5 when *hash* is truthy, otherwise the
    value returned by _get_status().
    """
    stem, _, ext = local.rpartition(".")
    # Compute only the tag we actually use; the original called
    # _get_status() unconditionally and discarded the result whenever
    # *hash* was truthy.
    tag = utils.md5sum(local) if hash else _get_status()
    # NOTE(review): if *local* has no ".", rpartition yields an empty
    # stem and the whole name as ext, producing "<root>/-<tag>.<name>"
    # -- preserved as-is; confirm callers always pass an extension.
    return "%s/%s-%s.%s" % (root, stem, tag, ext)