def write_thread(conn, dest_host, src_file, keyname):
    """Upload src_file to the globally configured bucket via conn, timing the write.

    Args:
        conn: an open boto S3 connection.
        dest_host: hostname string, used only for log messages.
        src_file: local path of the file to upload.
        keyname: logical object name; the stored key is prefixed with a short
            md5 of this name to spread keys across bucket index partitions.

    Returns:
        True on success, False if getting the bucket or writing the object failed.
    """
    global submit_host
    global time_end
    global logger
    global bucket_name
    ret_code = True
    # Set just before the timed upload; stays None if setup fails earlier.
    start = None
    try:
        bucket = conn.get_bucket(bucket_name)
        # upload
        try:
            key = Key(bucket)
            key.key = '%s_%s' % (hashlib.md5(keyname).hexdigest()[0:15], keyname)
            # NOTE(review): md5 is hard-coded, presumably matching a fixed
            # test payload -- confirm against the file actually uploaded.
            key.md5 = "ea7a25c839be547c6bd964e015671453"
            key.set_metadata("md5", "ea7a25c839be547c6bd964e015671453")
            # time the write
            start = datetime.datetime.now()
            key.set_contents_from_filename(src_file)
            stop = datetime.datetime.now()
            # BUGFIX: use total_seconds() instead of .seconds + .microseconds,
            # which silently dropped the days component of the timedelta.
            elapsed_time = (stop - start).total_seconds()
            timestamp = start.strftime('[%Y-%m-%d %H:%M:%S.%f] ')
            logger.info(
                "Host - %s write to bucket %s elapsed time - %.3f sec at %s"
                % (dest_host, bucket_name, elapsed_time, timestamp))
        except Exception as e:
            # BUGFIX: 'start' was referenced here even when the failure
            # happened before the timed section began, raising NameError and
            # masking the real exception. Fall back to the current time.
            timestamp = (start or datetime.datetime.now()).strftime(
                '[%Y-%m-%d %H:%M:%S.%f] ')
            logger.info("Host - %s start write to bucket %s at %s"
                        % (dest_host, bucket_name, timestamp))
            logger.info("Thread Exception (write object) %s" % (str(e)))
            ret_code = False
    except Exception as e:
        logger.info("Thread Exception (get_bucket) %s" % (str(e)))
        ret_code = False
    return ret_code
def test_upload_download(self, rse, attributes): t_start = time.time() access_key = attributes['access_key'] secret_key = attributes['secret_key'] is_secure = attributes['is_secure'] s3path = attributes['endpoint'] parsed = urlparse.urlparse(s3path) scheme = parsed.scheme hostname = parsed.netloc.partition(':')[0] port = int(parsed.netloc.partition(':') [2]) if parsed.netloc.partition(':')[2] != '' else 0 balance_hosts = self.list_hosts(hostname, port) for balance_host in balance_hosts: print "Working on %s: %s" % (rse, balance_host) try: conn = boto.connect_s3( aws_access_key_id=access_key, aws_secret_access_key=secret_key, host=balance_host, port=port, is_secure=is_secure, # uncommmnt if you are not using ssl calling_format=boto.s3.connection.OrdinaryCallingFormat(), ) bucket = conn.get_bucket("atlas-test-bucket-new") #upload key = Key(bucket) key.key = os.path.basename(self.__data_file) key.md5 = self.__md5sum key.set_metadata("md5", self.__md5sum) key.set_contents_from_filename(self.__data_file) # print key.key # print key.size # print "key.md5 " + key.md5 # print "key.etag " + key.etag.strip('"') #download key = Key(bucket) key.key = os.path.basename(self.__data_file) key.get_contents_to_filename( "/tmp/%s" % os.path.basename(self.__data_file)) # print key.size # print "key.md5 " + key.md5 # print "key.etag " + key.etag.strip('"') # print "key.get_metadata " + str(key.get_metadata("md5")) record_timer( "objectstore.upload_download.time.success.%s.%s" % (rse, balance_host.split(".")[-1]), (time.time() - t_start)) record_timer( "objectstore.upload_download.time.success.%s" % (rse), (time.time() - t_start)) record_counter("objectstore.upload_download.%s.%s.success" % (rse, balance_host.split(".")[-1])) record_counter("objectstore.upload_download.%s.success" % (rse)) except: print traceback.format_exc() record_counter("objectstore.upload_download.%s.%s.failure" % (rse, balance_host.split(".")[-1])) record_counter("objectstore.upload_download.%s.failure" % (rse)) 
record_timer( "objectstore.upload_download.time.failure.%s.%s" % (rse, balance_host.split(".")[-1]), (time.time() - t_start)) record_timer( "objectstore.upload_download.time.failure.%s" % (rse), (time.time() - t_start))