def download(self, bucket_name, file_name, d_file_name): try: self.s3_client.download_file(bucket_name, file_name, d_file_name, Config=config) except ClientError as e: if e.response['Error']['Code'] == "404": print "download_file_%s_Fail." % file_name logger.error("download_file_%s_Fail." % file_name) fp.append_report(report_file, "[%s] - download_file_%s_Fail\n" % (time.strftime("%Y-%m-%d %H:%M:%S"), file_name)) else: raise
def upload_dir_report(report_file, bucket, dir_name, file_number, datanode_number, restartyts3, restartytsn): check = False if fp.check_bucket(bucket): logger.info("call s3cmd to upload dir %s to bucket %s." % (dir_name, bucket)) time_start = time.time() ret, output = commands.getstatusoutput( "%s --no-ssl put --recursive %s s3://%s" % (s3cmd_home, dir_name, bucket)) if ret != 0: logger.error("upload dir %s error!", dir_name) return time_end = time.time() total_time = time_end - time_start logger.info("upload dir %s totally cost %f" % (dir_name, total_time)) time.sleep(2) if int(datanode_number) > 0: fp.close_partial_datanode(datanode_number) if restartyts3 == True: print "restart yts3" fp.restart_yts3() if restartytsn == True: print "restart ytsn" fp.restart_ytsn() if file_number <= 10: wait_time = 600 elif file_number > 10: wait_time = 1200 time_start = time.time() check = check_dir_upload(bucket, dir_name, file_number) while not check: time_end = time.time() if time_end - time_start > wait_time: break check = check_dir_upload(bucket, dir_name, file_number) if check == True: fp.append_report( report_file, "[%s] - upload_dir_%s_file_number=%d_Pass\n" % (time.strftime("%Y-%m-%d %H:%M:%S"), dir_name, file_number)) else: fp.append_report( report_file, "[%s] - upload_dir_%s_file_number=%d_Fail\n" % (time.strftime("%Y-%m-%d %H:%M:%S"), dir_name, file_number))
def download_dir_file_report(report_file, bucket, dir_name, file_names, size):
    """Download every file of dir_name from bucket via s3cmd, verify size and
    content (diff) against the local originals, and append Pass/Fail lines to
    the report file. On success the downloaded copies and the local source
    directory are removed. Stops at the first failure.

    Fixes over the previous revision:
      * ``append_report`` was called without the ``fp.`` prefix (NameError).
      * the "download file is not exist" log line was missing its %s argument.
    """
    for file_name in file_names:
        upload_file_size = os.path.getsize(dir_name + "/" + file_name)
        download_file_name = file_name + ".down"

        # The object must exist in the bucket before we try to fetch it.
        find = check_dir_file_upload(bucket, dir_name, file_name)
        if not find:
            logger.error("the file %s is not exist in bucket %s" % (file_name, bucket))
            fp.append_report(
                report_file,
                "[%s] - download_dir_%s_file_%s_Fail.\n"
                % (time.strftime("%Y-%m-%d %H:%M:%S"), dir_name, file_name))
            return

        ret, output = commands.getstatusoutput(
            "%s --no-ssl get s3://%s/%s/%s %s"
            % (s3cmd_home, bucket, dir_name, file_name, download_file_name))
        if ret != 0:
            fp.append_report(
                report_file,
                "[%s] - download_dir_%s_file_%s_Fail.\n"
                % (time.strftime("%Y-%m-%d %H:%M:%S"), dir_name, file_name))
            logger.error("download_dir_%s_file_%s_fail.\n" % (dir_name, file_name))
            return

        if not os.path.exists(download_file_name):
            # Fixed: was logger.info("... %s ...") with no argument.
            logger.info("the download file %s is not exist." % download_file_name)
            # Fixed: was a bare append_report(...) — NameError at runtime.
            fp.append_report(
                report_file,
                "[%s] - download_file_%s_size=%d_is not exist.\n"
                % (time.strftime("%Y-%m-%d %H:%M:%S"), file_name, size))
            return

        # Cheap size check first, then a full byte-level diff.
        download_file_size = os.path.getsize(download_file_name)
        if upload_file_size == download_file_size:
            fp.append_report(
                report_file,
                "[%s] - download_dir_%s_file_%s_size=%d_Pass.\n"
                % (time.strftime("%Y-%m-%d %H:%M:%S"), dir_name, file_name, size))
            logger.info("download dir %s file %s successfully." % (dir_name, file_name))
        else:
            fp.append_report(
                report_file,
                "[%s] - download_dir_%s_file_%s_size=%d_Fail.\n"
                % (time.strftime("%Y-%m-%d %H:%M:%S"), dir_name, file_name, size))
            return

        ret, output = commands.getstatusoutput(
            "diff %s/%s %s" % (dir_name, file_name, download_file_name))
        if ret != 0:
            logger.error("files %s and %s are not same."
                         % (file_name, download_file_name))
            fp.append_report(
                report_file,
                "[%s] - compare download_dir_%s_file_%s_size=%d and %s_Fail.\n"
                % (time.strftime("%Y-%m-%d %H:%M:%S"), dir_name, file_name, size,
                   download_file_name))
            return
        else:
            logger.info("files %s and %s.down are same." % (file_name, file_name))
            fp.append_report(
                report_file,
                "[%s] - compare download_dir_%s_file_%s_size=%d and %s_Pass.\n"
                % (time.strftime("%Y-%m-%d %H:%M:%S"), dir_name, file_name, size,
                   download_file_name))

        # Remove the downloaded copy once it has been verified.
        ret, output = commands.getstatusoutput("rm -f %s" % (download_file_name))
        if ret == 0:
            logger.info("remove file %s successfully." % (download_file_name))

    # All files verified: clean up the local source directory.
    ret, output = commands.getstatusoutput("rm -rf %s" % (dir_name))
    if ret == 0:
        logger.info("remove dir %s successfully." % (dir_name))
# Script body: fan out one uploader thread per file and wait for them all.
# NOTE(review): bucket_name, file_size, file_number, transfer_way, report_file,
# access_key and secret_key are bound in an enclosing scope not visible in this
# chunk — presumably a loop over test configs; confirm against the full file.
print "bucket_name: %s" % bucket_name
print "file_size: %s" % file_size
print "file_number: %s" % file_number
print "transfer_way: %s" % transfer_way
# Short random suffix so repeated runs against the same bucket don't collide.
uuid_str = uuid.uuid4().hex[0:6]
tmp_file_name = 'tmpfile_%s_%s' % (bucket_name, uuid_str)
file_name = tmp_file_name
threads = []
logger.info("Begin to run test file number=%d size=%d upload."
            % (int(file_number), int(file_size)))
fp.append_report(
    report_file,
    "\n\n[%s] - begin to run test file number=%d file size=%d upload.\n"
    % (time.strftime("%Y-%m-%d %H:%M:%S"), int(file_number), int(file_size)))
# One thread per file: fp.c_only_u uploads file_name + ".txt<i>" of the given size.
for i in range(int(file_number)):
    t = threading.Thread(target=fp.c_only_u,
                         args=(bucket_name, file_name + ".txt%s" % i,
                               int(file_size), transfer_way,
                               access_key, secret_key))
    t.start()
    threads.append(t)
# Block until every upload thread has finished before moving on.
for th in threads:
    th.join()
list_config = dp.get_config() for config in list_config: if "config_dir" in config["configfilename"]: bucket_name = config["bucket_name"] file_size = config["file_size"] file_number = config["file_number"] print "bucket_name: %s" % bucket_name print "file_size: %s" % file_size print "file_number: %s" % file_number uuid_str = uuid.uuid4().hex[0:6] tmp_file_name = 'tmpfile_%s_%s' % (bucket_name, uuid_str) dir_name = tmp_file_name print "dir_name: %s" % dir_name logger.info("Begin to run test dir %s upload and download." % dir_name) fp.append_report( report_file, "\n\n[%s] - Begin to run test dir %s upload and download.\n" % (time.strftime("%Y-%m-%d %H:%M:%S"), dir_name)) file_name = [] for i in range(int(file_number)): file_name.append(dir_name + ".txt%s" % i) print "file_name: %s" % file_name dp.p_u_d(dir_name, int(file_size), bucket_name, int(file_number), file_name, datanode_number, False, False)