def tc2_fio_sr_stress():
    """Stress test: FIO sequential read, validated against the SR benchmark."""
    # Register user-defined pre/post hooks for case and loop levels
    tc_logger.info("Defining pre_case, post_case, pre_loop and post_loop inside of test case")
    test_conf["ud_pre_case"] = ud_pre_case_string
    test_conf["ud_post_case"] = ud_post_case_string
    test_conf["ud_pre_loop"] = ud_pre_loop_string
    test_conf["ud_post_loop"] = ud_post_loop_string

    # Duration comes from test_conf when provided, otherwise 5 minutes
    duration = test_conf.get("duration", 300)

    # Main execution
    set_up(level='case')
    tc_run_fio_by_duration(block_size='512k', duration=duration,
                           file_size='10g', runtime='600', iodepth='32',
                           rw='read')

    # Result validation against the sequential-read benchmark group
    statistic_file = os.path.join(test_conf["result_home"], "statistics.csv")
    result = stress_statistics_validation(statistic_file,
                                          "Read (MB/s)",
                                          _get_basic_stress_bm_filename(),
                                          ["FIO", "basic", "SR"],
                                          "Sequential Read")
    return result
def fake_deco_by_loops_ffu(loops, func, *args, **kw):
    """Run ``func`` for the given number of loops with per-loop setup/teardown.

    Each iteration optionally wraps the call with background event tracing.
    A graceful-stop marker file ends the loop early.
    """
    for loop_no in range(1, int(loops) + 1):
        tc_logger.info("***** {} *****".format(loop_no))
        set_up(level='loop', loop=loop_no)
        # Optionally start event tracing in the background for this loop
        if test_conf["event_trace"] is True:
            command = "{} {} > {} &".format(
                test_conf["event_trace_home"] + "/" + "trace_enable.sh",
                test_conf["device_id"],
                test_conf["log_home"] + "/" + "event_trace_" + str(loop_no) + ".log")
            tc_logger.info(
                "Enable event_trace with command: {}".format(command))
            os.system(command)
        func(*args, **kw)
        tear_down(level='loop', loop=loop_no)
        # Stop event tracing if it was enabled
        if test_conf["event_trace"] is True:
            command = "{} {}".format(
                test_conf["event_trace_home"] + "/" + "trace_disable.sh",
                test_conf["device_id"])
            tc_logger.info(
                "Disable event_trace with command: {}".format(command))
            os.system(command)
        # Graceful stop: the marker file requests early termination
        if os.path.exists(test_conf["graceful_stop_point"]):
            tc_logger.info("File exists: " + test_conf["graceful_stop_point"])
            tc_logger.info("Exit loop before duration threshold")
            break
def fake_deco_by_loops(loops, result_file, func, *args, **kw):
    """Run ``func`` for the given number of loops, appending results to a CSV.

    Each iteration runs loop-level setup/teardown, optionally wraps the call
    with event tracing, and writes the returned (header, row) pair to
    ``result_file`` with the loop number prepended. A graceful-stop marker
    file ends the loop early.
    """
    for loop_no in range(1, int(loops) + 1):
        tc_logger.info("***** {} *****".format(loop_no))
        set_up(level='loop', loop=loop_no)
        if test_conf["event_trace"] is True:
            command = "{} {} > {} &".format(
                test_conf["event_trace_home"] + "/" + "trace_enable.sh",
                test_conf["device_id"],
                test_conf["log_home"] + "/" + "event_trace_" + str(loop_no) + ".log")
            tc_logger.info(
                "Enable event_trace with command: {}".format(command))
            os.system(command)
        result_header, result_value = func(*args, **kw)
        tear_down(level='loop', loop=loop_no)
        if test_conf["event_trace"] is True:
            command = "{} {}".format(
                test_conf["event_trace_home"] + "/" + "trace_disable.sh",
                test_conf["device_id"])
            tc_logger.info(
                "Disable event_trace with command: {}".format(command))
            os.system(command)
        # Prepend the loop number; deep-copy the header so the caller's
        # list object is left untouched
        header_with_loop = copy.deepcopy(result_header)
        header_with_loop.insert(0, "Loop")
        result_value.insert(0, str(loop_no))
        write_csv_header(result_file, header_with_loop)
        write_csv_result(result_file, result_value)
        if os.path.exists(test_conf["graceful_stop_point"]):
            tc_logger.info("File exists: " + test_conf["graceful_stop_point"])
            tc_logger.info("Exit loop before duration threshold")
            break
def keep_monitor():
    """
    Keep monitoring the test
    :return: None
    """
    if test_conf['monitor'] is True:
        tc_logger.info('==>Start monitoring execution')
        # config monitoring log: temporarily swap the console handler (ch)
        # for a file handler so monitor startup output lands in
        # monitor_dir/log.txt
        tc_logger.removeHandler(ch)
        lh = logging.FileHandler(test_conf['monitor_dir'] + '/log.txt')
        lh.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            '%(asctime)s - %(levelname)s - %(message)s')
        lh.setFormatter(formatter)
        tc_logger.addHandler(lh)
        # start monitoring: run android.monitor.monitor asynchronously in a
        # single-worker process pool
        # NOTE(review): the Pool is local and never closed/joined; if it is
        # garbage-collected the worker may be reaped early — confirm the
        # monitor process lifetime is managed elsewhere.
        pool = Pool(1)
        m_module = import_module('android.monitor')
        m_func = getattr(m_module, 'monitor')
        pool.apply_async(func=m_func,
                         args=(test_conf['device_id'],
                               test_conf['monitor_target']))
        # revert monitoring log: restore the console handler
        tc_logger.removeHandler(lh)
        tc_logger.addHandler(ch)
        # Give the monitor time to restart adbd as root before the test
        # proceeds (configurable, default 30 s)
        tc_logger.info(
            'Monitoring system is starting adbd as root, waiting for {} seconds'
            .format(str(test_conf.get('monitor_root_sleep', 30))))
        time.sleep(test_conf.get('monitor_root_sleep', 30))
def read_file_by_iozone(block_size=None, file_size=None, threads=None,
                        sequential=None):
    """
    Read file by iozone, will use params from test_conf generally
    """
    tc_logger.info('==>Start to read file by iozone')
    device_id = test_conf["device_id"]
    exe_file = test_conf['tool']['iozone']
    exe_dir = os.path.dirname(exe_file)
    # Any parameter left as None falls back to test_conf (lower-cased for
    # the size/block strings, matching iozone's expectations)
    if block_size is None:
        block_size = test_conf.get('block_size', '4k').lower()
    if file_size is None:
        file_size = test_conf.get('file_size', '128m').lower()
    if threads is None:
        threads = test_conf.get('threads', '8')
    if sequential is None:
        sequential = test_conf.get('sequential', True)
    # iozone -i selector: '1' when sequential read requested, '2' otherwise
    test_mode = '1' if unify_bool_value(sequential) else '2'
    command = 'shell "cd {0};{1} -w -r {2} -s {3} -i {4} -I -t {5}"'.format(
        exe_dir, exe_file, block_size, file_size, test_mode, str(threads))
    ADB(device_id).execute_adb_command(command)
    tc_logger.info('==>Read file by iozone over')
def process_param(**kwargs):
    """
    Process parameters both from command line and IDE
    :param kwargs: key value pair
    :return: None
    """
    tc_logger.info('==>Start to process parameters')
    global test_conf
    # Command-line arguments (key=value) take precedence over kwargs;
    # comma-separated values are normalized into lowercase lists
    if len(sys.argv) > 1:
        for arg in sys.argv[1:]:
            kv = arg.strip().split('=', 1)
            key = kv[0]
            value = kv[1]
            if ',' in value:
                value = value.lower().replace(' ', '').split(',')
            test_conf[key] = value
    else:
        for k, v in kwargs.items():
            if ',' in str(v):
                v = v.lower().replace(' ', '').split(',')
            test_conf[k] = v
    # make switch input standard: coerce string-valued feature flags to bool
    # (assumes all switch keys are already present in test_conf — TODO confirm
    # defaults exist in the base configuration)
    switch = [
        'statistics', 'chart', 'monitor', 'watchdog', 'event_trace',
        'loop_health_report'
    ]
    for key in switch:
        if not isinstance(test_conf[key], bool):
            test_conf[key] = test_conf[key].lower() in ('1', 'true', 'yes', 't')
    # Optional user-supplied YAML overrides everything gathered so far
    if ("user_test_conf" in test_conf):
        tc_logger.info('==>Start to process user test conf yaml')
        with open(test_conf["user_test_conf"], 'r') as file:
            temp_conf = yaml.safe_load(file)
            for k, v in temp_conf.items():
                test_conf[k] = v
        tc_logger.info('==>Process user test conf yaml over')
    # Expand "@default_suite_id" / "@default_test_id" placeholders inside
    # every string-valued config entry
    k1 = "default_suite_id"
    k2 = "default_test_id"
    v1 = test_conf[k1]
    v2 = test_conf[k2]
    for k, v in test_conf.items():
        if (type(v) == str):
            test_conf[k] = v.replace("@" + k1, v1).replace("@" + k2, v2)
    complete_config_path(
        test_conf['support_device'] + test_conf['support_tool'], test_conf)
    # Fill device-derived values only when not explicitly provided
    device = Device(test_conf['device_id'])
    device.root_device()
    if test_conf.get('device_type', None) is None:
        test_conf['device_type'] = device.get_host_manufacturer()[1][0]
    if test_conf.get('chip_manufacturer', None) is None:
        test_conf['chip_manufacturer'] = device.get_chip_manufacturer()[1][0]
    if test_conf.get('chip_capacity', None) is None:
        test_conf['chip_capacity'] = device.get_chip_capacity()[1][0]
    tc_logger.info('==>Process parameters over, valid parameters:')
    tc_logger.info(test_conf)
def convert_csv_file_to_json_file(source, target=None):
    """Convert a CSV file into a JSON file.

    When ``target`` is omitted it is derived from ``source``: a trailing
    ".csv" is swapped for ".json", otherwise ".json" is appended.
    """
    if target is None:
        base = source[:-4] if source.endswith(".csv") else source
        target = base + ".json"
    data = read_csv_to_json(source)
    tc_logger.info("Dumping json to {}".format(target))
    dump_append_dict_to_json_file(data, target)
def write_csv_result(file, string, new_line=True):
    """Append one result row to an existing CSV file.

    :param file: path to the CSV file; must already exist
    :param string: row content; a list is joined with the configured delimiter
    :param new_line: when True, a trailing newline follows the row
    :raises Exception: if the target file does not exist
    """
    if isinstance(string, list):
        string = test_conf["csv_delimiter"].join(str(elem) for elem in string)
    if not os.path.exists(file):
        raise Exception("csv file doesn't exists: {}".format(file))
    tc_logger.info("Writting: {}".format(string))
    # Append directly from Python instead of shelling out to `echo`; the old
    # os.system call let the shell interpret quotes, `$`, and backticks
    # embedded in the row content.
    with open(file, "a") as f:
        f.write(string + ("\n" if new_line else ""))
def _sleep(*args, **kwargs):
    """
    Sleep some time, default is 10 seconds, time as a param after sleep
    action has top priority
    """
    if args:
        # An explicit positional argument overrides everything
        sleep_time = int(args[0])
    else:
        # Fall back to the configured loop_sleep, then to 10 seconds
        sleep_time = test_conf.get('loop_sleep', None)
        if sleep_time is None:
            sleep_time = 10
    tc_logger.info('==>Sleeping {} seconds.'.format(sleep_time))
    sleep(int(sleep_time))
def read_file_by_fio(block_size=None, file_size=None, rw=None, rwmixread=None,
                     sub_jobs=None, runtime=None):
    """
    Read file by fio, use params from test_conf generally
    """
    # BUGFIX: this log line previously read "==>Write file by fio over" — the
    # *completion* message of the write variant, emitted before any work began.
    tc_logger.info('==>Start to read file by fio')
    device_id = test_conf["device_id"]
    exe_file = test_conf['tool']['fio']
    iodepth = test_conf.get("iodepth", "32")
    # Any parameter left as None falls back to test_conf
    if block_size is None:
        block_size = test_conf.get("block_size", "4k")
    if file_size is None:
        file_size = test_conf.get("file_size", "10G")
    if rw is None:
        rw = test_conf.get("rw", "read")
    if rwmixread is None:
        rwmixread = test_conf.get("rwmixread", "50")
    if runtime is None:
        runtime = test_conf.get("runtime", "600")
    if sub_jobs is None:
        sub_jobs = test_conf.get('sub_jobs', None)
    rewrite = unify_bool_value(test_conf.get('rewrite', True))
    filename = os.path.join(test_conf['tool']['dir'], 'fio_test_file')
    # testcase business workflow
    adb = ADB(device_id)
    # Mixed workloads need an explicit read percentage appended to --rw
    if rw in ["randrw", "rw", "readwrite"]:
        rw = rw + " --rwmixread=" + rwmixread
    _sub_jobs = '--name=perf_std --filename={}'.format(filename)
    if sub_jobs:
        _sub_jobs_list = list()
        for i in range(1, int(sub_jobs) + 1):
            sub_name = 'job' + str(i)
            sub_filename = 'fio_test_' + str(file_size) + '_' + str(i)
            # When rewrite is disabled, randomize filenames so each run
            # targets fresh files instead of rewriting existing ones
            if not rewrite:
                rand_str = random.randrange(10000)
                sub_filename = 'fio_test_' + str(file_size) + '_' + str(
                    i) + '_' + str(rand_str)
            sub_file_path = os.path.join(test_conf['tool']['dir'],
                                         sub_filename)
            _sub_job = '--name={0} --filename={1}'.format(
                sub_name, sub_file_path)
            _sub_jobs_list.append(_sub_job)
        _sub_jobs = ' '.join(_sub_jobs_list)
    fio_command = "shell {0} --direct=1 --norandommap=0 --numjobs=1 --ioengine=libaio --iodepth={1} --rw={2} --bs={3} --size={4} --runtime={5} --output-format=json,normal {6}" \
        .format(exe_file, iodepth, rw, block_size, file_size, runtime, _sub_jobs)
    adb.execute_adb_command(fio_command)
    tc_logger.info('==>Read file by fio over')
def initiate_device():
    """
    Initiate device, like set screen, enable mtp
    :return: None
    """
    tc_logger.info('==>Start to initiate devices')
    dev = Device(test_conf['device_id'])
    dev.root_device()
    dev.verify_hdmi_connection()
    dev.set_screen()
    dev.switch_mtp(True)
def tc2_iozone_sequential_stress():
    """Stress test: sequential IOZone workload, four columns benchmark-validated."""
    # pre_case, post_case, pre_loop and post_loop definition
    tc_logger.info("Defining pre_case, post_case, pre_loop and post_loop inside of test case")
    test_conf["ud_pre_case"] = ud_pre_case_string
    test_conf["ud_post_case"] = ud_post_case_string
    test_conf["ud_pre_loop"] = ud_pre_loop_string
    test_conf["ud_post_loop"] = ud_post_loop_string
    # duration configuration (seconds; default 5 minutes)
    if "duration" in test_conf:
        duration = test_conf["duration"]
    else:
        duration = 300
    # main function
    set_up(level='case')
    func = tc_run_iozone_by_duration
    func(block_size='512k', duration=duration, file_size='128m',
         sequential='True', threads='8')
    result = 0
    # result validation - Initial Writers
    statistic_file = os.path.join(test_conf["result_home"], "statistics.csv")
    statistics_column = "Initial Writers(MB/s)"
    benchmark_file = _get_basic_stress_bm_filename()
    benchmark_group = ["IOZone", "basic", "SW"]
    checkpoint_prefix = "Initial Writers"
    result = stress_statistics_validation(statistic_file, statistics_column,
                                          benchmark_file, benchmark_group,
                                          checkpoint_prefix) | result
    # result validation - Rewriters
    statistics_column = "Rewriters(MB/s)"
    benchmark_group = ["IOZone", "basic", "SW"]
    checkpoint_prefix = "Rewriters"
    result = stress_statistics_validation(statistic_file, statistics_column,
                                          benchmark_file, benchmark_group,
                                          checkpoint_prefix) | result
    # result validation - Readers
    statistics_column = "Readers(MB/s)"
    benchmark_group = ["IOZone", "basic", "SR"]
    checkpoint_prefix = "Readers"
    result = stress_statistics_validation(statistic_file, statistics_column,
                                          benchmark_file, benchmark_group,
                                          checkpoint_prefix) | result
    # result validation - Re-readers
    statistics_column = "Re-readers(MB/s)"
    benchmark_group = ["IOZone", "basic", "SR"]
    # BUGFIX: this prefix was copy-pasted as "Readers", which made Re-reader
    # checkpoints indistinguishable from the Readers validation above.
    checkpoint_prefix = "Re-readers"
    result = stress_statistics_validation(statistic_file, statistics_column,
                                          benchmark_file, benchmark_group,
                                          checkpoint_prefix) | result
    return result
def tc2_ab_default_stress():
    """Stress test: Androbench default workload, four metrics benchmark-validated."""
    # Register user-defined pre/post hooks for case and loop levels
    tc_logger.info("Defining pre_case, post_case, pre_loop and post_loop inside of test case")
    test_conf["ud_pre_case"] = ud_pre_case_string
    test_conf["ud_post_case"] = ud_post_case_string
    test_conf["ud_pre_loop"] = ud_pre_loop_string
    test_conf["ud_post_loop"] = ud_post_loop_string

    # Duration from test_conf when present, otherwise 5 minutes
    duration = test_conf.get("duration", 300)

    # Main execution
    set_up(level='case')
    tc_run_micro_by_duration(duration=duration)

    # Validate each metric column against its benchmark group; checkpoint
    # prefixes match the statistics column names minus the unit suffix
    statistic_file = os.path.join(test_conf["result_home"], "statistics.csv")
    benchmark_file = _get_basic_stress_bm_filename()
    checks = [
        ("Sequential Read(MB/s)", "SR", "Sequential Read"),
        ("Sequential Write(MB/s)", "SW", "Sequential Write"),
        ("Random Read(MB/s)", "RR", "Random Read"),
        ("Random Write(MB/s)", "RW", "Random Write"),
    ]
    result = 0
    for column, group, prefix in checks:
        result = stress_statistics_validation(statistic_file, column,
                                              benchmark_file,
                                              ["AB", "default", group],
                                              prefix) | result
    return result
def read_csv_to_lls(file):
    """Parse a CSV file into a list of lists of stripped strings."""
    if not os.path.exists(file):
        raise Exception("csv file doesn't exists: {}".format(file))
    tc_logger.info("Parsing {} to list of list of string".format(file))
    delimiter = test_conf["csv_delimiter"]
    with open(file, mode='r') as f:
        # One inner list per row, each cell whitespace-stripped
        return [[cell.strip() for cell in row.split(delimiter)]
                for row in f.readlines()]
def get_exclusive_action(precondition=True, level='case'):
    """
    Get exclusive action from input
    :param precondition: True means it's precondition, False means post condition
    :param level: case level or loop level
    :return: exclusive action, list
    """
    # Key is one of ud_pre_case / ud_post_case / ud_pre_loop / ud_post_loop
    prefix = 'ud_pre' if precondition else 'ud_post'
    suffix = '_case' if level == 'case' else '_loop'
    exclusive_action = test_conf.get(prefix + suffix, list())
    # Normalize a single string into a one-element list
    if isinstance(exclusive_action, str):
        exclusive_action = [exclusive_action]
    if exclusive_action:
        tc_logger.info('==>Exclusive action: {}'.format(exclusive_action))
    return exclusive_action
def get_additional_action(precondition=True, level='case'):
    """
    Get additional action from input
    :param precondition: True means it's precondition, False means post condition
    :param level: case level or loop level
    :return: additional action, list
    """
    # Key is one of add_pre_case / add_post_case / add_pre_loop / add_post_loop
    prefix = 'add_pre' if precondition else 'add_post'
    suffix = '_case' if level == 'case' else '_loop'
    additional_action = test_conf.get(prefix + suffix, list())
    # Normalize a single string into a one-element list
    if isinstance(additional_action, str):
        additional_action = [additional_action]
    if additional_action:
        tc_logger.info('==>Additional action: {}'.format(additional_action))
    return additional_action
def read_csv_to_json(file):
    """Parse a CSV file into a dict of row-index strings to row dicts.

    The first line is the header; each following row maps header cell to
    value cell, both whitespace-stripped.
    """
    if not os.path.exists(file):
        raise Exception("csv file doesn't exists: {}".format(file))
    tc_logger.info("Parsing {} to json".format(file))
    delimiter = test_conf["csv_delimiter"]
    data = {}
    with open(file, mode='r') as f:
        header = f.readline().split(delimiter)
        for i, row in enumerate(f.readlines()):
            value = row.split(delimiter)
            data[str(i)] = {header[k].strip(): value[k].strip()
                            for k in range(len(value))}
    return data
def write_csv_header(file, header):
    """Create a CSV file and write its header row; no-op if the file exists.

    :param file: path to the CSV file
    :param header: header content; a list is joined with the configured delimiter
    """
    if isinstance(header, list):
        header = test_conf["csv_delimiter"].join(str(elem) for elem in header)
    if not os.path.exists(file):
        tc_logger.info("Creating file {}".format(file))
        tc_logger.info("Writting csv header: {}".format(header))
        # Create and write from Python instead of shelling out to
        # `touch` + `echo`; the old os.system calls let the shell interpret
        # quotes and `$` embedded in the header content.
        with open(file, "w") as f:
            f.write(header + "\n")
    else:
        tc_logger.info("File already exists: {}".format(file))
        tc_logger.info("Skip writting csv header")
def create_file_by_iozone(block_size=None, file_size=None, threads=None):
    """
    Create file by iozone, use params from test_conf generally, this method
    will write file 2 times
    """
    tc_logger.info('==>Start to create file by iozone')
    device_id = test_conf["device_id"]
    exe_file = test_conf['tool']['iozone']
    exe_dir = os.path.dirname(exe_file)
    # Any parameter left as None falls back to test_conf (lower-cased for
    # the size/block strings, matching iozone's expectations)
    if block_size is None:
        block_size = test_conf.get('block_size', '4k').lower()
    if file_size is None:
        file_size = test_conf.get('file_size', '128m').lower()
    if threads is None:
        threads = test_conf.get('threads', '8')
    # -i 0 selects the write/rewrite test; -w keeps the files on the device
    command = 'shell "cd {0};{1} -w -r {2} -s {3} -i 0 -I -t {4}"'.format(
        exe_dir, exe_file, block_size, file_size, str(threads))
    ADB(device_id).execute_adb_command(command)
    tc_logger.info('==>Create file by iozone over')
def initiate_file():
    """
    Create dirs and files for test
    :return: None
    """
    tc_logger.info('==>Start to initiate dirs and files')
    # Result tree layout: result_shop[/job_id][/suite_id][/test_id]
    if 'job_id' in test_conf:
        test_conf['job_home'] = os.path.join(result_shop, test_conf['job_id'])
    else:
        test_conf['job_home'] = result_shop
    if 'suite_id' in test_conf:
        test_conf['suite_home'] = os.path.join(test_conf['job_home'],
                                               test_conf['suite_id'])
    if 'test_id' in test_conf:
        test_conf['result_home'] = os.path.join(test_conf['suite_home'],
                                                test_conf['test_id'])
    test_conf['log_home'] = os.path.join(test_conf['result_home'], 'log')
    test_conf['chart_home'] = os.path.join(test_conf['result_home'], 'chart')
    test_conf['monitor_home'] = os.path.join(test_conf['result_home'],
                                             'monitor')
    test_conf['screenshot_home'] = os.path.join(test_conf['result_home'],
                                                'screenshot')
    # Marker/exchange files used by the loop runner and result reporting
    test_conf['graceful_stop_point'] = os.path.join(
        test_conf['result_home'], 'pause')
    test_conf['mongo_json'] = os.path.join(test_conf['result_home'],
                                           'mongo.json')
    test_conf["pass_indicator"] = os.path.join(test_conf['result_home'],
                                               'PASS')
    test_conf["fail_indicator"] = os.path.join(test_conf['result_home'],
                                               'FAIL')
    test_conf["exception_indicator"] = os.path.join(
        test_conf['result_home'], 'EXCEPTION')
    # Use os.makedirs instead of shelling out to `mkdir -p`; exist_ok=True
    # keeps the idempotent behavior and avoids shell quoting issues in paths.
    for key in ('result_home', 'log_home', 'chart_home', 'monitor_home',
                'screenshot_home'):
        os.makedirs(test_conf[key], exist_ok=True)
    tc_logger.info('==>Initiate dirs and files over')
def FIO_4G_SR_Storage_Down_Perf():
    """Perf test: 512k FIO sequential read with loop count sized to capacity."""
    # Register user-defined pre/post hooks for case and loop levels
    tc_logger.info("Defining pre_case, post_case, pre_loop and post_loop inside of test case")
    test_conf["ud_pre_case"] = ud_pre_case_string
    test_conf["ud_post_case"] = ud_post_case_string
    test_conf["ud_pre_loop"] = ud_pre_loop_string
    test_conf["ud_post_loop"] = ud_post_loop_string

    # Loop configuration: loop count derives from the device's capacity
    test_conf["sub_jobs"] = _sub_jobs
    test_conf["file_size"] = _file_size[1]
    loops = int(device.auto_calculate_loops()[1][0])

    # Main execution (call the undecorated implementation via __wrapped__)
    set_up(level='case')
    tc_run_fio_by_loops.__wrapped__(iodepth=32, rw="read", block_size="512k",
                                    runtime=600, rewrite="false", loops=loops)

    # Performance result verification
    benchmark_item = get_benchmark_item(_get_perf_bm_filename(),
                                        ["FIO", "FIO_4G_Storage_Down_Perf"])
    tc_logger.info("Benchmark is as below")
    tc_logger.info(str(benchmark_item))
    result_file = os.path.join(test_conf["result_home"], "fio_rpt.csv")

    # SeqRead verification: drop the trailing aggregate row, then label one
    # checkpoint per loop (1-based)
    values = get_column_from_csv(result_file, "Read (MB/s)")[:-1]
    checkpoints = ["Sequential Read(MB/s)" + " - " + str(n + 1)
                   for n in range(len(values))]
    return assert_values_meet_benchmark(values, benchmark_item["SR"], False,
                                        "dc.yaml", checkpoints, True)
def gather_action(precondition=True, level='case'):
    """
    Combine default precondition in test_conf.yaml with custom precondition
    from set_up_case/tear_down_case
    :param precondition: True means it's precondition, False means post condition
    :param level: case level or loop level
    :return: actions, list
    """
    default_action = get_default_action(precondition, level)
    exclusive_action = get_exclusive_action(precondition, level)
    additional_action = get_additional_action(precondition, level)
    phase = 'pre' if precondition else 'post'
    txt = 'All {} {} actions'.format(phase, level)
    # Exclusive actions replace the defaults entirely; otherwise additional
    # actions are appended to the defaults without duplicates
    if exclusive_action:
        actions = exclusive_action
    else:
        actions = default_action
        for action in additional_action:
            if action not in actions:
                actions.append(action)
    tc_logger.info('==>{}: {}'.format(txt, actions))
    return actions
def get_default_action(precondition=True, level='case'):
    """
    Get default action from input
    :param precondition: True means it's precondition, False means post condition
    :param level: case level or loop level
    :return: default action, list
    """
    if level == 'case':
        key = 'pre_case' if precondition else 'post_case'
    else:
        key = 'pre_loop' if precondition else 'post_loop'
    # test_conf[key] names another test_conf entry that holds the action list
    default_action = list()
    pointer = test_conf.get(key, None)
    if pointer:
        default_action = test_conf.get(pointer, list())
        if default_action is None:
            default_action = list()
        elif None in default_action:
            # A None entry inside the list means "no action" — drop it
            default_action.remove(None)
    if default_action:
        tc_logger.info('==>Default {} action: {}'.format(key, default_action))
    return default_action
def tc2_IOZone_basic_Rand_3times():
    """Perf test: IOZone random workload, 3 loops, RR/RW benchmark-validated."""
    # pre_case, post_case, pre_loop and post_loop definition
    tc_logger.info(
        "Defining pre_case, post_case, pre_loop and post_loop inside of test case"
    )
    test_conf["ud_pre_case"] = ud_pre_case_string
    test_conf["ud_post_case"] = ud_post_case_string
    test_conf["ud_pre_loop"] = ud_pre_loop_string
    test_conf["ud_post_loop"] = ud_post_loop_string
    # main function (call the undecorated implementation via __wrapped__)
    set_up(level='case')
    func = tc_run_iozone_by_loops.__wrapped__
    func(threads=8,
         file_size="128m",
         block_size="4k",
         sequential=False,
         loops=3)
    # performance result verification
    benchmark_item = get_benchmark_item(_get_basic_perf_bm_filename(),
                                        ["IOZone", "basic"])
    tc_logger.info("Benchmark is as below")
    tc_logger.info(str(benchmark_item))
    result_file = os.path.join(test_conf["result_home"], "iozone_result.csv")
    result = 0
    # Random Read verification: the column's last row is rotated to the
    # front and labeled "avg"; remaining checkpoints are numbered 1..n-1
    checkpoints_prefix = "Random readers(MB/s)"
    values = get_column_from_csv(result_file, checkpoints_prefix)
    values = values[-1:] + values[:-1]
    checkpoints = [
        checkpoints_prefix + " - " + str(i) for i in range(1, len(values))
    ]
    checkpoints.insert(0, checkpoints_prefix + " - avg")
    result = assert_values_meet_benchmark(values, benchmark_item["RR"], False,
                                          "dc.yaml", checkpoints,
                                          True) | result
    # Random Write verification (comment previously mislabeled this section
    # "Random Reread"); same avg-first rotation as above
    checkpoints_prefix = "Random writers(MB/s)"
    values = get_column_from_csv(result_file, checkpoints_prefix)
    values = values[-1:] + values[:-1]
    checkpoints = [
        checkpoints_prefix + " - " + str(i) for i in range(1, len(values))
    ]
    checkpoints.insert(0, checkpoints_prefix + " - avg")
    result = assert_values_meet_benchmark(values, benchmark_item["RW"], False,
                                          "dc.yaml", checkpoints,
                                          True) | result
    # return result
    return result
def tc2_FIO_basic_RandRW73_3times():
    """Perf test: FIO 70/30 random mixed workload, 3 loops, benchmark-validated."""
    # pre_case, post_case, pre_loop and post_loop definition
    tc_logger.info(
        "Defining pre_case, post_case, pre_loop and post_loop inside of test case"
    )
    test_conf["ud_pre_case"] = ud_pre_case_string
    test_conf["ud_post_case"] = ud_post_case_string
    test_conf["ud_pre_loop"] = ud_pre_loop_string
    test_conf["ud_post_loop"] = ud_post_loop_string
    # main function (call the undecorated implementation via __wrapped__)
    set_up(level='case')
    func = tc_run_fio_by_loops.__wrapped__
    func(iodepth=32,
         rw="randrw",
         rwmixread="70",
         block_size="4k",
         file_size="1g",
         runtime=600,
         loops=3)
    # performance result verification
    benchmark_item = get_benchmark_item(_get_basic_perf_bm_filename(),
                                        ["FIO", "basic_randrw73"])
    tc_logger.info("Benchmark is as below")
    tc_logger.info(str(benchmark_item))
    result_file = os.path.join(test_conf["result_home"], "fio_rpt.csv")
    result = 0
    # Read verification: the column's last row is rotated to the front and
    # labeled "avg"; remaining checkpoints are numbered 1..n-1
    values = get_column_from_csv(result_file, "Read (MB/s)")
    values = values[-1:] + values[:-1]
    checkpoints = [
        "Random Read(MB/s)" + " - " + str(i) for i in range(1, len(values))
    ]
    checkpoints.insert(0, "Random Read(MB/s)" + " - avg")
    result = assert_values_meet_benchmark(values, benchmark_item["RR"], False,
                                          "dc.yaml", checkpoints,
                                          True) | result
    # Write verification (comment previously copy-pasted as "Read
    # verification"); same avg-first rotation as above
    values = get_column_from_csv(result_file, "Write (MB/s)")
    values = values[-1:] + values[:-1]
    checkpoints = [
        "Random Write(MB/s)" + " - " + str(i) for i in range(1, len(values))
    ]
    checkpoints.insert(0, "Random Write(MB/s)" + " - avg")
    result = assert_values_meet_benchmark(values, benchmark_item["RW"], False,
                                          "dc.yaml", checkpoints,
                                          True) | result
    # return result
    return result
def wrapper(*args, **kw):
    # Decorator body: run the wrapped test case and translate its return
    # value into PASS/FAIL indicator files plus a log line.
    case_name = func.__name__
    try:
        result = func(*args, **kw)
        # A case that returns nothing gets the closure's default_result
        if (result is None):
            result = default_result
        # 0 (int or string) means PASS; anything else is FAIL
        if (result == 0 or result == "0"):
            os.system("touch {}".format(test_conf["pass_indicator"]))
            tc_logger.info('[PASS] -- Test case {}'.format(case_name))
        else:
            os.system("touch {}".format(test_conf["fail_indicator"]))
            tc_logger.info('[FAIL] -- Test case {}'.format(case_name))
    except Exception as e:
        # Any exception touches the EXCEPTION indicator, logs the traceback,
        # and is still reported as FAIL in the log
        os.system("touch {}".format(test_conf["exception_indicator"]))
        # traceback.print_exc()
        tc_logger.error(traceback.format_exc())
        tc_logger.info('[FAIL] -- Test case {}'.format(case_name))
def FIO_4G_Restore_No_enough_SW_3times():
    """Perf test: 512k FIO sequential write, 4 sub-jobs, no rewrite, 3 loops."""
    # Register user-defined pre/post hooks for case and loop levels
    tc_logger.info(
        "Defining pre_case, post_case, pre_loop and post_loop inside of test case"
    )
    test_conf["ud_pre_case"] = ud_pre_case_string
    test_conf["ud_post_case"] = ud_post_case_string
    test_conf["ud_pre_loop"] = ud_pre_loop_string
    test_conf["ud_post_loop"] = ud_post_loop_string

    # Main execution (call the undecorated implementation via __wrapped__)
    set_up(level='case')
    tc_run_fio_by_loops.__wrapped__(iodepth=32, sub_jobs=4, rw="write",
                                    rewrite="false", block_size="512k",
                                    file_size="1g", runtime=600, loops=3)

    # Performance result verification
    benchmark_item = get_benchmark_item(_get_basic_perf_bm_filename(),
                                        ["FIO", "FIO_4G_Restore_No_enough"])
    tc_logger.info("Benchmark is as below")
    tc_logger.info(str(benchmark_item))
    result_file = os.path.join(test_conf["result_home"], "fio_rpt.csv")

    # SeqWrite verification: rotate the trailing row to the front and label
    # the checkpoints avg, 1, 2, ...
    values = get_column_from_csv(result_file, "Write (MB/s)")
    values = values[-1:] + values[:-1]
    checkpoints = ["Sequential Write(MB/s)" + " - avg"] + [
        "Sequential Write(MB/s)" + " - " + str(n)
        for n in range(1, len(values))
    ]
    return assert_values_meet_benchmark(values, benchmark_item["SW"], False,
                                        "dc.yaml", checkpoints, True)
def tc2_AB_default_3times():
    """Perf test: Androbench default workload, 3 loops, benchmark-validated.

    Runs the micro benchmark 3 times, then checks each of the four metric
    columns (SR/SW/RR/RW) against its benchmark entry. The four previously
    copy-pasted verification sections are folded into one table-driven loop.
    """
    # pre_case, post_case, pre_loop and post_loop definition
    tc_logger.info(
        "Defining pre_case, post_case, pre_loop and post_loop inside of test case"
    )
    test_conf["ud_pre_case"] = ud_pre_case_string
    test_conf["ud_post_case"] = ud_post_case_string
    test_conf["ud_pre_loop"] = ud_pre_loop_string
    test_conf["ud_post_loop"] = ud_post_loop_string
    # main function
    set_up(level='case')
    # tc_run_micro_by_loops(loops=3)
    func = tc_run_micro_by_loops.__wrapped__
    func(loops=3)
    # performance result verification
    benchmark_item = get_benchmark_item(_get_basic_perf_bm_filename(),
                                        ["AB", "default"])
    tc_logger.info("Benchmark is as below")
    tc_logger.info(str(benchmark_item))
    result_file = os.path.join(test_conf["result_home"],
                               "androbench_result.csv")
    result = 0
    # Each tuple: (CSV column / checkpoint prefix, benchmark key)
    verifications = [
        ("Sequential Read(MB/s)", "SR"),
        ("Sequential Write(MB/s)", "SW"),
        ("Random Read(MB/s)", "RR"),
        ("Random Write(MB/s)", "RW"),
    ]
    for checkpoints_prefix, bm_key in verifications:
        # The column's last row is rotated to the front and labeled "avg";
        # remaining checkpoints are numbered 1..n-1
        values = get_column_from_csv(result_file, checkpoints_prefix)
        values = values[-1:] + values[:-1]
        checkpoints = [
            checkpoints_prefix + " - " + str(i)
            for i in range(1, len(values))
        ]
        checkpoints.insert(0, checkpoints_prefix + " - avg")
        result = assert_values_meet_benchmark(values, benchmark_item[bm_key],
                                              False, "dc.yaml", checkpoints,
                                              True) | result
    # return result
    return result
def fake_deco_by_duration(duration, func, *args, **kw):
    """Repeatedly run ``func`` until the requested duration (seconds) elapses.

    Each iteration runs loop-level setup/teardown and optional event tracing.
    Elapsed time is only checked between iterations, so the final iteration
    may overrun the budget; a graceful-stop marker file ends the loop early.
    """
    duration = int(duration)
    start_time = int(time.time())
    loop_no = 0
    while int(time.time()) - start_time < duration:
        loop_no += 1
        tc_logger.info("***** {} *****".format(loop_no))
        set_up(level='loop', loop=loop_no)
        # Optionally start event tracing in the background for this loop
        if test_conf["event_trace"] is True:
            command = "{} {} > {} &".format(
                test_conf["event_trace_home"] + "/" + "trace_enable.sh",
                test_conf["device_id"],
                test_conf["log_home"] + "/" + "event_trace_" + str(loop_no) + ".log")
            tc_logger.info(
                "Enable event_trace with command: {}".format(command))
            os.system(command)
        func(*args, **kw)
        tear_down(level='loop', loop=loop_no)
        # Stop event tracing if it was enabled
        if test_conf["event_trace"] is True:
            command = "{} {}".format(
                test_conf["event_trace_home"] + "/" + "trace_disable.sh",
                test_conf["device_id"])
            tc_logger.info(
                "Disable event_trace with command: {}".format(command))
            os.system(command)
        # Graceful stop: the marker file requests early termination
        if os.path.exists(test_conf["graceful_stop_point"]):
            tc_logger.info("File exists: " + test_conf["graceful_stop_point"])
            tc_logger.info("Exit loop before duration threshold")
            break
    tc_logger.info("Main Logic End Time = " +
                   time.strftime("%Y-%m-%d %H:%M:%S"))
    tc_logger.info("Main Logic End Time = " + str(int(time.time())))
def wrapper(*args, **kwargs):
    # Conditional-skip decorator body: when the closure's `condition` holds,
    # log the skip `reason` instead of invoking the wrapped function.
    if condition:
        tc_logger.info('[Skip] {}'.format(reason))
    else:
        func(*args, **kwargs)