Example #1
def parsing_run(target_exec_dir, target):
    # get the test case definition files
    config_files = server_utils.get_cases_def_files(target_exec_dir)
    logging.debug("the selected configuration files are %s" % config_files)
    dic = {}
    for i in range(0, len(config_files)):
        # run benchmarks selected in each configuration file
        # config_file = os.path.join(caliper_path.CALIPER_PRE, config_files[i])
        config_file = os.path.join(config_files[i])
        config, sections = server_utils.read_config_file(config_file)
        logging.debug(sections)

        # determine whether it is 'common', 'arm' or 'android'
        classify = config_files[i].split("/")[-1].strip().split("_")[0]
        logging.debug(classify)

        for i in range(0, len(sections)):
            dic[sections[i]] = {}
            # try to resolve the options from the configuration file
            try:
                run_file = config.get(sections[i], 'run')
                parser = config.get(sections[i], 'parser')
            except Exception:
                raise AttributeError("There is no option value for parser")

            print_format()
            logging.info("Parsing %s" % sections[i])
            bench = os.path.join(classify, sections[i])

            try:
                result = parse_all_cases(target_exec_dir, target, bench,
                                         sections[i], run_file, parser, dic)
            except Exception:
                logging.info("Parsing %s Exception" % sections[i])
                crash_handle.main()
                print_format()
                run_flag = server_utils.get_fault_tolerance_config(
                    'fault_tolerance', 'run_error_continue')
                if run_flag == 1:
                    continue
                else:
                    return result
            else:
                logging.info("Parsing %s Finished" % sections[i])
                print_format()
    outfp = open(
        os.path.join(
            caliper_path.folder_ope.workspace,
            caliper_path.folder_ope.name.strip() + "/final_parsing_logs.yaml"),
        'w')
    outfp.write(yaml.dump(dic, default_flow_style=False))
    outfp.close()
    return 0
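The loop above expects each cases definition file to be an INI-style file whose sections name a benchmark and carry at least a 'run' and a 'parser' option. A minimal sketch of reading such a file with ConfigParser (the file name and option values are illustrative, not taken from the Caliper sources):

import ConfigParser  # Python 2; on Python 3 this module is named configparser

config = ConfigParser.ConfigParser()
config.read("common_cases_def.cfg")  # hypothetical cases definition file
for section in config.sections():
    # each section is a benchmark; 'run' and 'parser' mirror what parsing_run reads
    run_file = config.get(section, 'run')  # e.g. "lmbench_run.cfg"
    parser = config.get(section, 'parser')  # e.g. "lmbench_parser.py"
    print("%s -> run=%s, parser=%s" % (section, run_file, parser))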
Example #2
def build_caliper(target_arch, flag=0):
    """
    target_arch means to build caliper for the specified arch
    flag means to build for the target or the local machine (namely the server)
        0: means for the target
        1: means for the server
    """

    if target_arch:
        arch = target_arch
    else:
        arch = 'x86_64'
    # get the files list of 'cfg'
    files_list = server_utils.get_cases_def_files(arch)
    logging.debug("config files are %s" % files_list)

    source_build_file = caliper_path.SOURCE_BUILD_FILE
    des_build_file = os.path.join(TMP_DIR, caliper_path.BUILD_FILE)
    logging.info("destination file of building is %s" % des_build_file)

    for i in range(0, len(files_list)):
        # get the directory, such as 'common','server' and so on
        dir_name = files_list[i].strip().split("/")[-1].split("_")[0]

        config = ConfigParser.ConfigParser()
        config.read(files_list[i])

        sections = config.sections()
        for i in range(0, len(sections)):
            if os.path.exists(des_build_file):
                os.remove(des_build_file)
            shutil.copyfile(os.path.abspath(source_build_file), des_build_file)

            try:
                result = generate_build(config, sections[i],
                                        dir_name, des_build_file)
            except Exception as e:
                logging.info(e)
            else:
                if result:
                    return result
            result = build_each_tool(dir_name, sections[i],
                                        des_build_file, target_arch)
            if os.path.exists(des_build_file):
                os.remove(des_build_file)
            if result:
                build_flag = server_utils.get_fault_tolerance_config(
                                                "fault_tolerance",
                                                "build_error_continue")
                if build_flag == 1:
                    continue
                else:
                    return result
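Both the build and run paths consult the same fault-tolerance switch: server_utils.get_fault_tolerance_config('fault_tolerance', 'build_error_continue' or 'run_error_continue') returns 1 when failures should be tolerated. A self-contained sketch of that control pattern (the config reader and builder below are stand-ins, not the real Caliper helpers):

def get_fault_tolerance_config(section, option, default=1):
    # stand-in: the real helper reads the value from a Caliper config file
    return default

def build_one(section):
    # hypothetical builder; returns non-zero on failure
    return 0

def build_all(sections):
    for section in sections:
        result = build_one(section)
        if result:
            if get_fault_tolerance_config("fault_tolerance",
                                          "build_error_continue") == 1:
                continue  # tolerate the failure and build the remaining tools
            return result  # abort on the first failure
    return 0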
Example #3
def parsing_run(sections, run_case_list):
    dic = {}
    for i in range(0, len(sections)):
        dic[sections[i]] = {}
        # try to resolve the options from the configuration file
        try:
            run_file = sections[i] + '_run.cfg'
            parser = sections[i] + '_parser.py'
        except Exception:
            raise AttributeError("There is no option value for parser")

        logging.info("=" * 55)
        logging.info("Parsing %s" % sections[i])
        bench = os.path.join(caliper_path.BENCHS_DIR, sections[i], 'defaults')

        try:
            result = parse_all_cases(bench,sections[i], parser, dic, run_case_list)
        except Exception:
            logging.info("Parsing %s Exception" % sections[i])
            crash_handle.main()
            logging.info("=" * 55)
            run_flag = server_utils.get_fault_tolerance_config(
                'fault_tolerance', 'run_error_continue')
            if run_flag == 1:
                continue
            else:
                return result
        else:
            logging.info("Parsing %s Finished" % sections[i])
            logging.info("=" * 55)
        try:
            logging.info("Parsing json %s" % sections[i])
            log_bench = os.path.join(Folder.exec_dir, sections[i])
            logfile = log_bench + "_output.log"
            outfile_name = sections[i] + '.json'
            outfile = os.path.join(Folder.json_dir, outfile_name)
            if not os.path.exists(Folder.json_dir):
                os.mkdir(Folder.json_dir)
            parser_case(sections[i], parser, sections[i], logfile, outfile, 'json')
            # parser_json(sections[i],  parser, logfile)
        except Exception as e:
            logging.info(e)
        else:
            logging.info("Parsing json %s Finished" % sections[i])

    outfp = open(os.path.join(caliper_path.folder_ope.workspace,
                              caliper_path.folder_ope.name.strip()
                              + "/final_parsing_logs.yaml"), 'w')
    outfp.write(yaml.dump(dic, default_flow_style=False))
    outfp.close()
    return 0
Example #4
def parsing_run(target_exec_dir, target):
    # get the test case definition files
    config_files = server_utils.get_cases_def_files(target_exec_dir)
    logging.debug("the selected configuration files are %s" % config_files)
    dic = {}
    for i in range(0, len(config_files)):
        # run benchmarks selected in each configuration file
        # config_file = os.path.join(caliper_path.CALIPER_PRE, config_files[i])
        config_file = os.path.join(config_files[i])
        config, sections = server_utils.read_config_file(config_file)
        logging.debug(sections)

        # determine whether it is 'common', 'arm' or 'android'
        classify = config_files[i].split("/")[-1].strip().split("_")[0]
        logging.debug(classify)

        for i in range(0, len(sections)):
            dic[sections[i]] = {}
            # try to resolve the options from the configuration file
            try:
                run_file = config.get(sections[i], 'run')
                parser = config.get(sections[i], 'parser')
            except Exception:
                raise AttributeError("There is no option value for parser")

            print_format()
            logging.info("Parsing %s" % sections[i])
            bench = os.path.join(classify, sections[i])

            try:
                result = parse_all_cases(target_exec_dir, target, bench,
                                         sections[i], run_file, parser, dic)
            except Exception:
                logging.info("Parsing %s Exception" % sections[i])
                crash_handle.main()
                print_format()
                run_flag = server_utils.get_fault_tolerance_config(
                    'fault_tolerance', 'run_error_continue')
                if run_flag == 1:
                    continue
                else:
                    return result
            else:
                logging.info("Parsing %s Finished" % sections[i])
                print_format()
    outfp = open(os.path.join(caliper_path.folder_ope.workspace,
                              caliper_path.folder_ope.name.strip()
                              + "/final_parsing_logs.yaml"), 'w')
    outfp.write(yaml.dump(dic, default_flow_style=False))
    outfp.close()
    return 0
Example #5
def build_caliper(target_arch, flag=0, clear=0):
    """
    target_arch means to build caliper for the specified arch
    flag means to build for the target or the local machine (namely the server)
        0: means for the target
        1: means for the server
    """
    copy = 0
    global GEN_DIR, WS_GEN_DIR, BUILD_MAPPING_FILE, BUILD_MAPPING_DIR
    GEN_DIR = caliper_path.GEN_DIR
    WS_GEN_DIR = os.path.join(FOLDER.workspace, 'binary')

    prev_build_files = []
    current_build_files = []
    WS_prev_build_files = []
    WS_current_build_files = []

    if target_arch:
        arch = target_arch
    else:
        arch = 'x86_64'
    # get the files list of 'cfg'
    files_list = server_utils.get_cases_def_files(arch)
    logging.debug("config files are %s" % files_list)
    BUILD_MAPPING_DIR = os.path.join(BUILD_MAPPING_DIR, arch)
    if not os.path.exists(BUILD_MAPPING_DIR):
        try:
            os.makedirs(BUILD_MAPPING_DIR)
        except:
            pass
    source_build_file = caliper_path.SOURCE_BUILD_FILE
    des_build_file = os.path.join(TMP_DIR, caliper_path.BUILD_FILE)
    logging.info("destination file of building is %s" % des_build_file)
    set_signals()

    # check for and delete binaries that were already built, if -c is used
    if clear:
        logging.info("=" * 55)
        logging.info(
            "WARNING: Please wait, don't run any other instance of caliper")
        for i in range(0, len(files_list)):
            # get the directory, such as 'common','server' and so on
            dir_name = files_list[i].strip().split("/")[-1].split("_")[0]
            config = ConfigParser.ConfigParser()
            config.read(files_list[i])
            sections = config.sections()
            for i in range(0, len(sections)):
                BUILD_MAPPING_FILE = os.path.join(BUILD_MAPPING_DIR,
                                                  sections[i] + '.yaml')
                with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                    fp = open(BUILD_MAPPING_FILE)
                    dic = yaml.load(fp)
                    fp.close()
                    if type(dic) != dict:
                        dic = {}
                    if sections[i] in dic.keys():
                        for file in dic[sections[i]]['binaries']:
                            try:
                                shutil.rmtree(file)
                            except:
                                pass
                        dic[sections[i]]['binaries'] = []
                        dic[sections[i]]['ProcessID'] = 0
                    fp = open(BUILD_MAPPING_FILE, 'w')
                    fp.write(yaml.dump(dic, default_flow_style=False))
                    fp.close()
        logging.info("It is safe to run caliper now")
        logging.info("=" * 55)

    # STARTING THE BUILD
    for i in range(0, len(files_list)):
        # get the directory, such as 'common','server' and so on
        dir_name = files_list[i].strip().split("/")[-1].split("_")[0]
        config = ConfigParser.ConfigParser()
        config.read(files_list[i])
        sections = config.sections()

        for i in range(0, len(sections)):
            BUILD = 0
            BUILD_MAPPING_FILE = os.path.join(BUILD_MAPPING_DIR,
                                              sections[i] + '.yaml')
            reset_binary_mapping()

            try:
                #Lock the file and modify it if this is the first process which is building the tool
                with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                    fp = open(BUILD_MAPPING_FILE)
                    dic = yaml.load(fp)
                    if type(dic) != dict:
                        dic = {}
                    fp.close()
                    if sections[i] not in dic.keys():
                        dic[sections[i]] = {}
                        dic[sections[i]]['binaries'] = []
                        dic[sections[i]]['ProcessID'] = os.getpid()
                        BUILD = 1
                    fp = open(BUILD_MAPPING_FILE, 'w')
                    fp.write(yaml.dump(dic, default_flow_style=False))
                    fp.close()

                # checking if the binaries field is empty; empty means the previous build failed
                if not dic[sections[i]]['binaries']:
                    BUILD = 1

                # Checking if the tool is already built or is being built by another process
                if dic[sections[i]]['ProcessID'] not in currentProcess:
                    # We shall continue to build the next tools and we'll copy these binaries later
                    logging.info("=" * 55)
                    logging.info(
                        "%s is being built by some other process, we'll build the remaining tools"
                        % sections[i])
                    continue
            except Exception as e:
                logging.debug(e)
                sys.exit(1)

            if BUILD == 0:
                #Collecting the build files in the Workspace binary dir and main binary dir
                WS_prev_build_files = getAllFilesRecursive(WS_GEN_DIR)
                prev_build_files = getAllFilesRecursive(GEN_DIR)

                #checking if the required binaries are present or not in the main binary dir
                for j in dic[sections[i]]['binaries']:
                    if j not in prev_build_files:
                        if j != BUILD_MAPPING_FILE:
                            # the binaries are not present, so we have to build them
                            BUILD = 1

            if BUILD == 1:
                if os.path.exists(des_build_file):
                    os.remove(des_build_file)
                shutil.copyfile(os.path.abspath(source_build_file),
                                des_build_file)

                try:
                    result = generate_build(config, sections[i], dir_name,
                                            des_build_file)
                except Exception as e:
                    logging.info(e)
                else:
                    if result:
                        return result

                result = build_each_tool(dir_name, sections[i], des_build_file,
                                         target_arch)
                if result:
                    build_flag = server_utils.get_fault_tolerance_config(
                        "fault_tolerance", "build_error_continue")
                    with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                        dic[sections[i]]['ProcessID'] = 0
                        dic[sections[i]]['binaries'] = []
                        fp = open(BUILD_MAPPING_FILE, 'w')
                        fp.write(yaml.dump(dic, default_flow_style=False))
                        fp.close()
                    if build_flag == 1:
                        # Build has failed, so reset the section entry in the build_mapping.yaml
                        continue
                    else:
                        return result

                if os.path.exists(des_build_file):
                    os.remove(des_build_file)
            else:
                #Copy the generated binaries to the Workspace binaries
                for j in dic[sections[i]]['binaries']:
                    if j not in WS_prev_build_files:
                        if j != BUILD_MAPPING_FILE:
                            WS_Dir = os.path.join(WS_GEN_DIR,
                                                  '/'.join(j.split('/')[5:-1]))
                            try:
                                os.makedirs(WS_Dir)
                            except:
                                pass
                            shutil.copy(j, WS_Dir)
                logging.info("=" * 55)
                logging.info("%s is already built", sections[i])

            #get the binary present in the WS binary dir
            #WS_prev_build_dir - WS_current_build_dir = "TOOL CHAIN RELATED BINARIES"
            #Copy the ToolChainRelated binaries to the main binary folder
            WS_current_build_files = getAllFilesRecursive(WS_GEN_DIR)
            for files in WS_current_build_files:
                if files not in WS_prev_build_files:
                    deflocation = os.path.join(str(GEN_DIR),
                                               '/'.join(files.split('/')[6:]))
                    try:
                        os.makedirs('/'.join(deflocation.split('/')[:-1]))
                    except:
                        pass
                    if not os.path.exists(deflocation):
                        shutil.copy(files, deflocation)
                    (dic[sections[i]]['binaries']).append(str(deflocation))
            dic[sections[i]]['ProcessID'] = 0
            with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                fp = open(BUILD_MAPPING_FILE)
                temp = yaml.load(fp)
                if type(temp) != dict:
                    temp = {}
                fp.close()
                copy_dic(temp, dic, sections[i])
                fp = open(BUILD_MAPPING_FILE, 'w')
                fp.write(yaml.dump(dic, default_flow_style=False))
                fp.close()
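The build-mapping bookkeeping above follows a lock / read YAML / modify / write-back pattern through client_utils.SimpleFlock. A self-contained sketch of that pattern using fcntl directly (SimpleFlock's real implementation, including its timeout argument, may differ; the file name and keys are illustrative):

import fcntl
import yaml

class SimpleFlockSketch(object):
    """Minimal stand-in for client_utils.SimpleFlock: an exclusive flock on a file."""
    def __init__(self, path):
        self.path = path
        self.fd = None

    def __enter__(self):
        self.fd = open(self.path, 'a+')
        fcntl.flock(self.fd, fcntl.LOCK_EX)  # block until we own the lock
        return self.fd

    def __exit__(self, exc_type, exc_val, exc_tb):
        fcntl.flock(self.fd, fcntl.LOCK_UN)
        self.fd.close()

def claim_build(mapping_file, section, pid):
    # read-modify-write the mapping under the lock, as build_caliper does
    with SimpleFlockSketch(mapping_file):
        with open(mapping_file) as fp:
            dic = yaml.safe_load(fp) or {}
        if section not in dic:
            dic[section] = {'binaries': [], 'ProcessID': pid}
        with open(mapping_file, 'w') as fp:
            fp.write(yaml.dump(dic, default_flow_style=False))
    return dic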
Example #6
def build_caliper(target_arch, flag=0,clear=0):
    """
    target_arch means to build caliper for the specified arch
    flag means to build for the target or the local machine (namely the server)
        0: means for the target
        1: means for the server
    """
    copy = 0
    global GEN_DIR, WS_GEN_DIR,BUILD_MAPPING_FILE,BUILD_MAPPING_DIR
    GEN_DIR = caliper_path.GEN_DIR
    WS_GEN_DIR = os.path.join(FOLDER.workspace, 'binary')

    prev_build_files = []
    current_build_files = []
    WS_prev_build_files = []
    WS_current_build_files = []

    if target_arch:
        arch = target_arch
    else:
        arch = 'x86_64'
    # get the files list of 'cfg'
    files_list = server_utils.get_cases_def_files(arch)
    logging.debug("config files are %s" % files_list)
    BUILD_MAPPING_DIR = os.path.join(BUILD_MAPPING_DIR,arch)
    if not os.path.exists(BUILD_MAPPING_DIR):
        try:
            os.makedirs(BUILD_MAPPING_DIR)
        except:
            pass
    source_build_file = caliper_path.SOURCE_BUILD_FILE
    des_build_file = os.path.join(TMP_DIR, caliper_path.BUILD_FILE)
    logging.info("destination file of building is %s" % des_build_file)
    set_signals()

    # check for and delete binaries that were already built, if -c is used
    if clear:
        logging.info("=" * 55)
        logging.info("WARNING: Please wait, don't run any other instance of caliper")
        for i in range(0, len(files_list)):
            # get the directory, such as 'common','server' and so on
            dir_name = files_list[i].strip().split("/")[-1].split("_")[0]
            config = ConfigParser.ConfigParser()
            config.read(files_list[i])
            sections = config.sections()
            for i in range(0, len(sections)):
                BUILD_MAPPING_FILE = os.path.join(BUILD_MAPPING_DIR, sections[i] + '.yaml')
                with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                    fp = open(BUILD_MAPPING_FILE)
                    dic = yaml.load(fp)
                    fp.close()
                    if type(dic) != dict:
                        dic = {}
                    if sections[i] in dic.keys():
                        for file in dic[sections[i]]['binaries']:
                            try:
                                shutil.rmtree(file)
                            except:
                                pass
                        dic[sections[i]]['binaries'] = []
                        dic[sections[i]]['ProcessID'] = 0
                    fp = open(BUILD_MAPPING_FILE, 'w')
                    fp.write(yaml.dump(dic, default_flow_style=False))
                    fp.close()
        logging.info("It is safe to run caliper now")
        logging.info("=" * 55)

    # STARTING THE BUILD
    for i in range(0, len(files_list)):
        # get the directory, such as 'common','server' and so on
        dir_name = files_list[i].strip().split("/")[-1].split("_")[0]
        config = ConfigParser.ConfigParser()
        config.read(files_list[i])
        sections = config.sections()

        for i in range(0, len(sections)):
            BUILD = 0
            BUILD_MAPPING_FILE = os.path.join(BUILD_MAPPING_DIR, sections[i] + '.yaml')
            reset_binary_mapping()

            try:
                #Lock the file and modify it if this is the first process which is building the tool
                with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                    fp = open(BUILD_MAPPING_FILE)
                    dic = yaml.load(fp)
                    if type(dic) != dict:
                        dic = {}
                    fp.close()
                    if sections[i] not in dic.keys():
                        dic[sections[i]] = {}
                        dic[sections[i]]['binaries'] = []
                        dic[sections[i]]['ProcessID'] = os.getpid()
                        BUILD = 1
                    fp = open(BUILD_MAPPING_FILE, 'w')
                    fp.write(yaml.dump(dic, default_flow_style=False))
                    fp.close()

                # checking if the binaries field is empty; empty means the previous build failed
                if not dic[sections[i]]['binaries']:
                    BUILD = 1

                # Checking if the tool is already built or is being built by another process
                if dic[sections[i]]['ProcessID'] not in currentProcess:
                    # We shall continue to build the next tools and we'll copy these binaries later
                    logging.info("=" * 55)
                logging.info("%s is being built by some other process, we'll build the remaining tools" % sections[i])
                    continue
            except Exception as e:
                logging.debug(e)
                sys.exit(1)


            if BUILD == 0:
                #Collecting the build files in the Workspace binary dir and main binary dir
                WS_prev_build_files = getAllFilesRecursive(WS_GEN_DIR)
                prev_build_files = getAllFilesRecursive(GEN_DIR)

                #checking if the required binaries are present or not in the main binary dir
                for j in dic[sections[i]]['binaries']:
                    if j not in prev_build_files:
                        if j != BUILD_MAPPING_FILE:
                            # the binaries are not present, so we have to build them
                            BUILD = 1


            if BUILD == 1:
                if os.path.exists(des_build_file):
                    os.remove(des_build_file)
                shutil.copyfile(os.path.abspath(source_build_file), des_build_file)

                try:
                    result = generate_build(config, sections[i],
                                            dir_name, des_build_file)
                except Exception as e:
                    logging.info(e)
                else:
                    if result:
                        return result

                result = build_each_tool(dir_name, sections[i],
                                         des_build_file, target_arch)
                if result:
                    build_flag = server_utils.get_fault_tolerance_config("fault_tolerance",
                                 "build_error_continue")
                    with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                         dic[sections[i]]['ProcessID'] = 0
                         dic[sections[i]]['binaries'] = []
                         fp = open(BUILD_MAPPING_FILE, 'w')
                         fp.write(yaml.dump(dic, default_flow_style=False))
                         fp.close()
                    if build_flag == 1:
                        # Build has failed, so reset the section entry in the build_mapping.yaml
                        continue
                    else:
                        return result

                if os.path.exists(des_build_file):
                    os.remove(des_build_file)
            else:
                #Copy the generated binaries to the Workspace binaries
                for j in dic[sections[i]]['binaries']:
                    if j not in WS_prev_build_files:
                        if j != BUILD_MAPPING_FILE:
                            WS_Dir = os.path.join(WS_GEN_DIR,'/'.join(j.split('/')[5:-1]))
                            try:
                                os.makedirs(WS_Dir)
                            except:
                                pass
                            shutil.copy(j, WS_Dir)
                logging.info("=" * 55)
                logging.info("%s is already built", sections[i])

            #get the binary present in the WS binary dir
            #WS_prev_build_dir - WS_current_build_dir = "TOOL CHAIN RELATED BINARIES"
            #Copy the ToolChainRelated binaries to the main binary folder
            WS_current_build_files = getAllFilesRecursive(WS_GEN_DIR)
            for files in WS_current_build_files:
                if files not in WS_prev_build_files:
                    deflocation = os.path.join(str(GEN_DIR) , '/'.join(files.split('/')[6:]))
                    try:
                        os.makedirs('/'.join(deflocation.split('/')[:-1]))
                    except:
                        pass
                    if not os.path.exists(deflocation):
                        shutil.copy(files, deflocation)
                    (dic[sections[i]]['binaries']).append(str(deflocation))
            dic[sections[i]]['ProcessID'] = 0
            with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                fp = open(BUILD_MAPPING_FILE)
                temp = yaml.load(fp)
                if type(temp) != dict:
                    temp = {}
                fp.close()
                copy_dic(temp,dic,sections[i])
                fp = open(BUILD_MAPPING_FILE, 'w')
                fp.write(yaml.dump(dic, default_flow_style=False))
                fp.close()
Example #7
def caliper_run(target_exec_dir, target):
    # get the test case definition files
    config_files = server_utils.get_cases_def_files(target_exec_dir)
    logging.debug("the selected configuration files are %s" % config_files)

    for i in range(0, len(config_files)):
        # run benchmarks selected in each configuration file
        # config_file = os.path.join(caliper_path.CALIPER_PRE, config_files[i])
        config_file = os.path.join(config_files[i])
        config, sections = server_utils.read_config_file(config_file)
        logging.debug(sections)

        # determine whether it is 'common', 'arm' or 'android'
        classify = config_files[i].split("/")[-1].strip().split("_")[0]
        logging.debug(classify)

        for i in range(0, len(sections)):
            # run for each benchmark
            target_arch = server_utils.get_host_arch(target)
            build_name = sections[i] + '_' + target_arch + '.suc'
            build_suc = os.path.join(Folder.build_dir, build_name)
            if not os.path.exists(build_suc):
                continue
            build_host_name = sections[i] + '_' + \
                    server_utils.get_local_machine_arch() + '.fail'
            if os.path.exists(build_host_name):
                continue

            # try to resolve the options from the configuration file
            try:
                run_file = config.get(sections[i], 'run')
                parser = config.get(sections[i], 'parser')
            except Exception:
                raise AttributeError("There is no option value for parser")

            print_format()
            logging.info("Running %s" % sections[i])
            bench = os.path.join(classify, sections[i])
            try:
                result = run_all_cases(target_exec_dir, target, bench,
                                       sections[i], run_file)
            except Exception:
                logging.info("Running %s Exception" % sections[i])
                crash_handle.main()
                print_format()
                if sections[i] == "ltp":
                    try:
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/; fi"
                        )
                    except Exception:
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then fuser -km /mnt/caliper_nfs ;fi"
                        )
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/ ;fi"
                        )
                run_flag = server_utils.get_fault_tolerance_config(
                    'fault_tolerance', 'run_error_continue')
                if run_flag == 1:
                    continue
                else:
                    return result
            else:
                logging.info("Running %s Finished" % sections[i])
                if sections[i] == "ltp":
                    try:
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/ ;fi"
                        )
                    except Exception:
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then fuser -km /mnt/caliper_nfs/ ;fi"
                        )
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/ ;fi"
                        )
                print_format()

    return 0
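caliper_run only executes a benchmark when the build phase left a success marker behind: a file named <section>_<target_arch>.suc in the build directory, and no matching .fail marker for the local arch. A hedged sketch of that gate (directory layout and arch strings are assumptions; the original code checks the bare .fail filename, so joining it with the build directory here is an assumption too):

import os

def should_run(build_dir, section, target_arch, host_arch):
    # mirror the marker-file checks at the top of caliper_run's inner loop
    build_suc = os.path.join(build_dir, "%s_%s.suc" % (section, target_arch))
    build_fail = os.path.join(build_dir, "%s_%s.fail" % (section, host_arch))
    if not os.path.exists(build_suc):
        return False  # never built successfully for the target
    if os.path.exists(build_fail):
        return False  # the host-side build failed
    return True

# e.g. should_run('/tmp/caliper_build', 'ltp', 'arm64', 'x86_64')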
Example #8
def run_all_cases(target_exec_dir, target, kind_bench, bench_name, run_file):
    """
    function: run one benchmark which was selected in the configuration files
    """
    try:
        # get the abspath, which is the filename of the run config for the benchmark
        bench_conf_file = os.path.join(caliper_path.config_files.tests_cfg_dir,
                                       kind_bench, run_file)
        # get the config sections for the benchmark
        configRun, sections_run = server_utils.read_config_file(
            bench_conf_file)
    except AttributeError as e:
        raise AttributeError
    except Exception:
        raise
    logging.debug("the sections to run are: %s" % sections_run)
    if not os.path.exists(Folder.exec_dir):
        os.mkdir(Folder.exec_dir)
    log_bench = os.path.join(Folder.exec_dir, bench_name)
    logfile = log_bench + "_output.log"
    tmp_log_file = log_bench + "_output_tmp.log"
    if os.path.exists(logfile):
        os.remove(logfile)

    starttime = datetime.datetime.now()
    if os.path.exists(Folder.caliper_log_file):
        sections = bench_name + " EXECUTION"
        fp = open(Folder.caliper_log_file, "r")
        f = fp.readlines()
        fp.close()
        op = open(Folder.caliper_log_file, "w")
        for line in f:
            if not (sections in line):
                op.write(line)
        op.close()
    result = subprocess.call(
        "echo '$$ %s EXECUTION START: %s' >> %s" %
        (bench_name, str(starttime)[:19], Folder.caliper_log_file),
        shell=True)
    bench_test = "ltp"
    if bench_name == bench_test:
        tar_ip = settings.get_value('CLIENT', 'ip', type=str)
        target.run(
            "if [[ ! -e /mnt/caliper_nfs ]]; then mkdir -p /mnt/caliper_nfs; fi"
        )
        # FIXME: now that we create the folder, why not mount it directly here
        try:
            tar_mask = ".".join(tar_ip.split(".")[0:3])
            p1 = subprocess.Popen(["ifconfig"], stdout=subprocess.PIPE)
            p2 = subprocess.Popen(["grep", tar_mask],
                                  stdin=p1.stdout,
                                  stdout=subprocess.PIPE)
            p1.stdout.close()
            output, err = p2.communicate()
            output = output.strip()
            host_ip = output.split("inet addr:")[1].split(" ")[0]
        except Exception:
            logging.debug("Unable to get the host_ip")
        try:
            mount_cmd = target.run(
                "mount -t nfs %s:/opt/caliper_nfs /mnt/caliper_nfs" %
                (host_ip))
        except Exception:
            try:
                umount_cmd = target.run("umount /mnt/caliper_nfs/")
                mount_cmd = target.run(
                    "mount -t nfs %s:/opt/caliper_nfs /mnt/caliper_nfs" %
                    (host_ip))
            except Exception:
                logging.debug("Unable to mount")
                return result
        readme_file = log_bench + "_README"
        resultltp = subprocess.call("touch %s" % (readme_file), shell=True)
        resultltp = subprocess.call(
            "echo 'The categorization of ltp in caliper is\nCATEGORY\t\t\t\t\t\tSCENARIOS OF LTP\n\n[command]\t\t\t\t\t\tcommands\n[cpu]\t\t\t\t\t\tsched,cpuhotplug\n[memory]\t\t\t\t\t\tmm.numa,hugetlb\n[dio]\t\t\t\t\t\tdio,io,dma_thread_diotest,timers\n[filesystem]\t\t\t\t\t\tfilecaps,fs,fs_bind,fs_ext4,fs_perms_simple,fs_readonly\n[kernel/\t\t\t\t\t\tsyscalls,controllers,pty,containers,admin_tools,modules,can\n[proc]\t\t\t\t\t\tipc,hyperthreading,nptl,cap_bounds,connectors,pipes\n\n\nltp_output.log contains the screenshot of complete ltp execution and ltp_parser.log contains the information regarding the number of tests executed and among them which all have passed failed or skipped.\n\nFor more information regarding a particular category please see ltp_<category>_output.log which contains the output screen and parser log for that particular category' >> %s"
            % (readme_file),
            shell=True)
    # for each command in run config file, read the config for the benchmark
    for i in range(0, len(sections_run)):
        flag = 0
        try:
            category = configRun.get(sections_run[i], 'category')
            command = configRun.get(sections_run[i], 'command')
        except Exception:
            logging.debug("no value for the %s" % sections_run[i])
            continue
        if bench_name == bench_test:
            subsection = sections_run[i].split(" ")[1]
            subsection_file = log_bench + "_" + subsection + "_output.log"
        if os.path.exists(tmp_log_file):
            os.remove(tmp_log_file)

        server_run_command = get_server_command(kind_bench, sections_run[i])
        logging.debug("Get the server command is: %s" % server_run_command)
        # run the command of the benchmarks
        try:
            flag = run_kinds_commands(sections_run[i], server_run_command,
                                      tmp_log_file, kind_bench, target,
                                      command)
        except Exception as e:
            logging.info(e)
            crash_handle.main()
            if bench_name == bench_test:
                move_logs = subprocess.call(
                    "cp /opt/caliper_nfs/ltp_log/* %s " % (Folder.exec_dir),
                    shell=True)
            server_utils.file_copy(logfile, tmp_log_file, 'a+')
            if os.path.exists(tmp_log_file):
                os.remove(tmp_log_file)
            run_flag = server_utils.get_fault_tolerance_config(
                'fault_tolerance', 'run_error_continue')
            if run_flag == 1:
                continue
            else:
                return result
        else:
            if bench_name == bench_test:
                move_logs = subprocess.call(
                    "cp /opt/caliper_nfs/ltp_log/* %s " % (Folder.exec_dir),
                    shell=True)
                if os.path.exists(subsection_file):
                    server_utils.file_copy(tmp_log_file, subsection_file, 'a+')
            server_utils.file_copy(logfile, tmp_log_file, 'a+')
            if flag != 1:
                logging.info("Something went wrong when running the command \"%s\"" %
                             command)
                if os.path.exists(tmp_log_file):
                    os.remove(tmp_log_file)
                crash_handle.main()

                run_flag = server_utils.get_fault_tolerance_config(
                    'fault_tolerance', 'run_error_continue')
                if run_flag == 1:
                    if bench_name != bench_test:
                        continue
                else:
                    return result
            if os.path.exists(tmp_log_file):
                os.remove(tmp_log_file)
Example #9
    def run_all_cases(self, kind_bench, bench_name, run_case_list):
        """
        function: run one benchmark which was selected in the configuration files
        """
        try:
            # get the abspath, which is the filename of the run config for the benchmark
            bench_conf_file = os.path.join(kind_bench, 'main.yml')
            # get the config sections for the benchmark
            pf = open(bench_conf_file, 'r')
            values = yaml.load(pf.read())
            sections_run = values[bench_name].keys()
        except AttributeError as e:
            raise AttributeError
        except Exception:
            raise
        logging.debug("the sections to run are: %s" % sections_run)
        if not os.path.exists(Folder.exec_dir):
            os.mkdir(Folder.exec_dir)
        log_bench = os.path.join(Folder.exec_dir, bench_name)
        logfile = log_bench + "_output.log"
        tmp_log_file = log_bench + "_output_tmp.log"
        if os.path.exists(logfile):
            os.remove(logfile)

        starttime = datetime.datetime.now()
        if os.path.exists(Folder.caliper_log_file):
            sections = bench_name + " EXECUTION"
            fp = open(Folder.caliper_log_file, "r")
            f = fp.readlines()
            fp.close()
            op = open(Folder.caliper_log_file, "w")
            for line in f:
                if not (sections in line):
                    op.write(line)
            op.close()
        result = subprocess.call(
            "echo '$$ %s EXECUTION START: %s' >> %s" %
            (bench_name, str(starttime)[:19], Folder.caliper_log_file),
            shell=True)
        # for each command in run config file, read the config for the benchmark
        for section in sections_run:
            if section in run_case_list:
                if self.num == 0:
                    config_files = os.path.join(
                        caliper_path.config_files.config_dir,
                        'cases_config.json')
                    fp = open(config_files, 'r')
                    case_list = yaml.load(fp.read())
                    for dimension in case_list:
                        for i in range(len(case_list[dimension])):
                            for tool in case_list[dimension][i]:
                                for case in case_list[dimension][i][tool]:
                                    if case == section:
                                        self.num = case_list[dimension][i][
                                            tool][case][-1]
                flag = 0

                if os.path.exists(tmp_log_file):
                    os.remove(tmp_log_file)
                # run the command of the benchmarks

                try:
                    for j in range(int(self.num)):
                        flag = self.run_client_command(section, tmp_log_file,
                                                       bench_name)
                except Exception as e:
                    logging.info(e)
                    crash_handle.main()
                    server_utils.file_copy(logfile, tmp_log_file, 'a+')
                    if os.path.exists(tmp_log_file):
                        os.remove(tmp_log_file)

                    run_flag = server_utils.get_fault_tolerance_config(
                        'fault_tolerance', 'run_error_continue')
                    if run_flag == 1:
                        continue
                    else:
                        return result
                else:
                    server_utils.file_copy(logfile, tmp_log_file, 'a+')
                    if flag != 1:
                        logging.info(
                            "Something went wrong when running the command \"%s\"" %
                            section)

                        if os.path.exists(tmp_log_file):
                            os.remove(tmp_log_file)
                        crash_handle.main()

                        run_flag = server_utils.get_fault_tolerance_config(
                            'fault_tolerance', 'run_error_continue')
                        if run_flag == 1:
                            return result
                    if os.path.exists(tmp_log_file):
                        os.remove(tmp_log_file)
            else:
                continue

            endtime = datetime.datetime.now()
            subprocess.call(
                "echo '$$ %s EXECUTION STOP: %s' >> %s" %
                (section, str(endtime)[:19], Folder.caliper_log_file),
                shell=True)
            subprocess.call(
                "echo '$$ %s EXECUTION DURATION %s Seconds'>>%s" %
                (section,
                 (endtime - starttime).seconds, Folder.caliper_log_file),
                shell=True)
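The nested lookup over cases_config.json above implies a structure of dimension -> list of per-tool mappings -> tool -> case -> list whose last element is the repeat count stored in self.num. A hedged sketch of that traversal with invented data (the real file's keys and values may differ):

import yaml

cases_config = """
performance:
  - lmbench:
      bandwidth_memcpy: [enable, 3]
  - iozone:
      iozone_write: [enable, 1]
"""

case_list = yaml.safe_load(cases_config)
section = 'bandwidth_memcpy'
num = 0
for dimension in case_list:
    for entry in case_list[dimension]:
        for tool in entry:
            for case in entry[tool]:
                if case == section:
                    num = entry[tool][case][-1]  # last element: repeat count
print(num)  # -> 3 with the sketch data above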
Example #10
def caliper_run(target_exec_dir, server, target):
    # get the test case definition files
    config_files = server_utils.get_cases_def_files(target_exec_dir)
    logging.debug("the selected configuration files are %s" % config_files)

    for i in range(0, len(config_files)):
        # run benchmarks selected in each configuration file
        # config_file = os.path.join(caliper_path.CALIPER_PRE, config_files[i])
        config_file = os.path.join(config_files[i])
        config, sections = server_utils.read_config_file(config_file)
        logging.debug(sections)

        # determine whether it is 'common', 'arm' or 'android'
        classify = config_files[i].split("/")[-1].strip().split("_")[0]
        logging.debug(classify)

        if classify == "server" and server:
            try:
                server_ip = settings.get_value("SERVER", "ip", type=str)
                server_port = settings.get_value("SERVER", "port", type=int)
                server_user = settings.get_value("SERVER", "user", type=str)
                logging.info(
                    "Please wait while caliper triggers the server.py script on the server"
                )
                server_pwd = server.run("pwd").stdout
                server_pwd = server_pwd.split("\n")[0]
                server_caliper_dir = os.path.join(server_pwd, "caliper_server")
                server_caliper_dir = os.path.join(server_caliper_dir,
                                                  "server.py")
                server_user = server_user + '@' + server_ip
                script = server_caliper_dir + ' ' + str(server_port)
                subprocess.Popen(
                    ['ssh', '%s' % server_user,
                     'python %s' % script])

            except Exception as e:
                logging.info(e)
                raise AttributeError(
                    "Error in establishing connection with server")

        for i in range(0, len(sections)):
            # run for each benchmark
            target_arch = server_utils.get_host_arch(target)
            build_name = sections[i] + '_' + target_arch + '.suc'
            build_suc = os.path.join(Folder.build_dir, build_name)
            if not os.path.exists(build_suc):
                continue
            build_host_name = sections[i] + '_' + \
                    server_utils.get_local_machine_arch() + '.fail'
            if os.path.exists(build_host_name):
                continue

            # try to resolve the options from the configuration file
            try:
                run_file = config.get(sections[i], 'run')
                parser = config.get(sections[i], 'parser')
            except Exception:
                raise AttributeError("There is no option value for parser")

            print_format()

            logging.info("Running %s" % sections[i])
            bench = os.path.join(classify, sections[i])
            try:
                system_initialise(target)
                if classify == "server":
                    logging.info("Waiting for server to grant access")
                    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                    sock.connect((server_ip, server_port))
                    logging.info("%s" % str(sock.recv(1024)))

                result = run_all_cases(target_exec_dir, target, bench,
                                       sections[i], run_file)
                if classify == "server":
                    sock.send("1")
                    sock.close()
            except Exception:
                logging.info("Running %s Exception" % sections[i])
                crash_handle.main()
                print_format()
                if sections[i] == "ltp":
                    try:
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/; fi"
                        )
                    except Exception:
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then fuser -km /mnt/caliper_nfs ;fi"
                        )
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/ ;fi"
                        )
                run_flag = server_utils.get_fault_tolerance_config(
                    'fault_tolerance', 'run_error_continue')
                if run_flag == 1:
                    continue
                else:
                    return result
            else:
                logging.info("Running %s Finished" % sections[i])
                if sections[i] == "ltp":
                    try:
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/ ;fi"
                        )
                    except Exception:
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then fuser -km /mnt/caliper_nfs/ ;fi"
                        )
                        unmount = target.run(
                            "if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/ ;fi"
                        )
                print_format()

    return 0
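The server-classified benchmarks above coordinate with a remote server.py over a plain TCP handshake: connect, block on recv() until the server grants access, run the benchmark, then send "1" and close. A self-contained sketch of that client-side handshake (the host, port, and payloads are illustrative; the real server.py protocol may differ):

import socket

def run_with_server_grant(server_ip, server_port, run_benchmark):
    # connect and wait for the server to grant access before running
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((server_ip, server_port))
    grant = sock.recv(1024)  # blocks until server.py writes something
    print("server says: %r" % grant)
    try:
        result = run_benchmark()
    finally:
        sock.send(b"1")  # tell the server this side is done
        sock.close()
    return result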
Example #11
def run_all_cases(target_exec_dir, target, kind_bench, bench_name, run_file,
                  parser_file):
    """
    function: run one benchmark which was selected in the configuration files
    """
    try:
        # get the abspath, which is the filename of the run config for the benchmark
        bench_conf_file = os.path.join(caliper_path.config_files.tests_cfg_dir,
                                       kind_bench, run_file)
        # get the config sections for the benchmark
        configRun, sections_run = server_utils.read_config_file(
            bench_conf_file)
    except AttributeError as e:
        raise AttributeError
    except Exception:
        raise
    logging.debug("the sections to run are: %s" % sections_run)
    if not os.path.exists(Folder.exec_dir):
        os.mkdir(Folder.exec_dir)
    log_bench = os.path.join(Folder.exec_dir, bench_name)
    logfile = log_bench + "_output.log"
    tmp_log_file = log_bench + "_output_tmp.log"
    parser_result_file = log_bench + "_parser.log"
    tmp_parser_file = log_bench + "_parser_tmp.log"
    if os.path.exists(parser_result_file):
        os.remove(parser_result_file)
    if os.path.exists(logfile):
        os.remove(logfile)

    starttime = datetime.datetime.now()
    result = subprocess.call(
        "echo '$$ %s EXECUTION START: %s' >> %s" %
        (bench_name, str(starttime)[:19], Folder.caliper_log_file),
        shell=True)
    bench_test = "ltp"
    if bench_name == bench_test:
        tar_ip = settings.get_value('CLIENT', 'ip', type=str)
        target.run("if [[ ! -e /mnt/ltp ]]; then mkdir -p /mnt/ltp; fi")
        # FIXME: now that we create the folder, why not mount it directly here
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            # fix me , getting host ip to be optimised
            s.connect(("8.8.8.8", 80))
        except Exception:
            logging.debug(
                "Socket connection failed during ltp pre-requisite check")
        host_ip = s.getsockname()[0]
        try:
            xyz = target.run("mount -t nfs %s:/opt/caliper_nfs /mnt/ltp" %
                             (host_ip))
        except Exception:
            try:
                xyz = target.run("umount /mnt/ltp/")
                xyz = target.run("mount -t nfs %s:/opt/caliper_nfs /mnt/ltp" %
                                 (host_ip))
            except Exception:
                logging.debug("Unable to mount")
                return result
        readme_file = log_bench + "_README"
        resultltp = subprocess.call("touch %s" % (readme_file), shell=True)
        resultltp = subprocess.call(
            "echo 'The categorization of ltp in caliper is\nCATEGORY\t\t\t\t\t\tSCENARIOS OF LTP\n\n[command]\t\t\t\t\t\tcommands\n[cpu]\t\t\t\t\t\tsched,cpuhotplug\n[memory]\t\t\t\t\t\tmm.numa,hugetlb\n[dio]\t\t\t\t\t\tdio,io,dma_thread_diotest,timers\n[filesystem]\t\t\t\t\t\tfilecaps,fs,fs_bind,fs_ext4,fs_perms_simple,fs_readonly\n[kernel/\t\t\t\t\t\tsyscalls,controllers,pty,containers,admin_tools,modules,can\n[proc]\t\t\t\t\t\tipc,hyperthreading,nptl,cap_bounds,connectors,pipes\n\n\nltp_output.log contains the screenshot of complete ltp execution and ltp_parser.log contains the information regarding the number of tests executed and among them which all have passed failed or skipped.\n\nFor more information regarding a particular category please see ltp_<category>_output.log which contains the output screen and parser log for that particular category' >> %s"
            % (readme_file),
            shell=True)

    # for each command in run config file, read the config for the benchmark
    for i in range(0, len(sections_run)):
        flag = 0
        try:
            category = configRun.get(sections_run[i], 'category')
            scores_way = configRun.get(sections_run[i], 'scores_way')
            parser = configRun.get(sections_run[i], 'parser')
            command = configRun.get(sections_run[i], 'command')
        except Exception:
            logging.debug("no value for the %s" % sections_run[i])
            continue
        if bench_name == bench_test:
            subsection = sections_run[i].split(" ")[1]
            subsection_file = log_bench + "_" + subsection + "_output.log"
        if os.path.exists(tmp_parser_file):
            os.remove(tmp_parser_file)
        if os.path.exists(tmp_log_file):
            os.remove(tmp_log_file)

        server_run_command = get_server_command(kind_bench, sections_run[i])
        logging.debug("Get the server command is: %s" % server_run_command)
        # run the command of the benchmarks
        try:
            flag = run_kinds_commands(sections_run[i], server_run_command,
                                      tmp_log_file, kind_bench, target,
                                      command)
        except Exception as e:
            logging.info(e)
            crash_handle.main()
            if bench_name == bench_test:
                xyz = subprocess.call("mv /opt/caliper_nfs/ltp_log/* %s " %
                                      (Folder.exec_dir),
                                      shell=True)
            server_utils.file_copy(logfile, tmp_log_file, 'a+')
            if os.path.exists(tmp_log_file):
                os.remove(tmp_log_file)
            run_flag = server_utils.get_fault_tolerance_config(
                'fault_tolerance', 'run_error_continue')
            if run_flag == 1:
                continue
            else:
                return result
        else:
            if bench_name == bench_test:
                xyz = subprocess.call("mv /opt/caliper_nfs/ltp_log/* %s " %
                                      (Folder.exec_dir),
                                      shell=True)
                if os.path.exists(subsection_file):
                    server_utils.file_copy(tmp_log_file, subsection_file, 'a+')
            server_utils.file_copy(logfile, tmp_log_file, 'a+')
            if flag != 1:
                logging.info("Something went wrong when running the command \"%s\"" %
                             command)
                if os.path.exists(tmp_log_file):
                    os.remove(tmp_log_file)
                crash_handle.main()

                run_flag = server_utils.get_fault_tolerance_config(
                    'fault_tolerance', 'run_error_continue')
                if run_flag == 1:
                    if bench_name != bench_test:
                        continue
                else:
                    return result
        # parse the result in the tmp_log_file; the result is the output of
        # running the command
        try:
            logging.debug("Parsing the result of command: %s" % command)
            if bench_name == bench_test:
                outfp = open(tmp_parser_file, "w")
                outfp.write("%s" % (subsection))
                outfp.close()
                parser_result = parser_case(kind_bench, bench_name,
                                            parser_file, parser,
                                            subsection_file, tmp_parser_file)
            else:
                parser_result = parser_case(kind_bench, bench_name,
                                            parser_file, parser, tmp_log_file,
                                            tmp_parser_file)
        except Exception as e:
            logging.info(
                "Something went wrong when parsing the result of \"%s\"" %
                sections_run[i])
            logging.info(e)
            if os.path.exists(tmp_parser_file):
                os.remove(tmp_parser_file)
            if os.path.exists(tmp_log_file):
                os.remove(tmp_log_file)
Example #12
def caliper_run(target_exec_dir, target):
    # get the test case definition files
    config_files = server_utils.get_cases_def_files(target_exec_dir)
    logging.debug("the selected configuration files are %s" % config_files)

    for i in range(0, len(config_files)):
        # run benchmarks selected in each configuration file
        # config_file = os.path.join(caliper_path.CALIPER_PRE, config_files[i])
        config_file = os.path.join(config_files[i])
        config, sections = server_utils.read_config_file(config_file)
        logging.debug(sections)

        # determine whether it is 'common', 'arm' or 'android'
        classify = config_files[i].split("/")[-1].strip().split("_")[0]
        logging.debug(classify)

        for i in range(0, len(sections)):
            # run for each benchmark
            target_arch = server_utils.get_host_arch(target)
            build_name = sections[i] + '_' + target_arch + '.suc'
            build_suc = os.path.join(Folder.build_dir, build_name)
            if not os.path.exists(build_suc):
                continue
            build_host_name = sections[i] + '_' + \
                    server_utils.get_local_machine_arch() + '.fail'
            if os.path.exists(build_host_name):
                continue


            # try to resolve the options from the configuration file
            try:
                run_file = config.get(sections[i], 'run')
                parser = config.get(sections[i], 'parser')
            except Exception:
                raise AttributeError("There is no option value for parser")

            print_format()
            logging.info("Running %s" % sections[i])
            bench = os.path.join(classify, sections[i])
            try:
                system_initialise(target)
                result = run_all_cases(target_exec_dir, target, bench,
                                        sections[i], run_file)
            except Exception:
                logging.info("Running %s Exception" % sections[i])
                crash_handle.main()
                print_format()
                if sections[i]== "ltp":
                    try:
                        unmount = target.run("if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/; fi")
                    except Exception:
                        unmount = target.run("if  df -h |grep caliper_nfs  ; then fuser -km /mnt/caliper_nfs ;fi")
                        unmount = target.run("if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/ ;fi")
                run_flag = server_utils.get_fault_tolerance_config(
                                'fault_tolerance', 'run_error_continue')
                if run_flag == 1:
                    continue
                else:
                    return result
            else:
                logging.info("Running %s Finished" % sections[i])
                if sections[i] == "ltp":
                    try:
                        unmount = target.run("if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/ ;fi")
                    except Exception:
                        unmount = target.run("if  df -h |grep caliper_nfs  ; then fuser -km /mnt/caliper_nfs/ ;fi")
                        unmount = target.run("if  df -h |grep caliper_nfs  ; then umount /mnt/caliper_nfs/ ;fi")
                print_format()

    return 0
Example #13
0
def run_all_cases(target_exec_dir, target, kind_bench, bench_name,
                    run_file):
    """
    function: run one benchmark which was selected in the configuration files
    """
    try:
        # get the absolute path of the run config file for this benchmark
        bench_conf_file = os.path.join(
                                    caliper_path.config_files.tests_cfg_dir,
                                    kind_bench, run_file)
        # get the config sections for the benchmark
        configRun, sections_run = server_utils.read_config_file(
                                                    bench_conf_file)
    except AttributeError as e:
        raise AttributeError
    except Exception:
        raise
    logging.debug("the sections to run are: %s" % sections_run)
    if not os.path.exists(Folder.exec_dir):
        os.mkdir(Folder.exec_dir)
    log_bench = os.path.join(Folder.exec_dir, bench_name)
    logfile = log_bench + "_output.log"
    tmp_log_file = log_bench + "_output_tmp.log"
    if os.path.exists(logfile):
        os.remove(logfile)

    starttime = datetime.datetime.now()
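    # drop any stale "<benchmark> EXECUTION" entries from the caliper log
    # before appending a fresh start marker for this run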
    if os.path.exists(Folder.caliper_log_file):
        sections = bench_name + " EXECUTION"
        fp = open(Folder.caliper_log_file,"r")
        f = fp.readlines()
        fp.close()
        op = open(Folder.caliper_log_file,"w")
        for line in f:
            if not(sections in line):
                op.write(line)
        op.close()
    result = subprocess.call("echo '$$ %s EXECUTION START: %s' >> %s"
                            % (bench_name,
                                str(starttime)[:19],
                                Folder.caliper_log_file),
                            shell=True)
    bench_test = "ltp"
    if bench_name == bench_test:
        tar_ip = settings.get_value('CLIENT', 'ip', type=str)
        target.run("if [[ ! -e /mnt/caliper_nfs ]]; then mkdir -p /mnt/caliper_nfs; fi")
        # FIXME: now that we create the folder, why not mount it directly here?
        try:
             tar_mask = ".".join(tar_ip.split(".")[0:3])
             p1 = subprocess.Popen(["ifconfig"], stdout=subprocess.PIPE)
             p2 = subprocess.Popen(["grep", tar_mask], stdin=p1.stdout, stdout=subprocess.PIPE)
             p1.stdout.close()
             output,err = p2.communicate()
             output = output.strip()
             host_ip = output.split("inet addr:")[1].split(" ")[0]
        except Exception:
            logging.debug("Unable to get the host_ip" )
        try:
            mount_cmd = target.run("mount -t nfs %s:/opt/caliper_nfs /mnt/caliper_nfs" % (host_ip) )
        except Exception:
            try:
                umount_cmd = target.run("umount /mnt/caliper_nfs/")
                mount_cmd = target.run("mount -t nfs %s:/opt/caliper_nfs /mnt/caliper_nfs" % (host_ip) )
            except Exception:
                logging.debug("Unable to mount")
                return result
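        # write a README describing how the LTP scenarios are grouped into
        # caliper categories and which log files hold the per-category output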
        readme_file=log_bench+"_README"
        resultltp = subprocess.call("touch %s"
                             %(readme_file),shell=True)
        resultltp = subprocess.call("echo 'The categorization of ltp in caliper is\nCATEGORY\t\t\t\t\t\tSCENARIOS OF LTP\n\n[command]\t\t\t\t\t\tcommands\n[cpu]\t\t\t\t\t\tsched,cpuhotplug\n[memory]\t\t\t\t\t\tmm.numa,hugetlb\n[dio]\t\t\t\t\t\tdio,io,dma_thread_diotest,timers\n[filesystem]\t\t\t\t\t\tfilecaps,fs,fs_bind,fs_ext4,fs_perms_simple,fs_readonly\n[kernel/\t\t\t\t\t\tsyscalls,controllers,pty,containers,admin_tools,modules,can\n[proc]\t\t\t\t\t\tipc,hyperthreading,nptl,cap_bounds,connectors,pipes\n\n\nltp_output.log contains the screenshot of complete ltp execution and ltp_parser.log contains the information regarding the number of tests executed and among them which all have passed failed or skipped.\n\nFor more information regarding a particular category please see ltp_<category>_output.log which contains the output screen and parser log for that particular category' >> %s"
                  % (readme_file), shell=True)
    # for each command in run config file, read the config for the benchmark
    for i in range(0, len(sections_run)):
        flag = 0
        try:
            category = configRun.get(sections_run[i], 'category')
            command = configRun.get(sections_run[i], 'command')
        except Exception:
            logging.debug("no value for the %s" % sections_run[i])
            continue
        if bench_name == bench_test:
            subsection = sections_run[i].split(" ")[1]
            subsection_file = log_bench + "_" + subsection + "_output.log"
        if os.path.exists(tmp_log_file):
            os.remove(tmp_log_file)

        server_run_command = get_server_command(kind_bench, sections_run[i])
        logging.debug("Get the server command is: %s" % server_run_command)
        # run the command of the benchmarks
        try:
            flag = run_kinds_commands(sections_run[i], server_run_command,
                                      tmp_log_file, kind_bench,
                                      target, command)
        except Exception as e:
            logging.info(e)
            crash_handle.main()
            if bench_name == bench_test:
                move_logs = subprocess.call("cp /opt/caliper_nfs/ltp_log/* %s "
                                            % (Folder.exec_dir), shell=True)
            server_utils.file_copy(logfile, tmp_log_file, 'a+')
            if os.path.exists(tmp_log_file):
                os.remove(tmp_log_file)
            run_flag = server_utils.get_fault_tolerance_config(
                'fault_tolerance', 'run_error_continue')
            if run_flag == 1:
                continue
            else:
                return result
        else:
            if bench_name == bench_test:
                move_logs = subprocess.call("cp /opt/caliper_nfs/ltp_log/* %s "
                                % (Folder.exec_dir), shell=True)
                if os.path.exists(subsection_file):
                    server_utils.file_copy(tmp_log_file, subsection_file, 'a+')
            server_utils.file_copy(logfile, tmp_log_file, 'a+')
            if flag != 1:
                logging.info("There is wrong when running the command \"%s\""
                                % command)
                if os.path.exists(tmp_log_file):	
                    os.remove(tmp_log_file)
                crash_handle.main()

                run_flag = server_utils.get_fault_tolerance_config(
                                'fault_tolerance', 'run_error_continue')
                if run_flag == 1:
                    if bench_name != bench_test:
                        continue
                else:
                    return result
            if os.path.exists(tmp_log_file):
                os.remove(tmp_log_file)