def print_step_action(log, arg, msg):
    """Log one pipeline-step line, prefixed with [YES] or [NO].

    :param log: open log file handle, forwarded to log_msg
    :param arg: boolean flag saying whether the step is enabled
        (callers pass argparse store_true results — see print_steps)
    :param msg: human-readable step description
    """
    # Idiom fix: test truthiness instead of the non-idiomatic `is True`
    # identity check (behavior is identical for the bool flags callers pass).
    prefix = '\t\t[YES] ' if arg else '\t\t[NO] '
    log_msg(log, prefix + msg)
def run_csmith(num, csmith_args, compiler_run_args, end_time, stat):
    """Worker loop: repeatedly generate a Csmith test and compile it with every optset.

    :param num: worker index; selects the per-process scratch directory
    :param csmith_args: argument string forwarded to the csmith generator
    :param compiler_run_args: dict mapping optset name -> full compiler command string
    :param end_time: wall-clock deadline in time.time() units; -1 means run forever
    :param stat: shared statistics object (multiprocessing manager proxy) updated per compile
    """
    common.log_msg(logging.DEBUG, "Job #" + str(num))
    os.chdir(run_gen.process_dir + str(num))
    work_dir = os.getcwd()
    inf = (end_time == -1)
    while inf or end_time > time.time():
        # Sanity check that nothing moved our cwd out from under us.
        # NOTE(review): a bare `raise` with no active exception raises
        # RuntimeError — presumably used here as a crude assertion.
        if os.getcwd() != work_dir:
            raise
        # Wipe leftovers from the previous iteration.
        common.clean_dir(".")
        ret_code, output, err_output, time_expired, elapsed_time = \
            common.run_cmd([".." + os.sep + "csmith"] + csmith_args.split(),
                           csmith_timeout, num)
        if ret_code != 0:
            # Generator failed: just try again with a fresh test.
            continue
        # csmith writes the generated program to stdout; save it as func.c.
        test_file = open("func.c", "w")
        test_file.write(str(output, "utf-8"))
        test_file.close()
        for optset_name, compiler_run_arg in compiler_run_args.items():
            ret_code, output, err_output, time_expired, elapsed_time = \
                common.run_cmd(compiler_run_arg.split() + ["func.c"],
                               run_gen.compiler_timeout, num)
            if ret_code != 0:
                continue
            opt_stats = None
            stmt_stats = None
            # Only clang optsets produce the stats file/stderr format we can parse.
            if "clang" in optset_name:
                opt_stats = run_gen.StatsParser.parse_clang_opt_stats_file("func.stats")
                stmt_stats = run_gen.StatsParser.parse_clang_stmt_stats_file(str(err_output, "utf-8"))
            stat.add_stats(opt_stats, optset_name, run_gen.StatsVault.opt_stats_id)
            stat.add_stats(stmt_stats, optset_name, run_gen.StatsVault.stmt_stats_id)
            stat.update_yarpgen_runs(run_gen.ok)
def iterate_fasta(filename='../database/V-QUEST-reference-allele-db+no-period-references.clustalw.fasta'):
    """Iterate an IMGT V-gene reference FASTA and build an allele database.

    For every record, calls variants against the consensus sequence and
    collects accession/functionality/sequence metadata per allele.

    :param filename: path to the IMGT reference FASTA; fetched if missing
    :return: OrderedDict mapping allele name -> OrderedDict of metadata
        (imgt_accession, functional, seq, length, and optionally variants)
    """
    def strip_fasta_ID(s):
        # Strips out allele name from the IMGT fasta naming convention.
        s = s.split('|')
        return (s[1], s[3], s[0])  # (name, functional_value, accession)

    if not os.path.exists(filename):
        log.info('Fetching IMGT V gene reference nucleotide sequences')
        if not fetch_reference('IMGTGENEDB-ReferenceSequences.fasta-nt-WithGaps-F+ORF+inframeP'):
            log.error('Unable to find/fetch from IMGT V gene nucleotide references')
            os.sys.exit()
    current_gene = None
    return_db = OrderedDict()
    # IMGT uses '-' for gaps; normalize to '.' to match the allele sequences.
    consensus = get_consensus().lower().replace('-', '.')
    for record in SeqIO.parse(filename, 'fasta'):
        allele, functional, accession = strip_fasta_ID(record.description)
        seq = str(record.seq).lower().replace('-', '.')
        # Track the gene (the part before '*') across consecutive alleles.
        if not current_gene or allele.split('*')[0] != current_gene:
            current_gene = allele.split('*')[0]
        # Ungapped length of the allele sequence.
        length = len(seq.replace('.', ''))
        variants, seq, msg = call_variants.get_variants(seq, consensus)
        if msg:
            log.warn('\nAllele {}'.format(allele))
            common.log_msg(msg)
        return_db[allele] = OrderedDict({
            'imgt_accession': accession,
            'functional': functional,
            'seq': seq,
            'length': length
        })
        if variants:
            if __name__ != '__main__':
                # Bug fix: the Python 2 `sets.Set` class was deprecated and
                # removed; the built-in `set` is the drop-in replacement.
                variants = set([(x['pos'], x['op']) for x in variants])
            return_db[allele]['variants'] = variants
    return return_db
def prepare_env_and_blame(fail_dir, valid_res, fail_target, out_dir, lock, num, inplace=False):
    """Enter the failing test's directory and delegate to blame() if possible.

    Returns False when the target's compiler spec has no known blame options;
    otherwise returns whatever blame() returns.
    """
    spec_name = fail_target.specs.name
    common.log_msg(logging.DEBUG, "Blaming target: " + fail_target.name + " | " + spec_name)
    os.chdir(fail_dir)
    if spec_name in compilers_blame_opts:
        return blame(fail_dir, valid_res, fail_target, out_dir, lock, num, inplace)
    common.log_msg(logging.DEBUG, "We can't blame " + fail_target.name + " (process " + str(num) + ")")
    return False
def add_specs(spec_list):
    """Validate a compiler-spec config list and register it as a CompilerSpecs.

    Exits the process (via common.print_and_exit) on an unknown key.
    """
    spec_list = check_config_list(spec_list, spec_list_len, "Error in spec string, check it: ")
    try:
        # CompilerSpecs registers itself globally; the instance is not kept.
        CompilerSpecs(*spec_list[:5])
        common.log_msg(logging.DEBUG, "Finished adding compiler spec")
    except KeyError:
        common.print_and_exit("Can't find key!")
def blame(fail_dir, valid_res, fail_target, out_dir, lock, num, inplace):
    """Bisect which optimization caused a failing test and log/save the result.

    :param fail_dir: directory containing the failing test
    :param valid_res: reference checksum considered correct
    :param fail_target: CompilerTarget that produced the failure
    :param out_dir: root directory for saved reproducers
    :param lock: inter-process lock protecting copy_test_to_out
    :param num: worker index (used for logging and run_cmd)
    :param inplace: if True, write the log in the current dir and return the
        blamed optimization name; otherwise copy the test to out_dir and
        return True (or False on blame failure)
    """
    blame_str = ""
    stdout = stderr = b""
    # Tests built at -O0 cannot be blamed on an optimization pass.
    if not re.search("-O0", fail_target.args):
        blame_opts = compilers_blame_opts[fail_target.specs.name]
        phase_num = 0
        try:
            # Each phase narrows the blame string by one more option level.
            for i in blame_opts:
                blame_str += i
                blame_str += execute_blame_phase(valid_res, fail_target, blame_str, num, phase_num)
                blame_str += " "
                phase_num += 1
        # Fix: narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        except Exception:
            # Fix: log message said "bpame_opt.py" (typo) — corrected.
            common.log_msg(logging.ERROR,
                           "Something went wrong while executing blame_opt.py on " + str(fail_dir))
            return False
        gen_test_makefile.gen_makefile(blame_test_makefile_name, True, None, fail_target, blame_str)
        ret_code, stdout, stderr, time_expired, elapsed_time = \
            common.run_cmd(["make", "-f", blame_test_makefile_name, fail_target.name],
                           run_gen.compiler_timeout, num)
        # Extract the human-readable optimization name from the compiler's stderr.
        opt_name_pattern = re.compile(compilers_opt_name_cutter[fail_target.specs.name][0] + ".*" +
                                      compilers_opt_name_cutter[fail_target.specs.name][1])
        opt_name = opt_name_pattern.findall(str(stderr, "utf-8"))[-1]
        opt_name = re.sub(compilers_opt_name_cutter[fail_target.specs.name][0], "", opt_name)
        opt_name = re.sub(compilers_opt_name_cutter[fail_target.specs.name][1], "", opt_name)
        real_opt_name = opt_name
        opt_name = opt_name.replace(" ", "_")
    else:
        real_opt_name = opt_name = "O0_bug"
    common.run_cmd(["make", "-f", blame_test_makefile_name, "clean"], run_gen.compiler_timeout, num)
    seed_dir = os.path.basename(os.path.normpath(fail_dir))
    # Create log files in different places depending on "inplace" switch.
    if not inplace:
        full_out_path = os.path.join(os.path.join(out_dir, opt_name), seed_dir)
        common.copy_test_to_out(fail_dir, full_out_path, lock)
    else:
        full_out_path = "."
    # Write to log
    with open(os.path.join(full_out_path, "log.txt"), "a") as log_file:
        log_file.write("\nBlaming for " + fail_target.name + " optset was done.\n")
        log_file.write("Optimization to blame: " + real_opt_name + "\n")
        log_file.write("Blame opts: " + blame_str + "\n\n")
        log_file.write("Details of blaming run:\n")
        log_file.write("=== Compiler log ==================================================\n")
        log_file.write(str(stdout, "utf-8"))
        log_file.write("=== Compiler err ==================================================\n")
        log_file.write(str(stderr, "utf-8"))
        log_file.write("=== Compiler end ==================================================\n")
    common.log_msg(logging.DEBUG, "Done blaming")
    # Inplace mode requires the blaming string to be communicated back to the caller.
    if not inplace:
        return True
    return real_opt_name
def add_sets(set_list):
    """Validate a testing-set config list and register it as a CompilerTarget.

    Exits the process (via common.print_and_exit) on an unknown key.
    """
    set_list = check_config_list(set_list, set_list_len, "Error in set string, check it: ")
    try:
        comp_specs = CompilerSpecs.all_comp_specs[set_list[1]]
        arch = Arch(set_list[3], SdeArch[set_list[4]])
        # CompilerTarget registers itself globally; the instance is not kept.
        CompilerTarget(set_list[0], comp_specs, set_list[2], arch)
        common.log_msg(logging.DEBUG, "Finished adding testing set")
    except KeyError:
        common.print_and_exit("Can't find key!")
def add_stats_options(stats_opt_list):
    """Validate a stats-options config list and register it as a StatisticsOptions.

    Exits the process (via common.print_and_exit) on an unknown key.
    """
    stats_opt_list = check_config_list(stats_opt_list, stats_capt_opt_list_len,
                                       "Error in stats options string, check it: ")
    try:
        spec = CompilerSpecs.all_comp_specs[stats_opt_list[0]]
        StatisticsOptions(spec, stats_opt_list[1])
        common.log_msg(logging.DEBUG, "Finished adding stats option string")
    except KeyError:
        common.print_and_exit("Can't find key!")
def dump_exec_output(msg, ret_code, output, err_output, time_expired, num):
    """Dump the result of a run_cmd invocation to the debug log.

    :param msg: headline describing what was executed
    :param ret_code: process exit status
    :param output: captured stdout as bytes (decoded as UTF-8 here)
    :param err_output: captured stderr as bytes (decoded as UTF-8 here)
    :param time_expired: whether the command hit its timeout
    :param num: worker index included in every log line
    """
    proc = str(num)
    common.log_msg(logging.DEBUG, msg + " (process " + proc + ")")
    common.log_msg(logging.DEBUG, "Ret code: " + str(ret_code) + " | process " + proc)
    common.log_msg(logging.DEBUG, "Time exp: " + str(time_expired) + " | process " + proc)
    common.log_msg(logging.DEBUG, "Output: " + str(output, "utf-8") + " | process " + proc)
    common.log_msg(logging.DEBUG, "Err output: " + str(err_output, "utf-8") + " | process " + proc)
def process_dir(directory, task_queue):
    """Enqueue every test directory (name starting with "S_") under `directory`.

    Bug fix: os.walk already descends into every subdirectory, but the old code
    *also* recursed into each non-"S_" subdirectory manually, so nested test
    directories were visited — and enqueued — multiple times. A single walk
    finds each "S_" directory exactly once.

    :param directory: root to search
    :param task_queue: queue receiving the full path of each test directory
    :return: the same task_queue, for caller convenience
    """
    common.log_msg(logging.DEBUG, "Searching for test directories in " + str(directory))
    for root, dirs, files in os.walk(directory):
        for name in dirs:
            if name.startswith("S_"):
                common.log_msg(logging.DEBUG, "Adding " + str(os.path.join(root, name)))
                task_queue.put(os.path.join(root, name))
    return task_queue
def run_fsl(indir, outdir, sub, acqparam_file, fsl="fsl5.0", sessions=(1, 2), log=None):
    """Run the FSL field-map steps: merge AP/PA pairs, then estimate with topup.

    Fix: the `sessions` default was a mutable list ([1, 2]); a tuple avoids the
    shared-mutable-default pitfall and is interchangeable since it is only iterated.

    :param indir: directory holding the *_acq-topup* field-map images
    :param outdir: directory receiving merged images and topup outputs
    :param sub: subject identifier used as file-name prefix
    :param acqparam_file: acquisition-parameter file passed to topup --datain
    :param fsl: FSL command prefix (e.g. "fsl5.0" -> "fsl5.0-topup")
    :param sessions: session numbers to process
    :param log: optional log file handle for log_msg
    """
    # Field maps ***************************************************************
    # Step 1: Merge 2 direction (AP-PA)
    # Step 2: Apply top-up FSL
    log_msg(log, "\nField maps processing")
    for s in sessions:
        log_msg(log, "\nSession {}".format(s))
        fieldmap_dir1 = op.join(indir, "{}_acq-topup{:02d}_dir-01_epi.nii.gz".format(sub, s))
        fieldmap_dir2 = op.join(indir, "{}_acq-topup{:02d}_dir-02_epi.nii.gz".format(sub, s))
        fieldmap = op.join(outdir, "fieldmap{:02d}".format(s))
        out = op.join(outdir, "topup_results{:02d}".format(s))
        fout = op.join(outdir, "field{:02d}".format(s))
        iout = op.join(outdir, "unwarped{:02d}".format(s))
        # Merge the two phase-encoding directions into a single 4D image.
        cmd = '{}-fslmerge -t {} {} {}'.format(fsl, fieldmap, fieldmap_dir1, fieldmap_dir2)
        log_msg(log, cmd)
        system(cmd)
        # Estimate the susceptibility-induced field with topup.
        cmd = "{}-topup --imain={} --datain={} --config=b02b0.cnf "\
              "--out={} --fout={} --iout={}".format(fsl, fieldmap, acqparam_file, out, fout, iout)
        log_msg(log, cmd)
        system(cmd)
def hcp_minimal_preprocessing(log_f, sub_dir, subj, t1_file, t2_file):
    """Create surface using T1 and T2 images (HCP Pipeline).

    Bug fix: the body referenced the module globals `log` and `sub` instead of
    its own parameters `log_f` and `subj`, so it only worked by accident when
    the calling script happened to define those globals. All references now
    use the parameters.

    :param log_f: open log file handle passed to log_msg/run_cmd
    :param sub_dir: dataset root directory (contains sourcedata/ and subjects)
    :param subj: subject name
    :param t1_file: path to T1 image
    :param t2_file: path to T2 image
    :return: results are stored under <sub_dir>/<subj>/hcp by the HCP pipeline
    """
    scripts_dir = "{}/sourcedata/scripts/batch".format(sub_dir)
    hcp_stp = op.join(scripts_dir, "hcp_setup.sh")
    # Config
    log_msg(log_f, "** HCP setup **", print_time=True, blank_line=True)
    # Also pass log_f here for consistency with the other run_cmd calls.
    run_cmd("mkdir {}/hcp".format(op.join(sub_dir, subj)), log_f)
    # Pre-freesurfer
    log_msg(log_f, "** Pre-FreeSurfer **", print_time=True, blank_line=True)
    run_cmd("{}/hcp_pre-fs.sh {} {} {} {}".format(scripts_dir, sub_dir, subj, t1_file, t2_file),
            log_f)
    # Freesurfer
    log_msg(log_f, "** FreeSurfer **", print_time=True, blank_line=True)
    run_cmd("{} && {}/hcp_fs.sh {} {}".format(hcp_stp, scripts_dir, sub_dir, subj), log_f)
    # Post-freesurfer
    log_msg(log_f, "** Post-FreeSurfer **", print_time=True, blank_line=True)
    run_cmd("{} && {}/hcp_post-fs.sh {} {}".format(hcp_stp, scripts_dir, sub_dir, subj), log_f)
def copy_files(log_f, source_dir, target_dir, subj):
    """Copy a subject's raw data into the working tree and unzip the images.

    NOTE(review): all work is done by shelling out (mkdir/cp/gunzip) through
    run_cmd; failures are only visible in the log, not raised.

    :param log_f: open log file handle passed to run_cmd/log_msg
    :param source_dir: root of the source dataset (subject dir is appended)
    :param target_dir: root of the working dataset (subject dir is appended)
    :param subj: subject identifier, used both as dir name and file prefix
    """
    source_dir = op.join(source_dir, subj)
    target_dir = op.join(target_dir, subj)
    log_msg(log_f, "Create subject directories", print_time=True, blank_line=True)
    run_cmd("mkdir {}/anat/".format(target_dir), log_f)
    run_cmd("mkdir {}/fmap/".format(target_dir), log_f)
    run_cmd("mkdir {}/func/".format(target_dir), log_f)
    run_cmd("mkdir {}/output/".format(target_dir), log_f)
    run_cmd("mkdir {}/output/glm_vol/".format(target_dir), log_f)
    log_msg(log_f, "Copy functional files", print_time=True, blank_line=True)
    run_cmd("cp {}/func/* {}/func/".format(source_dir, target_dir), log_f)
    log_msg(log_f, 'Copy others files', print_time=True, blank_line=True)
    run_cmd("cp {}/anat/* {}/anat/".format(source_dir, target_dir), log_f)
    # Only the session-1 fieldmap/magnitude images are copied here.
    run_cmd(
        "cp {}/fmap/{}_acq-topup1_fieldmap.nii.gz "
        "{}/fmap/{}_acq-topup1_fieldmap.nii.gz".format(source_dir, subj, target_dir, subj), log_f)
    run_cmd(
        "cp {}/fmap/{}_acq-topup1_magnitude.nii.gz "
        "{}/fmap/{}_acq-topup1_magnitude.nii.gz".format(
            source_dir, subj, target_dir, subj), log_f)
    # Presumably SPM needs plain .nii files — TODO confirm.
    log_msg(log_f, 'Unzip .nii.gz files', print_time=True, blank_line=True)
    run_cmd("gunzip {}/fmap/*.nii.gz".format(target_dir), log_f)
    run_cmd("gunzip {}/anat/*.nii.gz".format(target_dir), log_f)
    run_cmd("gunzip {}/func/*.nii.gz".format(target_dir), log_f)
def create_acqparams_file(indir, outdir, sub, log=None):
    """Create the FSL topup acquisition-parameter file from the session-1 JSON.

    Fixes: (1) the two warning calls omitted the leading `log` argument that
    every other log_msg call passes, so the message string was being passed as
    the log handle; (2) the JSON and output files were opened without `with`
    and never closed; (3) the six repeated write lines are collapsed to a loop.

    :param indir: directory with the *_epi.json sidecar files
    :param outdir: directory receiving acqparamsAP_PA.txt
    :param sub: subject identifier used as file prefix
    :param log: optional log file handle for log_msg
    :return: path of the written acquisition-parameter file
    """
    log_msg(log, "\nReading json file (from session 01, direction 01)")
    json_file = op.join(indir, "{}_acq-topup01_dir-01_epi.json".format(sub))
    with open(json_file) as f:
        infos = js.load(f)
    dir1 = infos['PhaseEncodingDirection']
    readoutTime = infos['TotalReadoutTime']
    # Check that all acquisitions are similar across sessions.
    tmp_file = op.join(indir, "{}_acq-topup02_dir-01_epi.json".format(sub))
    with open(tmp_file) as f:
        tmp_info = js.load(f)
    if tmp_info['TotalReadoutTime'] != readoutTime:
        log_msg(log, "Total readout times are different.", warning=True)
    if tmp_info['PhaseEncodingDirection'] != dir1:
        log_msg(log, "Phase encoding directions are different.", warning=True)
    log_msg(
        log, "Phase encoding direction: {}\nTotal readout time: {}".format(
            dir1, readoutTime))
    acqparam_file = op.join(outdir, "acqparamsAP_PA.txt")
    log_msg(log, "Acquisition params file: {}".format(acqparam_file))
    # "j-" = A->P first, "j" = P->A first; topup wants 3 lines per direction.
    if dir1 == "j-":
        signs = (-1, 1)
    elif dir1 == "j":
        signs = (1, -1)
    else:
        wn.warn("Unrecognized phase encoding direction.")
        exit(-1)
    with open(acqparam_file, "w") as txtfile:
        for sign in signs:
            for _ in range(3):
                txtfile.write("0 {} 0 {}\n".format(sign, readoutTime))
    return acqparam_file
def print_steps(log_f, arg_list):
    """Log which pipeline steps are enabled for this run.

    :param log_f: open log file handle
    :param arg_list: parsed argparse namespace carrying the do* flags
    """
    log_msg(log_f, "\nPipeline steps:")
    log_msg(log_f, "\tPreprocessing:")
    preproc_steps = (
        (arg_list.doCopy, "Copy original files"),
        (arg_list.doHcp, "Run HCP preprocessing pipeline"),
        (arg_list.doSpm, "Run SPM fMRI preprocessing"),
    )
    for flag, label in preproc_steps:
        print_step_action(log_f, flag, label)
    log_msg(log_f, "\tGLM:")
    glm_steps = (
        (arg_list.doConvEvent, "Convert labview output files to paradigm files"),
        (arg_list.doVolGLM, "Run GLM on volume using SPM"),
        (arg_list.doSurfGLM, "Run GLM on surface using Nistats"),
    )
    for flag, label in glm_steps:
        print_step_action(log_f, flag, label)
default=False, help="Remove original functional files") args = parser.parse_args() # --- INIT ----------------------------------------------------------------- # Path variables sub = args.sub subdir = args.subdir # Create subject dir check_directories(subdir, sub) # Create a log file log = open(op.join(subdir, sub, 'log.txt'), 'a') log_msg(log, "************************************************************" "********************", blank_line=True) log_msg(log, "New call to the python pipeline.\n", print_time=True) # --- CONFIG --------------------------------------------------------------- # Check what must be done if args.doAll: args.doPreProc = True args.doGlm = True if args.doPreProc: args.doCopy = True args.doHcp = True args.doSpm = True if args.doGlm: args.doConvEvent = True args.doVolGLM = True
def recheck(num, lock, task_queue, failed_queue, passed_queue, target, out_dir):
    """Re-run saved failing tests and sort them into passed/failed queues.

    Each directory from task_queue is rebuilt and re-executed for every
    compiler target whose spec name appears in `target`; tests whose output
    still differs are blamed and copied into out_dir.

    :param num: worker index, used in log messages and run_cmd
    :param lock: inter-process lock protecting copy_test_to_out
    :param task_queue: queue of test directories to re-check
    :param failed_queue: receives directories that still fail
    :param passed_queue: receives checked directories (see NOTE below)
    :param target: whitespace-separated compiler spec names to test against
    :param out_dir: destination root for reproducers of still-failing tests
    """
    common.log_msg(logging.DEBUG, "Started recheck. Process #" + str(num))
    cwd_save = os.getcwd()
    abs_out_dir = os.path.join(cwd_save, out_dir)
    job_finished = False
    while not job_finished:
        try:
            test_dir = task_queue.get_nowait()
            task_queue.task_done()
            common.log_msg(logging.DEBUG, "#" + str(num) + " test directory: " + str(test_dir))
            abs_test_dir = os.path.join(cwd_save, test_dir)
            # Reuse the Test_Makefile that was generated into out_dir.
            common.check_and_copy(
                os.path.join(os.path.join(cwd_save, out_dir), gen_test_makefile.Test_Makefile_name),
                os.path.join(abs_test_dir, gen_test_makefile.Test_Makefile_name))
            os.chdir(os.path.join(cwd_save, abs_test_dir))
            valid_res = None
            out_res = set()
            prev_out_res_len = 1  # We can't check first result
            for i in gen_test_makefile.CompilerTarget.all_targets:
                if i.specs.name not in target.split():
                    continue
                common.log_msg(logging.DEBUG, "Re-checking target " + i.name)
                # Build step.
                ret_code, output, err_output, time_expired, elapsed_time = \
                    common.run_cmd(["make", "-f", gen_test_makefile.Test_Makefile_name, i.name],
                                   run_gen.compiler_timeout, num)
                if time_expired or ret_code != 0:
                    failed_queue.put(test_dir)
                    common.log_msg(logging.DEBUG, "#" + str(num) + " Compilation failed")
                    common.copy_test_to_out(abs_test_dir, os.path.join(abs_out_dir, test_dir), lock)
                    break
                # Run step.
                ret_code, output, err_output, time_expired, elapsed_time = \
                    common.run_cmd(["make", "-f", gen_test_makefile.Test_Makefile_name, "run_" + i.name],
                                   run_gen.run_timeout, num)
                if time_expired or ret_code != 0:
                    failed_queue.put(test_dir)
                    common.log_msg(logging.DEBUG, "#" + str(num) + " Execution failed")
                    common.copy_test_to_out(abs_test_dir, os.path.join(abs_out_dir, test_dir), lock)
                    break
                # The test's checksum is the last whitespace-separated token of stdout.
                out_res.add(str(output, "utf-8").split()[-1])
                if len(out_res) > prev_out_res_len:
                    prev_out_res_len = len(out_res)
                    failed_queue.put(test_dir)
                    common.log_msg(logging.DEBUG, "#" + str(num) + " Out differs")
                    if not blame_opt.prepare_env_and_blame(abs_test_dir, valid_res, i, abs_out_dir,
                                                           lock, num):
                        # Blaming failed; still save the reproducer as-is.
                        common.copy_test_to_out(abs_test_dir, os.path.join(abs_out_dir, test_dir), lock)
                    break
                valid_res = str(output, "utf-8").split()[-1]
            # NOTE(review): this runs even after a failing break above, so a
            # directory can land in both failed_queue and passed_queue — confirm
            # whether that is intended.
            passed_queue.put(test_dir)
            os.chdir(cwd_save)
        except queue.Empty:
            job_finished = True
def prepare_and_start(work_dir, config_file, timeout, num_jobs, csmith_bin_path, csmith_runtime,
                      csmith_args, optsets):
    """Set up the working directory, build compiler command lines, and launch workers.

    :param work_dir: directory where the csmith binary and per-process dirs live
    :param config_file: test-makefile config defining compiler specs/targets
    :param timeout: total run time in minutes; -1 means run until interrupted
    :param num_jobs: number of parallel worker processes
    :param csmith_bin_path: path to the csmith binary relative to $CSMITH_HOME
    :param csmith_runtime: extra compiler arguments for the csmith runtime
    :param csmith_args: argument string forwarded to the csmith generator
    :param optsets: whitespace-separated target/optset names to compile with
    """
    common.check_dir_and_create(work_dir)
    # Check for binary of generator
    csmith_home = os.environ.get("CSMITH_HOME")
    if csmith_home is None:
        common.print_and_exit("Please set CSMITH_HOME envirnoment variable")
    csmith_bin = os.path.abspath(csmith_home + os.sep + csmith_bin_path + os.sep + "csmith")
    common.check_and_copy(csmith_bin, work_dir)
    os.chdir(work_dir)
    ret_code, output, err_output, time_expired, elapsed_time = \
        common.run_cmd(["." + os.sep + "csmith", "-v"], csmith_timeout, 0)
    csmith_version_str = str(output, "utf-8")
    # TODO: need to add some check, but I hope that it is safe
    common.log_msg(logging.DEBUG, "Csmith version: " + csmith_version_str)
    gen_test_makefile.set_standard("c99")
    gen_test_makefile.parse_config(config_file)
    compiler_run_args = {}
    optsets_list = optsets.split()
    target_was_found = False
    failed_targets = []
    # Build a full compiler command line for every requested optset.
    for i in optsets_list:
        for target in gen_test_makefile.CompilerTarget.all_targets:
            if target.name != i:
                continue
            target_was_found = True
            common.log_msg(logging.DEBUG, "Trying to form compiler args for " + str(i))
            run_str = target.specs.comp_c_name + " "
            run_str += target.args + " "
            if target.arch.comp_name != "":
                run_str += " " + target.specs.arch_prefix + target.arch.comp_name + " "
            run_str += gen_test_makefile.StatisticsOptions.get_options(target.specs) + " "
            run_str += csmith_runtime + " "
            common.log_msg(logging.DEBUG, "Formed compiler args: " + run_str)
            compiler_run_args[i] = run_str
        if not target_was_found:
            failed_targets.append(i)
        target_was_found = False
    if len(failed_targets) > 0:
        common.log_msg(logging.WARNING, "Can't find relevant target: " + str(failed_targets))
    # One scratch directory per worker process.
    for i in range(num_jobs):
        common.check_dir_and_create(run_gen.process_dir + str(i))
    manager_obj = run_gen.manager()
    stat = manager_obj.Statistics()
    start_time = time.time()
    end_time = start_time + timeout * 60
    if timeout == -1:
        end_time = -1
    # Spawn one run_csmith worker per job and let them share `stat`.
    task_threads = [0] * num_jobs
    for num in range(num_jobs):
        task_threads[num] = multiprocessing.Process(
            target=run_csmith,
            args=(num, csmith_args, compiler_run_args, end_time, stat))
        task_threads[num].start()
    # Blocks until the workers are done, periodically printing progress.
    print_statistics(stat, task_threads, num_jobs)
    sys.stdout.write("\n")
    for i in range(num_jobs):
        common.log_msg(logging.DEBUG, "Removing " + run_gen.process_dir + str(i) + " dir")
        shutil.rmtree(run_gen.process_dir + str(i))
    stat_str, verbose_stat_str, prev_len = form_statistics(stat, 0)
    sys.stdout.write(verbose_stat_str)
    sys.stdout.flush()
"--verbose", dest="verbose", default=False, action="store_true", help="Increase output verbosity") args = parser.parse_args() log_level = logging.DEBUG if args.verbose else logging.INFO common.setup_logger(None, log_level) stat_log_file = common.wrap_log_file(args.stat_log_file, parser.get_default("stat_log_file")) common.setup_stat_logger(stat_log_file) script_start_time = datetime.datetime.now() common.log_msg(logging.DEBUG, "Command line: " + " ".join(str(p) for p in sys.argv)) common.log_msg( logging.DEBUG, "Start time: " + script_start_time.strftime('%Y/%m/%d %H:%M:%S')) common.check_python_version() prepare_and_start(work_dir=args.work_dir, timeout=args.timeout, num_jobs=args.num_jobs, config_file=args.config_file, csmith_bin_path=args.csmith_bin, csmith_runtime=args.csmith_runtime, csmith_args=args.csmith_args, optsets=args.optsets)
def gen_makefile(out_file_name, force, config_file, only_target=None, inject_blame_opt=None,
                 creduce_file=None, stat_targets=None):
    """Generate the test Makefile for all (or a single) configured compiler target.

    :param out_file_name: path of the Makefile to write
    :param force: overwrite an existing file instead of aborting
    :param config_file: compiler config to parse; None if the caller already
        prepared the specs and targets
    :param only_target: restrict generation to this single CompilerTarget
    :param inject_blame_opt: extra options injected through the BLAMEOPTS variable
    :param creduce_file: source file under reduction; the other sources are then
        taken from $(TEST_PWD) and not FORCE-rebuilt
    :param stat_targets: target names that get statistics options appended
    """
    # Somebody can prepare test specs and target, so we don't need to parse config file
    check_if_std_defined()
    if config_file is not None:
        parse_config(config_file)
    output = ""
    if stat_targets is not None:
        stat_targets = list(set(stat_targets))  # de-duplicate
    # 1. License
    license_file = common.check_and_open_file(os.path.abspath(
        common.yarpgen_scripts + os.sep + ".." + os.sep + license_file_name), "r")
    for license_str in license_file:
        output += "#" + license_str
    license_file.close()
    output += "###############################################################################\n"
    output += "#This file was generated automatically.\n"
    output += "#If you want to make a permanent changes, you should edit gen_test_makefile.py\n"
    output += "###############################################################################\n\n"
    # 2. Define common variables
    for makefile_variable in Makefile_variable_list:
        test_pwd = ""
        if creduce_file and makefile_variable.name == "CXXFLAGS":
            test_pwd = " -I$(TEST_PWD)"
        output += makefile_variable.name + "=" + makefile_variable.value + test_pwd + "\n"
    output += "\n"
    # 3. Define build targets
    for target in CompilerTarget.all_targets:
        if only_target is not None and only_target.name != target.name:
            continue
        compiler_name = None
        if selected_standard.is_c():
            compiler_name = target.specs.comp_c_name
        if selected_standard.is_cxx():
            compiler_name = target.specs.comp_cxx_name
        output += target.name + ": " + "COMPILER=" + compiler_name + "\n"
        optflags_str = target.name + ": " + "OPTFLAGS=" + target.args
        if target.arch.comp_name != "":
            optflags_str += " " + target.specs.arch_prefix + target.arch.comp_name
        optflags_str += "\n"
        output += optflags_str
        # For performance reasons driver should always be compiled with -O0
        output += re.sub("-O\d", "-O0", (optflags_str.replace("OPTFLAGS", "DRIVER_OPTFLAGS")))
        if inject_blame_opt is not None:
            output += target.name + ": " + "BLAMEOPTS=" + inject_blame_opt + "\n"
        #TODO: one day we can decide to use gcc also.
        if stat_targets is not None:
            for stat_target in stat_targets:
                if target.name == stat_target:
                    output += target.name + ": " + stat_options.name + "=" + \
                              StatisticsOptions.get_options(target.specs) + "\n"
                    stat_targets.remove(stat_target)
        output += target.name + ": " + "EXECUTABLE=" + target.name + "_" + executable.value + "\n"
        output += target.name + ": " + "$(addprefix " + target.name + "_,"
        # ISPC sources need a patsubst because their extension is .ispc, not the
        # standard one returned by get_file_ext().
        if common.selected_gen_std != common.GenStdID.ISPC:
            output += "$(SOURCES:" + get_file_ext() + "=.o))\n"
        else:
            output += "$(patsubst %.ispc,%.o," + "$(SOURCES:" + get_file_ext() + "=.o))" + ")\n"
        output += "\t" + "$(COMPILER) $(LDFLAGS) $(STDFLAGS) $(OPTFLAGS) -o $(EXECUTABLE) $^\n\n"
    # Anything left in stat_targets was never matched against a real target.
    if stat_targets is not None and len(stat_targets) != 0:
        common.log_msg(logging.WARNING,
                       "Can't find relevant stat_targets: " + str(stat_targets),
                       forced_duplication=True)
    # 4. Force make to rebuild everything
    # TODO: replace with PHONY
    output += "FORCE:\n\n"
    for source in sources.value.split():
        source_prefix = ""
        force_str = " FORCE\n"
        if creduce_file and creduce_file != source:
            source_prefix = "$(TEST_PWD)/"
            force_str = "\n"
        source_name = source.split(".")[0]
        output += "%" + source_name + ".o: " + source_prefix + source + force_str
        # For performance reasons driver should always be compiled with -O0
        optflags_name = "$(OPTFLAGS)" if source_name != "driver" else "$(DRIVER_OPTFLAGS)"
        output += "\t" + "$(COMPILER) $(CXXFLAGS) $(STDFLAGS) " + optflags_name + " -o $@ -c $<"
        if source_name == "func":
            output += " $(STATFLAGS) "
        if inject_blame_opt is not None:
            output += " $(BLAMEOPTS) "
        output += "\n\n"
    output += "clean:\n"
    output += "\trm *.o *_$(EXECUTABLE)\n\n"
    # 5. Define run targets
    native_arch = detect_native_arch()
    for target in CompilerTarget.all_targets:
        if only_target is not None and only_target.name != target.name:
            continue
        output += "run_" + target.name + ": " + target.name + "_" + executable.value + "\n"
        output += "\t@"
        # Run through Intel SDE when the target arch differs from the host.
        required_sde_arch = define_sde_arch(native_arch, target.arch.sde_arch)
        if required_sde_arch != "":
            output += "sde -" + required_sde_arch + " -- "
        output += "." + os.sep + target.name + "_" + executable.value + "\n\n"
    out_file = None
    if not os.path.isfile(out_file_name):
        out_file = open(out_file_name, "w")
    else:
        if force:
            out_file = open(out_file_name, "w")
        else:
            common.print_and_exit("File already exists. Use -f if you want to rewrite it.")
    out_file.write(output)
    out_file.close()
def blame(fail_dir, valid_res, fail_target, out_dir, lock, num, inplace):
    """Bisect the optimization (or, for dpcpp, the triage phase) causing a failure.

    :param fail_dir: directory containing the failing test
    :param valid_res: reference checksum considered correct
    :param fail_target: CompilerTarget that produced the failure
    :param out_dir: root directory for saved reproducers
    :param lock: inter-process lock protecting copy_test_to_out
    :param num: worker index (used in logging and run_cmd)
    :param inplace: if True, log into the current dir and return the blamed
        optimization name; otherwise copy the test to out_dir and return
        True (or False if blaming itself failed)
    """
    blame_str = ""
    stdout = stderr = b""
    # Tests built at -O0 cannot be blamed on an optimization pass.
    if not re.search("-O0", fail_target.args):
        blame_opts = compilers_blame_opts[fail_target.specs.name]
        phase_num = 0
        blame_phase_num = 0
        # Do blaming
        try:
            for i in blame_opts:
                blame_str += i
                blame_phase_num = execute_blame_phase(valid_res, fail_target, blame_str, num, phase_num)
                if fail_target.specs.name == "dpcpp":
                    # Special case because the triaging mechanism is different
                    # and there's only one level of triaging.
                    blame_str += str(blame_phase_num - 1)
                else:
                    blame_str += str(blame_phase_num)
                blame_str += " "
                phase_num += 1
        except:
            common.log_msg(
                logging.ERROR,
                "Something went wrong while executing blame_opt.py on " + str(fail_dir))
            return False
        # Wrap up results
        gen_test_makefile.gen_makefile(
            out_file_name=blame_test_makefile_name,
            force=True,
            config_file=None,
            only_target=fail_target,
            inject_blame_opt=blame_str if fail_target.specs.name != "dpcpp" else None,
            inject_blame_env=blame_str if fail_target.specs.name == "dpcpp" else None)
        ret_code, stdout, stderr, time_expired, elapsed_time = \
            common.run_cmd(["make", "-f", blame_test_makefile_name, fail_target.name],
                           run_gen.compiler_timeout, num)
        if fail_target.specs.name == "dpcpp":
            # dpcpp needs the test executed as well, since the blame options are
            # injected through the environment at run time.
            ret_code, stdout, stderr, time_expired, elapsed_time = \
                common.run_cmd(["make", "-f", blame_test_makefile_name, "run_" + fail_target.name],
                               run_gen.compiler_timeout, num)
        if fail_target.specs.name != "dpcpp":
            # Cut the human-readable optimization name out of the compiler's stderr.
            opt_name_pattern = re.compile(
                compilers_opt_name_cutter[fail_target.specs.name][0] + ".*" +
                compilers_opt_name_cutter[fail_target.specs.name][1])
            opt_name = opt_name_pattern.findall(str(stderr, "utf-8"))[-1]
            opt_name = re.sub(
                compilers_opt_name_cutter[fail_target.specs.name][0], "", opt_name)
            opt_name = re.sub(
                compilers_opt_name_cutter[fail_target.specs.name][1], "", opt_name)
            real_opt_name = opt_name
            opt_name = opt_name.replace(" ", "_")
        else:
            if blame_phase_num == 1:
                # It's a special case for DPC++: 1 means that triaging failed
                # and no specific phase can be blamed.
                real_opt_name = opt_name = "FailedToBlame"
            else:
                opt_name_pattern = re.compile(
                    compilers_opt_name_cutter[fail_target.specs.name][0] + ".*" +
                    compilers_opt_name_cutter[fail_target.specs.name][1])
                opt_name = opt_name_pattern.findall(str(stderr, "utf-8"))[0]
                opt_name = re.sub(
                    compilers_opt_name_cutter[fail_target.specs.name][0], "", opt_name)
                opt_name = re.sub(
                    compilers_opt_name_cutter[fail_target.specs.name][1], "", opt_name)
                real_opt_name = opt_name
                opt_name = opt_name.replace(" ", "_")
    else:
        real_opt_name = opt_name = "O0_bug"
    common.run_cmd(["make", "-f", blame_test_makefile_name, "clean"], run_gen.compiler_timeout, num)
    seed_dir = os.path.basename(os.path.normpath(fail_dir))
    # Create log files in different places depending on "inplace" switch.
    if not inplace:
        full_out_path = os.path.join(os.path.join(out_dir, opt_name), seed_dir)
        common.copy_test_to_out(fail_dir, full_out_path, lock)
    else:
        full_out_path = "."
    # Write to log
    with open(os.path.join(full_out_path, "log.txt"), "a") as log_file:
        log_file.write("\nBlaming for " + fail_target.name + " optset was done.\n")
        log_file.write("Optimization to blame: " + real_opt_name + "\n")
        log_file.write("Blame opts: " + blame_str + "\n\n")
        log_file.write("Details of blaming run:\n")
        log_file.write(
            "=== Compiler log ==================================================\n"
        )
        log_file.write(str(stdout, "utf-8"))
        log_file.write(
            "=== Compiler err ==================================================\n"
        )
        log_file.write(str(stderr, "utf-8"))
        log_file.write(
            "=== Compiler end ==================================================\n"
        )
    common.log_msg(logging.DEBUG, "Done blaming")
    # Inplace mode requires the blaming string to be communicated back to the caller.
    if not inplace:
        return True
    else:
        return real_opt_name
def execute_blame_phase(valid_res, fail_target, inject_str, num, phase_num):
    """Binary-search the optimization number that first reproduces the failure.

    Generates a Makefile that caps the optimizer at a candidate number, then
    builds/runs the test, bisecting between 0 and the compiler-reported maximum
    until the earliest failing number is isolated.

    :param valid_res: reference checksum considered correct
    :param fail_target: CompilerTarget being triaged
    :param inject_str: blame-option prefix accumulated by previous phases
    :param num: worker index (logging / run_cmd)
    :param phase_num: index into compilers_blame_patterns for this spec
    :return: the blamed number (int), or the string "-1" when an icc phase
        cannot drill down further. NOTE(review): mixed str/int return —
        the caller concatenates it into a string either way.
    """
    # Probe build: ask the compiler to report the maximum opt number.
    gen_test_makefile.gen_makefile(
        out_file_name=blame_test_makefile_name,
        force=True,
        config_file=None,
        only_target=fail_target,
        inject_blame_opt=inject_str + "-1" if fail_target.specs.name != "dpcpp" else None,
        inject_blame_env=inject_str + "1" if fail_target.specs.name == "dpcpp" else None)
    ret_code, output, err_output, time_expired, elapsed_time = \
        common.run_cmd(["make", "-f", blame_test_makefile_name, fail_target.name],
                       run_gen.compiler_timeout, num)
    if fail_target.specs.name == "dpcpp":
        # dpcpp reports through the runtime, so the test has to be executed too.
        ret_code, output, err_output, time_expired, elapsed_time = \
            common.run_cmd(["make", "-f", blame_test_makefile_name, "run_" + fail_target.name],
                           run_gen.compiler_timeout, num)
    opt_num_regex = re.compile(
        compilers_blame_patterns[fail_target.specs.name][phase_num])
    try:
        if fail_target.specs.name == "dpcpp":
            # No reported maximum for dpcpp; use a fixed upper bound.
            max_opt_num = 250
        else:
            matches = opt_num_regex.findall(str(err_output, "utf-8"))
            # Some icc phases may not support going to phase "2", i.e. drilling
            # down to num_case level; in this case we are done.
            if phase_num == 2 and not matches:
                return str(-1)
            max_opt_num_str = matches[-1]
            remove_brackets_pattern = re.compile("\d+")
            max_opt_num = int(
                remove_brackets_pattern.findall(max_opt_num_str)[-1])
        common.log_msg(
            logging.DEBUG,
            "Max opt num (process " + str(num) + "): " + str(max_opt_num))
    except IndexError:
        common.log_msg(
            logging.ERROR,
            "Can't decode max opt number using \"" +
            compilers_blame_patterns[fail_target.specs.name][phase_num] +
            "\" regexp (phase " + str(phase_num) +
            ") in the following output:\n" + str(err_output, "utf-8") +
            " (process " + str(num) + "): ")
        raise
    # Bisection state: [start_opt, end_opt] bracket, cur_opt is the candidate.
    start_opt = 0
    end_opt = max_opt_num
    cur_opt = max_opt_num
    failed_flag = True
    time_to_finish = False
    while not time_to_finish:
        start_opt, end_opt, cur_opt = get_next_step(start_opt, end_opt, cur_opt, failed_flag)
        common.log_msg(
            logging.DEBUG,
            "Previous failed (process " + str(num) + "): " + str(failed_flag))
        failed_flag = False
        eff = ((start_opt + 1) >= cur_opt)  # Earliest fail was found
        common.log_msg(
            logging.DEBUG,
            "Trying opt (process " + str(num) + "): " + str(start_opt) + "/" +
            str(cur_opt) + "/" + str(end_opt))
        # Rebuild with the candidate number injected.
        gen_test_makefile.gen_makefile(
            out_file_name=blame_test_makefile_name,
            force=True,
            config_file=None,
            only_target=fail_target,
            inject_blame_opt=inject_str + str(cur_opt) if fail_target.specs.name != "dpcpp" else None,
            inject_blame_env=inject_str + str(cur_opt) if fail_target.specs.name == "dpcpp" else None)
        ret_code, output, err_output, time_expired, elapsed_time = \
            common.run_cmd(["make", "-f", blame_test_makefile_name, fail_target.name],
                           run_gen.compiler_timeout, num)
        # Any of the three failure modes below counts as "still reproduces".
        if time_expired or ret_code != 0:
            dump_exec_output("Compilation failed", ret_code, output, err_output, time_expired, num)
            failed_flag = True
            if not eff:
                continue
            else:
                break
        ret_code, output, err_output, time_expired, elapsed_time = \
            common.run_cmd(["make", "-f", blame_test_makefile_name, "run_" + fail_target.name],
                           run_gen.run_timeout, num)
        if time_expired or ret_code != 0:
            dump_exec_output("Execution failed", ret_code, output, err_output, time_expired, num)
            failed_flag = True
            if not eff:
                continue
            else:
                break
        if str(output, "utf-8").split()[-1] != valid_res:
            common.log_msg(
                logging.DEBUG,
                "Output differs (process " + str(num) + "): " +
                str(output, "utf-8").split()[-1] + " vs " + valid_res + " (expected)")
            failed_flag = True
            if not eff:
                continue
            else:
                break
        time_to_finish = (eff and failed_flag) or (eff and not failed_flag and (cur_opt == (end_opt - 1)))
        common.log_msg(
            logging.DEBUG,
            "Time to finish (process " + str(num) + "): " + str(time_to_finish))
        if not failed_flag:
            common.log_msg(
                logging.DEBUG,
                "Swapping current and end opt (process " + str(num) + ")")
            cur_opt = end_opt
    common.log_msg(
        logging.DEBUG,
        "Finished blame phase, result: " + str(inject_str) + str(cur_opt) +
        " (process " + str(num) + ")")
    return cur_opt
def run_cmd(command, log_f=None):
    """Echo a shell command (to the log file if given, else stdout) and execute it.

    :param command: shell command string passed to system()
    :param log_f: optional open log file handle for log_msg
    """
    if log_f is None:
        print(command)
    else:
        log_msg(log_f, command)
    system(command)
def check_config_list(config_list, fixed_len, message):
    """Validate a config list's minimum length and strip whitespace from entries.

    Exits the process (via common.print_and_exit) when the list is too short.

    :param config_list: raw string fields parsed from a config line
    :param fixed_len: minimum number of fields required
    :param message: error prefix shown before the offending list on exit
    :return: the list with every entry whitespace-stripped
    """
    common.log_msg(logging.DEBUG, "Adding config list: " + str(config_list))
    if len(config_list) < fixed_len:
        common.print_and_exit(message + str(config_list))
    return [entry.strip() for entry in config_list]