def run_inference(project_name):
    """Run ontology type inference (Z3 backend) on a project, then build its annotated JAR."""
    common.setup_checker_framework_env()

    # Put the inference solver and ontology checker binaries on the classpath.
    solver_bin = os.path.join(os.environ['JSR308'],
                              'generic-type-inference-solver', 'bin')
    ontology_bin = os.path.join(os.environ['JSR308'], 'ontology', 'bin')
    extra_classpath = solver_bin + ':' + ontology_bin
    if os.environ.get('CLASSPATH'):
        os.environ['CLASSPATH'] += ':' + extra_classpath
    else:
        os.environ['CLASSPATH'] = extra_classpath

    project_dir = common.get_project_dir(project_name)
    annotation_dir = os.path.join(project_dir, common.DLJC_OUTPUT_DIR,
                                  'annotations')
    # Start from a clean annotation output directory.
    if os.path.isdir(annotation_dir):
        shutil.rmtree(annotation_dir)

    common.run_dljc(project_name,
                    ['inference'],
                    ['--solverArgs=solver=Z3',
                     '--checker', 'ontology.OntologyChecker',
                     '--solver', 'ontology.solvers.backend.OntologySolverEngine',
                     '-m', 'ROUNDTRIP',
                     '--cache',
                     '-afud', annotation_dir])

    print("Building annotated JAR for {}".format(project_name))
    build_jar(project_name)
def add_project_to_corpus(project):
    """Clean and analyze a project, then compute its graph kernel file.

    Assumes that the project_dir contains a text file named
    build_command.txt that contains the build command(s) for the project
    in this directory, and a clean_command.txt that will clean the project.

    Returns the path of the generated kernel file.
    """
    common.clean_project(project)

    # Run dljc:
    #   - run Randoop to generate test sources
    #   - compile test sources
    #   - run daikon.Chicory on tests to create a dtrace file
    #   - precompute graph kernels that are independent of ontology stuff
    common.run_dljc(project,
                    ['dyntrace', 'graphtool'],
                    ['--graph-jar', common.get_jar('prog2dfg.jar'),
                     '--dyntrace-libs', common.LIBS_DIR])

    # run petablox
    #run_petablox(project_dir)

    # Run graph kernel computation.
    project_dir = common.get_project_dir(project)
    kernel_file_path = common.get_kernel_path(project)
    graph_kernel_cmd = ['python',
                        common.get_simprog('precompute_kernel.py'),
                        project_dir,
                        kernel_file_path]
    common.run_cmd(graph_kernel_cmd)
    # print() call instead of the Python-2 print statement, consistent
    # with the rest of the file (works under both Python 2 and 3 for a
    # single argument).
    print('Generated kernel file for {0}.'.format(project))
    return kernel_file_path
def run_inference(project):
    """Clean a project and run dljc ontology inference (MaxSat backend) in ROUNDTRIP mode."""
    common.setup_checker_framework_env()

    solver_classpath = os.path.join(os.environ['JSR308'],
                                    'generic-type-inference-solver', 'bin')
    existing = os.environ.get('CLASSPATH')
    if existing:
        os.environ['CLASSPATH'] = existing + ':' + solver_classpath
    else:
        os.environ['CLASSPATH'] = solver_classpath

    project_dir = common.get_project_dir(project)
    annotation_dir = os.path.join(project_dir, common.DLJC_OUTPUT_DIR,
                                  'annotations')
    # Remove stale annotations from a previous run.
    if os.path.isdir(annotation_dir):
        shutil.rmtree(annotation_dir)

    with common.cd(project_dir):
        common.clean_project(project)
        inference_args = ['--solverArgs=backEndType=maxsatbackend.MaxSat',
                          '--checker', 'ontology.OntologyChecker',
                          '--solver', 'constraintsolver.ConstraintSolver',
                          '-m', 'ROUNDTRIP',
                          '-afud', annotation_dir]
        common.run_dljc(project, ['inference'], inference_args)
def main():
    """Entry point: run the randoop dljc tool on a single named project.

    Exits nonzero when prerequisites are missing or no project name is
    supplied on the command line.
    """
    if not os.path.exists(common.CORPUS_DIR):
        print("Please run python fetch_corpus.py first to fetch the corpus.")
        sys.exit(1)  # removed unreachable `return` that followed sys.exit

    if not os.path.exists(common.LIBS_DIR):
        print("Please run fetch_dependencies.sh first to fetch the necessary tools.")
        sys.exit(1)

    if len(sys.argv) == 2:
        project = sys.argv[1]
    else:
        print('must supply single test name')
        # Was a bare exit(), which exits with status 0; an error path
        # should report failure like the checks above.
        sys.exit(1)

    print("Running analysis on corpus.")
    print(time.strftime('%X %x'))

    tools = ['randoop']
    print("Cleaning {}".format(project))
    common.clean_project(project)
    common.run_dljc(project, tools)

    print(time.strftime('%X %x'))
def add_project_to_corpus(project):
    """Clean and analyze a project, then compute its graph kernel file.

    Assumes that the project_dir contains a text file named
    build_command.txt that contains the build command(s) for the project
    in this directory, and a clean_command.txt that will clean the project.

    Returns the path of the generated kernel file.
    """
    common.clean_project(project)

    # Run dljc:
    #   - run Randoop to generate test sources
    #   - compile test sources
    #   - run daikon.Chicory on tests to create a dtrace file
    #   - precompute graph kernels that are independent of ontology stuff
    common.run_dljc(project,
                    ['dyntrace', 'graphtool'],
                    ['--graph-jar', common.get_jar('prog2dfg.jar'),
                     '--dyntrace-libs', common.LIBS_DIR])

    # run petablox
    #run_petablox(project_dir)

    # Run graph kernel computation.
    project_dir = common.get_project_dir(project)
    kernel_file_path = common.get_kernel_path(project)
    graph_kernel_cmd = ['python',
                        common.get_simprog('precompute_kernel.py'),
                        project_dir,
                        kernel_file_path]
    common.run_cmd(graph_kernel_cmd)
    # print() call instead of the Python-2 print statement, consistent
    # with the print() usage elsewhere in this file.
    print('Generated kernel file for {0}.'.format(project))
    return kernel_file_path
def generate_graphs(project):
    """Invoke dljc's graphtool to build program graphs with prog2dfg.

    Graph kernels that do not depend on ontology information are
    precomputed as part of this step; dljc's build cache is reused.
    """
    graphtool_args = ['--graph-jar', common.get_jar('prog2dfg.jar'), '--cache']
    common.run_dljc(project, ['graphtool'], graphtool_args)
def generate_graphs(project):
    """Generate program graphs for *project* via dljc's graphtool (prog2dfg).

    Compiles test sources as needed and precomputes the graph kernels
    that are independent of ontology information; dljc's cache is reused.
    """
    print("Generating graphs for {0}...".format(project))
    options = ['--graph-jar', common.get_jar('prog2dfg.jar'), '--cache']
    common.run_dljc(project, ['graphtool'], options)
def main():
    """Entry point: clean and run the dyntrace dljc tool on one project.

    Exits nonzero when prerequisites are missing or no project name is
    supplied on the command line.
    """
    if not os.path.exists(common.CORPUS_DIR) or not os.path.exists(
            common.LIBS_DIR):
        print("Please run python fetch.py first to fetch the corpus and/or necessary tools.")
        sys.exit(1)  # removed unreachable `return` that followed sys.exit

    if len(sys.argv) == 2:
        project = sys.argv[1]
    else:
        print('must supply single test name')
        # Was a bare exit(), which exits with status 0; an error path
        # should report failure like the check above.
        sys.exit(1)

    # print() calls (single argument) work identically under Python 2
    # and 3, replacing the Python-2-only print statements.
    print("Running analysis on corpus.")
    print(time.strftime('%X %x'))

    tools = ['dyntrace']
    print("Cleaning {}".format(project))
    common.clean_project(project)
    print("Analyzing {}".format(project))
    common.run_dljc(project, tools)

    print(time.strftime('%X %x'))
def run(project_list, args, kernel_dir):
    """Compute clusters, per-project kernels, and dtraces for all projects.

    Reuses an existing cluster file unless args.recompute_clusters is set.
    Kernels are gathered one-against-all: each project's comparison file
    contains the kernels of every *other* project.
    """
    if os.path.isfile(common.CLUSTER_FILE) and not args.recompute_clusters:
        print("Using clusters from: {0}".format(common.CLUSTER_FILE))
    else:
        # Compute clusters: first compile everything using dljc to get
        # the class dirs.
        for project in project_list:
            # TODO: If you don't clean first, nothing happens here.
            common.clean_project(project)
            print("Running Bixie")
            common.run_dljc(project, ['bixie'], ['-o', common.DLJC_OUTPUT_DIR])
        # Now run clusterer.jar to get the json file containing the clusters.
        compute_clusters_for_classes(project_list, common.CLUSTER_FILE,
                                     common.CLASS2FIELDS_FILE)

    for project in project_list:
        if args.graph:
            # Removed a stray dead `pass` statement that followed this call.
            print("Generate Graphs")
            generate_graphs(project)
        generate_project_kernel(project, common.CLUSTER_FILE)

    # Gather kernels for one-against-all comparisons.
    for project in project_list:
        others = list(project_list)  # copy so we can drop the current project
        others.remove(project)
        gather_kernels(others,
                       os.path.join(common.WORKING_DIR, kernel_dir,
                                    project + "_kernel.txt"))

    for project in project_list:
        print("Generate dtrace for {0}".format(project))
        generate_dtrace(project)
def run_inference(project):
    """Run dljc ontology inference (MaxSat backend) on a freshly cleaned project."""
    common.setup_checker_framework_env()

    inference_bin = os.path.join(os.environ['JSR308'],
                                 'generic-type-inference-solver', 'bin')
    current = os.environ.get('CLASSPATH')
    os.environ['CLASSPATH'] = (
        current + ':' + inference_bin if current else inference_bin)

    project_dir = common.get_project_dir(project)
    annotation_dir = os.path.join(project_dir, common.DLJC_OUTPUT_DIR,
                                  'annotations')
    if os.path.isdir(annotation_dir):
        # Discard annotations left over from a previous run.
        shutil.rmtree(annotation_dir)

    with common.cd(project_dir):
        common.clean_project(project)
        common.run_dljc(project,
                        ['inference'],
                        ['--solverArgs=backEndType=maxsatbackend.MaxSat',
                         '--checker', 'ontology.OntologyChecker',
                         '--solver', 'constraintsolver.ConstraintSolver',
                         '-m', 'ROUNDTRIP',
                         '-afud', annotation_dir])
def run(project_list, args, kernel_dir):
    """Compute (or reuse) clusters, then kernels and dtraces for all projects.

    Cluster/auxiliary files live under args.dir; an existing cluster file
    is reused unless args.recompute_clusters is set. Kernels are gathered
    one-against-all into <kernel_dir>/<project>_kernel.txt.
    """
    cluster_file = os.path.join(args.dir, common.CLUSTER_FILE)
    class2fields_file = os.path.join(args.dir, common.CLASS2FIELDS_FILE)
    wordclusters_file = os.path.join(args.dir, common.WORDCLUSTERS_FILE)

    if os.path.isfile(cluster_file) and not args.recompute_clusters:
        print("Using clusters from: {0}".format(cluster_file))
    else:
        # First compile everything using dljc to get the class dirs.
        print("Building projects and populating dljc cache")
        for current in project_list:
            common.clean_project(current)
            common.run_dljc(current)
        for current in project_list:
            common.run_dljc(current, ['bixie'], ['--cache'])
        # Now run clusterer.jar to get the json file containing the clusters.
        compute_clusters_for_classes(project_list, cluster_file,
                                     class2fields_file, wordclusters_file)

    for current in project_list:
        if args.graph:
            generate_graphs(current)
        generate_project_kernel(current, cluster_file)

    # Gather kernels for one-against-all comparisons.
    for current in project_list:
        remaining = list(project_list)  # copy so the current project can be dropped
        remaining.remove(current)
        gather_kernels(remaining,
                       os.path.join(kernel_dir, current + "_kernel.txt"))

    for current in project_list:
        generate_dtrace(current)
def generate_dtrace(project):
    """Produce a Daikon dtrace for *project* via dljc's dyntrace tool (cached)."""
    # TODO: set the out file to common.get_dtrace_file_for_project(project)
    dyntrace_options = ['--cache']
    common.run_dljc(project, ['dyntrace'], dyntrace_options)
def generate_dtrace(project):
    """Run dljc's dyntrace tool (cached) and also emit Daikon XML output."""
    options = ['--cache', '--daikon-xml']
    common.run_dljc(project, ['dyntrace'], options)