import os
import sys
import time

import util
import scrape_build
import scrape_run
import aggregate_scrape_run


def regressContech(inputs, numthreads, benchmarks):
    if "NAS_HOME" in os.environ:
        NAS_HOME = os.environ["NAS_HOME"]
    else:
        print(">Error: Could not find NAS installation. Set NAS_HOME to the root of your NAS directory.")
        sys.exit(1)

    # Rebuild benchmarks. NAS must be built sequentially, so wait for each
    # compile job to finish before submitting the next one.
    compileJobIds = []
    for input in inputs:
        for b in benchmarks:
            x = compilationTimeCompare(b, input)
            compileJobIds.append(x)
            util.waitForJobs(x)
    time.sleep(10)  # Wait for output files to be copied back
    buildRoot = scrape_build.processAll([util.getFileNameForJob(j) for j in compileJobIds])

    # Run the benchmarks. The TIME format string makes GNU time emit its
    # measurements as a one-line JSON object that the scrapers can parse.
    os.environ["TIME"] = '{"real":%e, "user":%U, "sys":%S, "mem":%M }'
    for input in inputs:
        runJobIds = []
        runJobIds.extend([statsRun(b, numthreads, input, "contech") for b in benchmarks])
        runJobIds.extend([nativeRun(b, numthreads, input) for b in benchmarks])
        util.waitForJobs(runJobIds)
        root = buildRoot + scrape_run.processAll([util.getFileNameForJob(j) for j in runJobIds])

        # Aggregate output
        table = aggregate_scrape_run.aggregate(root)
        aggregate_scrape_run.computeSlowdown(table)
        aggregate_scrape_run.generateCsv(table, "results-{}.csv".format(input))
def regressContech(inputs, numthreads, benchmarks):
    if "NAS_HOME" in os.environ:
        NAS_HOME = os.environ["NAS_HOME"]
    else:
        print(">Error: Could not find NAS installation. Set NAS_HOME to the root of your NAS directory.")
        sys.exit(1)

    # Rebuild benchmarks. NAS must be built sequentially, so wait for each
    # compile job to finish before submitting the next one.
    compileJobIds = []
    for input in inputs:
        for b in benchmarks:
            x = compilationTimeCompare(b, input)
            compileJobIds.append(x)
            util.waitForJobs(x)
    time.sleep(1)  # Wait for output files to be copied back
    buildRoot = scrape_build.processAll([util.getFileNameForJob(j) for j in compileJobIds])

    # Run the benchmarks
    os.environ["TIME"] = '{"real":%e, "user":%U, "sys":%S, "mem":%M }'
    for input in inputs:
        runJobIds = []
        runJobIds.extend([statsRun(b, numthreads, input, "contech") for b in benchmarks])
        runJobIds.extend([nativeRun(b, numthreads, input) for b in benchmarks])
        util.waitForJobs(runJobIds)
        root = buildRoot + scrape_run.processAll([util.getFileNameForJob(j) for j in runJobIds])

        # Aggregate output
        table = aggregate_scrape_run.aggregate(root)
        aggregate_scrape_run.computeSlowdown(table)
        aggregate_scrape_run.generateCsv(table, "results-{}.csv".format(input))
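# The drivers above lean on a small job-queue interface from util:
# waitForJobs accepts a single job id or a list of them, and
# getFileNameForJob maps a finished job back to the file holding its
# output. A minimal sketch of that interface is below, assuming jobs are
# local subprocesses with stdout/stderr captured to per-job files; the
# names are taken from the call sites, but the bodies are hypothetical
# and the real util module in the Contech scripts may differ.

import subprocess

_jobs = {}   # job id -> (subprocess.Popen handle, open output file)
_files = {}  # job id -> path of the file holding that job's output

def submitJob(cmd, outFile):
    # Hypothetical submitter: launch cmd asynchronously, capturing output.
    out = open(outFile, "w")
    p = subprocess.Popen(cmd, stdout=out, stderr=subprocess.STDOUT)
    _jobs[p.pid] = (p, out)
    _files[p.pid] = outFile
    return p.pid

def waitForJobs(jobIds):
    # The call sites pass both a single id and a list of ids.
    if not isinstance(jobIds, list):
        jobIds = [jobIds]
    for j in jobIds:
        p, out = _jobs[j]
        p.wait()
        out.close()

def getFileNameForJob(jobId):
    return _files[jobId]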
def main(args):
    input = "qscrape"
    root = scrape_run.processAll(args[1:])

    # Aggregate output
    table = aggregate_scrape_run.aggregate(root)
    aggregate_scrape_run.computeSlowdown(table)
    aggregate_scrape_run.generateRunCsv(table, "results-{}.csv".format(input))
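# main reads its file arguments from args[1:], so a standard entry-point
# guard passing sys.argv fits if this script is invoked directly:
if __name__ == "__main__":
    main(sys.argv)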
def regressContech(inputs, numthreads, benchmarks, ro):
    # Rebuild benchmarks (skipped when the ro flag is set). buildRoot must
    # still be defined in that case, since it is concatenated below.
    buildRoot = ""
    if not ro:
        compileJobIds = [compilationTimeCompare(b) for b in benchmarks]
        buildRoot = scrape_build.processAll(compileJobIds)

    # Run the benchmarks
    os.environ["TIME"] = '{"real":%e, "user":%U, "sys":%S, "mem":%M }'
    for input in inputs:
        runJobIds = []
        runJobIds.extend([statsRun(b, numthreads, input, "contech") for b in benchmarks])
        runJobIds.extend([nativeRun(b, numthreads, input) for b in benchmarks])
        root = buildRoot + scrape_run.processAll(runJobIds)

        # Aggregate output
        table = aggregate_scrape_run.aggregate(root)
        aggregate_scrape_run.computeSlowdown(table)
        aggregate_scrape_run.generateCsv(table, "results-{}.csv".format(input))
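# A hypothetical invocation of the variant above; the benchmark names,
# input sizes, and thread count are placeholders, not values from the repo:
#   regressContech(["small", "large"], 16, ["cg", "ft", "mg"], ro=True)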
def regressContech(inputs, numthreads, benchmarks):
    # Rebuild benchmarks
    compileJobIds = [compilationTimeCompare(b) for b in benchmarks]
    util.waitForJobs(compileJobIds)
    buildRoot = scrape_build.processAll([util.getFileNameForJob(j) for j in compileJobIds])

    # Run the benchmarks
    os.environ["TIME"] = '{"real":%e, "user":%U, "sys":%S, "mem":%M }'
    for input in inputs:
        runJobIds = []
        runJobIds.extend([statsRun(b, numthreads, input, "contech") for b in benchmarks])
        runJobIds.extend([nativeRun(b, numthreads, input) for b in benchmarks])
        util.waitForJobs(runJobIds)
        root = buildRoot + scrape_run.processAll([util.getFileNameForJob(j) for j in runJobIds])

        # Aggregate output
        table = aggregate_scrape_run.aggregate(root)
        aggregate_scrape_run.computeSlowdown(table)
        aggregate_scrape_run.generateCsv(table, "results-{}.csv".format(input))
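# Each driver exports the same TIME format string because GNU time(1) uses
# the TIME environment variable as its output format: %e is elapsed real
# seconds, %U user CPU seconds, %S system CPU seconds, and %M the maximum
# resident set size in kilobytes. Each run therefore leaves a one-line JSON
# object in its log, e.g.
#   {"real":12.34, "user":11.20, "sys":0.45, "mem":204800 }
# A minimal sketch of recovering those records from a log follows; the real
# scrape_run module is not shown here, so this is only an illustration.

import json
import re

def parseTimeLines(logText):
    # Pull every {"real":...} object out of a run log and decode it.
    records = []
    for m in re.finditer(r'\{"real":[^}]*\}', logText):
        records.append(json.loads(m.group(0)))
    return records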