# Performance-scaling sweep for the Rayleigh-Taylor overturn benchmark.
# For each element resolution in elResSweep, build a ModelSuite that varies
# the processor count, then report per-suite speedups (reporting(), below).

# Square element resolutions (N x N) to sweep over.
elResSweep = [32, 64, 96, 128, 196, 256]

defParams = SimParams(nsteps=5)
mRun = ModelRun("RayTayOverturn", "RayleighTaylorBenchmark.xml",
    simParams=defParams)

mSuites = []
for elRes in elResSweep:
    # Square mesh: same resolution in both I and J directions.
    # NOTE(review): every suite shares this single ModelRun template and the
    # overrides are re-assigned each iteration — assumes ModelSuite /
    # generateRuns snapshot the template per run; confirm, otherwise later
    # iterations would clobber earlier suites' resolutions.
    mRun.paramOverrides["elementResI"] = elRes
    mRun.paramOverrides["elementResJ"] = elRes
    outPathBase = os.path.join('output', 'perfScalingTest',
        'res%04d' % elRes)
    if not os.path.exists(outPathBase):
        os.makedirs(outPathBase)
    mSuite = ModelSuite(outPathBase, templateMRun=mRun)
    # Stash the resolution on the suite so reporting() can label its output.
    mSuite.elRes = elRes
    mSuite.procRange = [1, 2, 4, 8]
    mSuite.addVariant("nproc", JobParamVariant("nproc", mSuite.procRange))
    mSuite.subOutputPathGenFunc = msuite.getSubdir_RunIndexAndText
    # zip pairs variant value lists positionally: one run per nproc value
    # (rather than the cross-product).  Py3 port of itertools.izip.
    mSuite.generateRuns(iterGen=zip)
    mSuites.append(mSuite)

#-----------------------------

def reporting(mSuite, mResults):
    """Report performance results for one suite of the scaling sweep.

    Prints the parallel speedups for the suite's runs and opens a CSV file
    in the suite's output directory, writing the header row of per-run
    performance info.

    :param mSuite: a ModelSuite from the sweep above (must carry the
        ``elRes`` attribute set there).
    :param mResults: the results object for the suite's runs, as accepted
        by ``modelplots.getSpeedups``.

    NOTE(review): this function continues past the end of this chunk —
    the per-run rows and the file close presumably follow; ``csvFile`` is
    deliberately left as a plain open here for that reason.
    """
    print("With element res %d" % mSuite.elRes)
    speedups = modelplots.getSpeedups(mSuite.runs, mResults)
    print("Speedups were: %s" % speedups)
    csvName = os.path.join(mSuite.outputPathBase, "runPerfInfo.csv")
    # newline="" is the csv-module requirement for text-mode files on Py3
    # (replaces the old Py2 binary-mode "wb" idiom).
    csvFile = open(csvName, "w", newline="")
    wtr = csv.writer(csvFile)
    wtr.writerow(["Run", "nproc", "walltime (sec)", "max mem (MB)",
        "speedup"])