def main():
    """Drive the thread-scaling experiments for the auto-tuned configurations.

    argv[1] (optional int) is the dry-run flag and defaults to 1 (dry run);
    pass 0 to archive the project sources and actually run the experiments
    via thread_scaling_test().
    """
    # NOTE: removed imports this driver never used
    # (get_stencil_num, machine_conf, os).
    from scripts.utils import create_project_tarball, parse_results
    from scripts.conf.conf import machine_info
    import sys
    import time, datetime

    # user params
    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])  # dry run

    time_stamp = datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y%m%d_%H_%M')
    exp_name = "thread_scaling_at_%s_%s" % (machine_info['hostname'],
                                            time_stamp)

    tarball_dir = 'results/' + exp_name
    if dry_run == 0:
        # snapshot the sources so results can be tied to an exact revision
        create_project_tarball(tarball_dir, "project_" + exp_name)
    target_dir = 'results/' + exp_name

    # parse the results to obtain the selected parameters by the auto tuner
    params = parse_results()

    count = 0
    for group in ['MEM']:
        # IVB_10core uses 'TLB' as the counter group name instead of 'TLB_DATA'
        if (machine_info['hostname'] == 'IVB_10core') and (group == 'TLB_DATA'):
            group = 'TLB'
        count = count + thread_scaling_test(target_dir, exp_name,
                                            params=params, group=group,
                                            dry_run=dry_run)

    print("experiments count =" + str(count))
def main():
    """Run the thread-scaling experiment set selected by the auto tuner."""
    from scripts.utils import create_project_tarball, get_stencil_num, parse_results
    from scripts.conf.conf import machine_conf, machine_info
    import os, sys
    import time, datetime

    # user params
    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])  # dry run

    stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H_%M')
    exp_name = "thread_scaling_at_%s_%s" % (machine_info['hostname'], stamp)

    tarball_dir = 'results/' + exp_name
    if dry_run == 0:
        create_project_tarball(tarball_dir, "project_" + exp_name)
    target_dir = 'results/' + exp_name

    # parse the results to obtain the selected parameters by the auto tuner
    params = parse_results()

    count = 0
    for group in ['MEM']:
        if machine_info['hostname'] == 'IVB_10core' and group == 'TLB_DATA':
            group = 'TLB'
        count += thread_scaling_test(target_dir, exp_name, params=params,
                                     group=group, dry_run=dry_run)

    print("experiments count =" + str(count))
def main():
    """Single-thread cache-size vs. code-balance experiment driver."""
    from scripts.utils import create_project_tarball, get_stencil_num, parse_results
    from scripts.conf.conf import machine_conf, machine_info
    import os, sys
    import time, datetime

    # user params
    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])  # dry run

    now = datetime.datetime.fromtimestamp(time.time())
    exp_name = "cache_size_vs_code_balance_at_%s_%s" % (
        machine_info['hostname'], now.strftime('%Y%m%d_%H_%M'))

    tarball_dir = 'results/' + exp_name
    if dry_run == 0:
        create_project_tarball(tarball_dir, "project_" + exp_name)
    target_dir = 'results/' + exp_name

    th = 1  # this experiment is single threaded
    pin_str = "S0:0-%d " % (th - 1)

    count = 0
    group = 'MEM'
    if machine_info['hostname'] == 'IVB_10core' and group == 'TLB_DATA':
        group = 'TLB'
    machine_conf['pinning_args'] = ("-m -g " + group + " -C " + pin_str +
                                    ' -s 0x03 --')
    count = count + igs_test(target_dir, exp_name, th=th, group=group,
                             dry_run=dry_run)

    print("experiments count =" + str(count))
def main():
    """Run the single-thread cache-size vs. code-balance experiment.

    argv[1] (optional int) selects dry-run mode and defaults to 1 (dry run);
    pass 0 to archive the project sources and actually run the experiment
    through igs_test().
    """
    # NOTE: removed imports this driver never used
    # (get_stencil_num, parse_results, os).
    from scripts.utils import create_project_tarball
    from scripts.conf.conf import machine_conf, machine_info
    import sys
    import time, datetime

    # user params
    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])  # dry run

    time_stamp = datetime.datetime.fromtimestamp(time.time()).strftime(
        "%Y%m%d_%H_%M")
    exp_name = "cache_size_vs_code_balance_at_%s_%s" % (
        machine_info["hostname"], time_stamp)

    tarball_dir = "results/" + exp_name
    if dry_run == 0:
        # snapshot the sources so results can be tied to an exact revision
        create_project_tarball(tarball_dir, "project_" + exp_name)
    target_dir = "results/" + exp_name

    th = 1  # this experiment is strictly single threaded
    pin_str = "S0:0-%d " % (th - 1)

    count = 0
    group = "MEM"
    # IVB_10core uses 'TLB' as the counter group name instead of 'TLB_DATA'
    if (machine_info["hostname"] == "IVB_10core") and (group == "TLB_DATA"):
        group = "TLB"
    machine_conf["pinning_args"] = ("-m -g " + group + " -C " + pin_str
                                    + " -s 0x03 --")
    count = count + igs_test(target_dir, exp_name, th=th, group=group,
                             dry_run=dry_run)

    print("experiments count =" + str(count))
def main():
    """Launch the Pluto thread-scaling runs for previously tuned configs."""
    from scripts.utils import create_project_tarball, get_stencil_num
    from scripts.conf.conf import machine_conf, machine_info
    import os, sys
    from csv import DictReader
    import time, datetime

    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])

    stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H_%M')
    exp_name = "pluto_thread_scaling_at_%s_%s" % (machine_info['hostname'], stamp)

    tarball_dir = 'results/' + exp_name
    if dry_run == 0:
        create_project_tarball(tarball_dir, "test_" + exp_name)
    target_dir = 'results/' + exp_name

    # read earlier results and tag each row with a stencil name
    # (kernel id -> stencil name; an unknown id aborts the parse, as before)
    stencil_names = {0: '3d25pt', 1: '3d7pt', 4: '3d25pt_var', 5: '3d7pt_var'}
    data = []
    data_file = os.path.join('results', 'summary.csv')
    try:
        with open(data_file, 'rb') as output_file:
            for row in DictReader(output_file):
                # a KeyError on an unknown kernel id is swallowed below,
                # matching the original bare-raise behaviour
                row['stencil'] = stencil_names[get_stencil_num(row)]
                data.append(row)
    except:
        # best effort: a missing or partial summary simply yields no data
        pass

    param_l = dict()
    for k in data:
        try:
            key = (k['stencil'], int(k['Global NX']),
                   k['LIKWID performance counter'])
            tiles = [int(k['PLUTO tile size of loop 1']),
                     int(k['PLUTO tile size of loop 3']),
                     int(k['PLUTO tile size of loop 4'])]
            param_l[key] = (tiles, int(k['Number of time steps']))
        except:
            print(k)
            raise

    count = 0
    for group in ['MEM', 'L2']:
        if machine_info['hostname'] == 'Haswell_18core':
            machine_conf['pinning_args'] = " -m -g " + group + " -C S1:0-"
        elif machine_info['hostname'] == 'IVB_10core':
            if group == 'TLB_DATA':
                group = 'TLB'
            machine_conf['pinning_args'] = " -g " + group + " -C S0:0-"
        count = count + thread_scaling_test(dry_run, target_dir, exp_name,
                                            param_l=param_l, group=group)

    print("experiments count =" + str(count))
def main():
    """Increasing-grid-size experiment over the auto-tuned configurations.

    Command line:
      argv[1] (optional int)  dry-run flag, defaults to 1 (no real runs)
      argv[2] (optional)      'tgs' to test only the TGS combinations

    Builds the experiment name, optionally archives the sources, loads the
    auto-tuner selections, derives the pinning string for the configured
    socket count, and dispatches one igs_test() sweep per counter group.
    """
    # NOTE: removed unused imports (get_stencil_num, os).
    from scripts.utils import create_project_tarball, parse_results
    from scripts.conf.conf import machine_conf, machine_info
    import sys
    import time, datetime

    # user params
    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])  # dry run

    is_tgs_only = 0
    if len(sys.argv) >= 3:
        if sys.argv[2] == 'tgs':  # whether to test all TGS combinations
            is_tgs_only = 1

    sockets = 1  # number of processors to use in the experiments

    time_stamp = datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y%m%d_%H_%M')
    tgs_s = '' if is_tgs_only == 0 else '_tgs'
    exp_name = "increasing_grid_size_sockets_%d%s_at_%s_%s" % (
        sockets, tgs_s, machine_info['hostname'], time_stamp)

    tarball_dir = 'results/' + exp_name
    if dry_run == 0:
        create_project_tarball(tarball_dir, "project_" + exp_name)
    target_dir = 'results/' + exp_name

    # parse the results to obtain the selected parameters by the auto tuner
    params = parse_results(is_tgs_only=is_tgs_only)

    # update the pinning information to use all cores
    th = machine_info['n_cores'] * sockets
    if sockets == 1:
        pin_str = "S0:0-%d " % (th - 1)
    elif sockets == 2:
        pin_str = "S0:0-%d@S1:0-%d -i " % (th // 2 - 1, th // 2 - 1)
    else:
        # previously an unsupported count surfaced as a NameError at use time
        raise ValueError("unsupported socket count: %d" % sockets)

    count = 0
    # other measured groups: MEM, L2, L3, TLB_DATA, ENERGY
    for group in ['DATA']:
        if (machine_info['hostname'] == 'IVB_10core') and (group == 'TLB_DATA'):
            group = 'TLB'  # IVB_10core names this counter group 'TLB'
        machine_conf['pinning_args'] = ("-m -g " + group + " -C " + pin_str +
                                        ' -s 0x03 --')
        count = count + igs_test(target_dir, exp_name, th=th, params=params,
                                 group=group, dry_run=dry_run,
                                 is_tgs_only=is_tgs_only)

    print("experiments count =" + str(count))
def main():
    """Pochoir increasing-grid-size driver; reuses previously measured cases."""
    from scripts.utils import create_project_tarball, get_stencil_num
    from scripts.conf.conf import machine_conf, machine_info
    import os, sys
    from csv import DictReader
    import time, datetime

    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])

    time_stamp = datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y%m%d_%H_%M')
    exp_name = "pochoir_increasing_grid_size_at_%s_%s" % (
        machine_info['hostname'], time_stamp)

    tarball_dir = 'results/' + exp_name
    if dry_run == 0:
        create_project_tarball(tarball_dir, "project_" + exp_name)
    target_dir = 'results/' + exp_name

    # load earlier results (if any) to find which cases already exist
    data = []
    data_file = os.path.join('results', 'summary.csv')
    try:
        with open(data_file, 'rb') as output_file:
            for row in DictReader(output_file):
                row['stencil'] = get_stencil_num(row)
                data.append(row)
    except:
        pass  # no readable summary yet: start with an empty set

    params = set()
    for row in data:
        try:
            params.add((row['stencil'], int(row['Global NX'])))
        except:
            print(row)
            raise

    # update the pinning information to use all cores
    th = machine_info['n_cores']
    count = 0
    for group in ['MEM', 'TLB_DATA', 'L2', 'L3', 'DATA']:  # also: 'ENERGY'
        if machine_info['hostname'] == 'IVB_10core':
            if group == 'TLB_DATA':
                group = 'TLB'
        machine_conf['pinning_args'] = (
            " -m -g " + group + " -c " + "%d-%d " % (0, th - 1) +
            '-- numactl --physcpubind=%d-%d' % (0, th - 1))
        count = count + igs_test(dry_run, target_dir, exp_name, th=th,
                                 params=params, group=group)

    print("experiments count =" + str(count))
def main():
    """Sweep Pochoir over increasing grid sizes found in results/summary.csv."""
    from scripts.utils import create_project_tarball, get_stencil_num
    from scripts.conf.conf import machine_conf, machine_info
    import os, sys
    from csv import DictReader
    import time, datetime

    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])

    stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H_%M')
    exp_name = "pochoir_increasing_grid_size_at_%s_%s" % (
        machine_info['hostname'], stamp)

    tarball_dir = 'results/' + exp_name
    if dry_run == 0:
        create_project_tarball(tarball_dir, "project_" + exp_name)
    target_dir = 'results/' + exp_name

    # collect prior results so already-measured cases can be recognized
    data = []
    data_file = os.path.join('results', 'summary.csv')
    try:
        with open(data_file, 'rb') as output_file:
            reader = DictReader(output_file)
            for k in reader:
                k['stencil'] = get_stencil_num(k)
                data.append(k)
    except:
        # best effort: missing/unreadable summary means no prior cases
        pass

    params = set()
    for k in data:
        try:
            params.add((k['stencil'], int(k['Global NX'])))
        except:
            print(k)
            raise

    # update the pinning information to use all cores
    th = machine_info['n_cores']
    count = 0
    for group in ['MEM', 'TLB_DATA', 'L2', 'L3', 'DATA']:  # also: 'ENERGY'
        if machine_info['hostname'] == 'IVB_10core' and group == 'TLB_DATA':
            group = 'TLB'
        machine_conf['pinning_args'] = (
            " -m -g %s -c %d-%d -- numactl --physcpubind=%d-%d"
            % (group, 0, th - 1, 0, th - 1))
        count += igs_test(dry_run, target_dir, exp_name, th=th,
                          params=params, group=group)

    print("experiments count =" + str(count))
def main():
    """Increasing-grid-size sweep using the auto-tuner's selected parameters."""
    from scripts.utils import create_project_tarball, get_stencil_num, parse_results
    from scripts.conf.conf import machine_conf, machine_info
    import os, sys
    import time, datetime

    # user params
    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])  # dry run

    is_tgs_only = 0
    if len(sys.argv) >= 3 and sys.argv[2] == 'tgs':
        # whether to test all TGS combinations
        is_tgs_only = 1

    sockets = 1  # number of processors to use in the experiments

    stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H_%M')
    tgs_s = '_tgs' if is_tgs_only else ''
    exp_name = "increasing_grid_size_sockets_%d%s_at_%s_%s" % (
        sockets, tgs_s, machine_info['hostname'], stamp)

    tarball_dir = 'results/' + exp_name
    if dry_run == 0:
        create_project_tarball(tarball_dir, "project_" + exp_name)
    target_dir = 'results/' + exp_name

    # parse the results to obtain the selected parameters by the auto tuner
    params = parse_results(is_tgs_only=is_tgs_only)

    # update the pinning information to use all cores
    th = machine_info['n_cores'] * sockets
    if sockets == 1:
        pin_str = "S0:0-%d " % (th - 1)
    if sockets == 2:
        pin_str = "S0:0-%d@S1:0-%d -i " % (th // 2 - 1, th // 2 - 1)

    count = 0
    # other measured groups: MEM, L2, L3, TLB_DATA, ENERGY
    for group in ['DATA']:
        if machine_info['hostname'] == 'IVB_10core' and group == 'TLB_DATA':
            group = 'TLB'
        machine_conf['pinning_args'] = ("-m -g " + group + " -C " + pin_str +
                                        ' -s 0x03 --')
        count += igs_test(target_dir, exp_name, th=th, params=params,
                          group=group, dry_run=dry_run,
                          is_tgs_only=is_tgs_only)

    print("experiments count =" + str(count))
def main():
    """Pochoir thread-scaling driver over previously measured problem sizes."""
    from scripts.utils import create_project_tarball, get_stencil_num
    from scripts.conf.conf import machine_conf, machine_info
    import os, sys
    from csv import DictReader
    import time, datetime

    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])

    stamp = datetime.datetime.fromtimestamp(time.time()).strftime("%Y%m%d_%H_%M")
    exp_name = "pochoir_thread_scaling_at_%s_%s" % (machine_info["hostname"],
                                                    stamp)

    tarball_dir = "results/" + exp_name
    if dry_run == 0:
        create_project_tarball(tarball_dir, "project_" + exp_name)
    target_dir = "results/" + exp_name

    # collect earlier results to find which problem sizes already exist
    data = []
    data_file = os.path.join("results", "summary.csv")
    try:
        with open(data_file, "rb") as output_file:
            for row in DictReader(output_file):
                row["stencil"] = get_stencil_num(row)
                data.append(row)
    except:
        pass  # tolerate a missing or unreadable summary

    params = set()
    for row in data:
        try:
            params.add((row["stencil"], int(row["Global NX"])))
        except:
            print(row)
            raise

    count = 0
    for group in ["MEM", "L2"]:  # also: TLB_DATA, L3, DATA, ENERGY
        count += thread_scaling_test(dry_run, target_dir, exp_name,
                                     params=params, group=group)

    print("experiments count =" + str(count))
def main():
    """Run Pochoir thread-scaling tests for every known (stencil, NX) pair."""
    from scripts.utils import create_project_tarball, get_stencil_num
    from scripts.conf.conf import machine_conf, machine_info
    import os, sys
    from csv import DictReader
    import time, datetime

    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])

    time_stamp = datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y%m%d_%H_%M')
    exp_name = "pochoir_thread_scaling_at_%s_%s" % (machine_info['hostname'],
                                                    time_stamp)

    tarball_dir = 'results/' + exp_name
    if dry_run == 0:
        create_project_tarball(tarball_dir, "project_" + exp_name)
    target_dir = 'results/' + exp_name

    # read prior results so existing configurations can be recognized
    data = []
    data_file = os.path.join('results', 'summary.csv')
    try:
        with open(data_file, 'rb') as output_file:
            reader = DictReader(output_file)
            for entry in reader:
                entry['stencil'] = get_stencil_num(entry)
                data.append(entry)
    except:
        # best effort: no summary file means no known configurations
        pass

    params = set()
    for entry in data:
        try:
            params.add((entry['stencil'], int(entry['Global NX'])))
        except:
            print(entry)
            raise

    count = 0
    for group in ['MEM', 'L2']:  # also: TLB_DATA, L3, DATA, ENERGY
        count = count + thread_scaling_test(dry_run, target_dir, exp_name,
                                            params=params, group=group)

    print("experiments count =" + str(count))
def main():
    """Pluto thread-scaling driver: replays the auto-tuned tile configurations."""
    from scripts.utils import create_project_tarball, get_stencil_num
    from scripts.conf.conf import machine_conf, machine_info
    import os, sys
    from csv import DictReader
    import time, datetime

    dry_run = 1 if len(sys.argv) < 2 else int(sys.argv[1])

    time_stamp = datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y%m%d_%H_%M')
    exp_name = "pluto_thread_scaling_at_%s_%s" % (machine_info['hostname'],
                                                  time_stamp)

    tarball_dir = 'results/' + exp_name
    if dry_run == 0:
        create_project_tarball(tarball_dir, "test_" + exp_name)
    target_dir = 'results/' + exp_name

    # read results/summary.csv, tagging each row with its stencil name
    data = []
    data_file = os.path.join('results', 'summary.csv')
    try:
        with open(data_file, 'rb') as output_file:
            reader = DictReader(output_file)
            for row in reader:
                kid = get_stencil_num(row)
                if kid == 0:
                    row['stencil'] = '3d25pt'
                elif kid == 1:
                    row['stencil'] = '3d7pt'
                elif kid == 4:
                    row['stencil'] = '3d25pt_var'
                elif kid == 5:
                    row['stencil'] = '3d7pt_var'
                else:
                    raise  # unknown kernel: abandon the parse (caught below)
                data.append(row)
    except:
        # best effort: a missing or partial summary yields no data
        pass

    param_l = dict()
    for row in data:
        try:
            tiles = [int(row['PLUTO tile size of loop 1']),
                     int(row['PLUTO tile size of loop 3']),
                     int(row['PLUTO tile size of loop 4'])]
            param_l[(row['stencil'], int(row['Global NX']),
                     row['LIKWID performance counter'])] = (
                tiles, int(row['Number of time steps']))
        except:
            print(row)
            raise

    count = 0
    for group in ['MEM', 'L2']:  # also: L3, DATA, TLB_DATA, ENERGY
        if machine_info['hostname'] == 'Haswell_18core':
            machine_conf['pinning_args'] = " -m -g " + group + " -C S1:0-"
        elif machine_info['hostname'] == 'IVB_10core':
            if group == 'TLB_DATA':
                group = 'TLB'
            machine_conf['pinning_args'] = " -g " + group + " -C S0:0-"
        count += thread_scaling_test(dry_run, target_dir, exp_name,
                                     param_l=param_l, group=group)

    print("experiments count =" + str(count))