def copy_files():
    """Stage per-event specfem run directories.

    Reads the event list from ``XEVENTID`` and, for each event, copies the
    shared specfem files into the pre-existing ``RUN_BASE/event_XXX``
    directory (one directory per event, 1-based, zero-padded index).
    """
    cmtcenter = "cmtfile"
    eventfile = "XEVENTID"
    scratch_dir = "../.."
    specfemdir = os.path.join(scratch_dir, "specfem_stuff")
    datacenter = os.path.join(scratch_dir, "RUN_BASE")

    # Fail fast: every input and target location must already exist.
    check_exist(eventfile)
    check_exist(cmtcenter)
    check_exist(specfemdir)
    check_exist(datacenter)

    eventlist = read_txt_into_list(eventfile)
    print("Number of events: %d" % len(eventlist))

    for idx, event in enumerate(eventlist, start=1):
        cmtfile = os.path.join(cmtcenter, event)
        targetdir = os.path.join(datacenter, "event_%03d" % idx)
        check_exist(cmtfile)
        check_exist(targetdir)
        print("*" * 20)
        print("event: %s" % event)
        print("cmtfile: %s" % cmtfile)
        print("targetdir: %s" % targetdir)
        copy_specfem_stuff(specfemdir, targetdir)
        # Deliberately disabled steps (kept for reference):
        # copy_cmtfile(cmtfile, targetdir)
        # copy_derivative_cmtfile(cmtfile, "cmtfile")
        check_mesh(targetdir)
def copy_files():
    """Copy shared specfem inputs into each event's run directory.

    The event names come from ``XEVENTID``; target directories
    ``RUN_BASE/event_001``, ``event_002``, ... must already exist.
    """
    cmtcenter = "cmtfile"
    eventfile = "XEVENTID"
    scratch_dir = "../.."
    specfemdir = os.path.join(scratch_dir, "specfem_stuff")
    datacenter = os.path.join(scratch_dir, "RUN_BASE")

    # Validate all fixed paths up front before touching anything.
    for required in (eventfile, cmtcenter, specfemdir, datacenter):
        check_exist(required)

    eventlist = read_txt_into_list(eventfile)
    print("Number of events: %d" % len(eventlist))

    for _idx, event in enumerate(eventlist):
        idx = _idx + 1
        cmtfile = os.path.join(cmtcenter, event)
        targetdir = os.path.join(datacenter, "event_%03d" % idx)
        check_exist(cmtfile)
        check_exist(targetdir)
        print("*" * 20)
        print("event: %s" % event)
        print("cmtfile: %s" % cmtfile)
        print("targetdir: %s" % targetdir)
        copy_specfem_stuff(specfemdir, targetdir)
        # The CMT-copy steps are intentionally switched off here:
        # copy_cmtfile(cmtfile, targetdir)
        # copy_derivative_cmtfile(cmtfile, "cmtfile")
        check_mesh(targetdir)
def create_job_pbs(nevents_per_job, walltime_per_simulation, deriv_cmt_list):
    """Split the master event list into per-job sub-lists and generate one
    PBS batch script per job.

    Fix: the original used Python 2 ``print`` statements while the rest of
    this file uses ``print()`` calls; converted to %-formatted ``print()``
    calls so the block emits identical text and is valid Python 3.

    :param nevents_per_job: number of simulations bundled into one job
    :param walltime_per_simulation: walltime (hours) budgeted per simulation
    :param deriv_cmt_list: derivative CMT list forwarded to the batch
        script template
    """
    nnodes_per_simulation = extract_number_of_nodes()

    # Total requested walltime scales linearly with the bundle size.
    walltime = "%d:00:00" % int(walltime_per_simulation * nevents_per_job)

    eventlist_file = "./XEVENTID"
    job_template = "job_solver_bundle.init.bash"
    sub_eventfile_prefix = "XEVENTID_"
    sub_sbatch_prefix = "job_solver_bundle.pbs."

    # Remove stale sub-lists and batch scripts left by a previous run.
    for fn in glob.glob(sub_eventfile_prefix + "*"):
        os.remove(fn)
    for fn in glob.glob(sub_sbatch_prefix + "*"):
        os.remove(fn)

    eventlist = read_txt_into_list(eventlist_file)
    nevents = len(eventlist)
    njobs = int(math.ceil(float(nevents) / nevents_per_job))
    print("====== Create job scripts =======")
    print("Number of events: %d" % nevents)
    print("Number of jobs: %d" % njobs)

    for ijob in range(njobs):
        # Determine the half-open slice [start_idx, end_idx) for this job.
        print("-----------")
        print("Jobid: %d" % (ijob + 1))
        start_idx = ijob * nevents_per_job
        end_idx = (ijob + 1) * nevents_per_job
        # The final job absorbs the remainder when nevents is not an
        # exact multiple of nevents_per_job.
        if ijob == njobs - 1:
            end_idx = nevents
        print("start and end idx: [%d, %d)" % (start_idx, end_idx))

        # Write the per-job sub-eventlist file.
        eventfile = "%s%d" % (sub_eventfile_prefix, ijob + 1)
        print("eventlist file: %s" % eventfile)
        write_list_to_txt(eventlist[start_idx:end_idx], eventfile)

        # Generate the job's PBS script from the template.
        outputfn = "%s%d" % (sub_sbatch_prefix, ijob + 1)
        print("jobs batch file: %s" % outputfn)
        modify_job_sbatch_file(job_template, outputfn, eventfile,
                               start_idx, nnodes_per_simulation,
                               walltime, deriv_cmt_list)
def create_job_pbs(nevents_per_job, walltime_per_simulation, deriv_cmt_list):
    """Bundle events into jobs and emit one PBS script per bundle.

    Fix: replaced Python 2 ``print`` statements (a syntax error on Python 3
    and inconsistent with the ``print()`` calls used elsewhere in this
    file) with %-formatted ``print()`` calls producing identical output.

    :param nevents_per_job: simulations bundled per job
    :param walltime_per_simulation: walltime (hours) per simulation
    :param deriv_cmt_list: derivative CMT list passed through to the
        sbatch-file writer
    """
    nnodes_per_simulation = extract_number_of_nodes()

    # Walltime for a job = per-simulation walltime * bundle size.
    walltime = "%d:00:00" % int(walltime_per_simulation * nevents_per_job)

    eventlist_file = "./XEVENTID"
    job_template = "job_solver_bundle.init.bash"
    sub_eventfile_prefix = "XEVENTID_"
    sub_sbatch_prefix = "job_solver_bundle.pbs."

    # Clear outputs from any earlier invocation.
    for fn in glob.glob(sub_eventfile_prefix + "*"):
        os.remove(fn)
    for fn in glob.glob(sub_sbatch_prefix + "*"):
        os.remove(fn)

    eventlist = read_txt_into_list(eventlist_file)
    nevents = len(eventlist)
    njobs = int(math.ceil(float(nevents) / nevents_per_job))
    print("====== Create job scripts =======")
    print("Number of events: %d" % nevents)
    print("Number of jobs: %d" % njobs)

    for ijob in range(njobs):
        print("-----------")
        print("Jobid: %d" % (ijob + 1))
        start_idx = ijob * nevents_per_job
        end_idx = (ijob + 1) * nevents_per_job
        # Last job takes whatever events remain.
        if ijob == njobs - 1:
            end_idx = nevents
        print("start and end idx: [%d, %d)" % (start_idx, end_idx))

        # Per-job sub-eventlist file.
        eventfile = "%s%d" % (sub_eventfile_prefix, ijob + 1)
        print("eventlist file: %s" % eventfile)
        write_list_to_txt(eventlist[start_idx:end_idx], eventfile)

        # Per-job PBS script rendered from the template.
        outputfn = "%s%d" % (sub_sbatch_prefix, ijob + 1)
        print("jobs batch file: %s" % outputfn)
        modify_job_sbatch_file(job_template, outputfn, eventfile,
                               start_idx, nnodes_per_simulation,
                               walltime, deriv_cmt_list)
def create_job_folder(template_folder, tag, eventlist_dict, cmtfolder,
                      stafolder, generate_deriv_cmt, deriv_cmt_list):
    """Create one ``job_<tag>_NN`` sub-folder per entry of *eventlist_dict*
    and populate each with its event list, CMT files, station files, the
    script template tree, and the shared ``config.yml``.

    Fix: corrected typos in the progress messages ("Creat" -> "Create",
    "stattion" -> "station").

    :param template_folder: directory of scripts copied into every job folder
    :param tag: string embedded in each job folder name
    :param eventlist_dict: maps 1-based job index -> event-list file path
    :param cmtfolder: source folder of CMT files
    :param stafolder: source folder of station files
    :param generate_deriv_cmt: forwarded to ``copy_cmtfiles``
    :param deriv_cmt_list: forwarded to ``copy_cmtfiles``
    """
    targetdir_list = []
    print("*" * 20 + "\nCreate job sub folder")
    # Job folders are named job_<tag>_01, job_<tag>_02, ...
    for _i in range(len(eventlist_dict)):
        idx = _i + 1
        targetdir = "job_" + tag + "_%02d" % idx
        targetdir_list.append(targetdir)

    # Abort before copying anything if any target folder already exists.
    check_job_folder_exist(targetdir_list)

    for _i, targetdir in enumerate(targetdir_list):
        idx = _i + 1
        print("=" * 5 + "\nJob id: %d" % idx)

        # Copy this job's eventlist file in as XEVENTID.
        eventlist_file = eventlist_dict[idx]
        targetfile = os.path.join(targetdir, "XEVENTID")
        copyfile(eventlist_file, targetfile)

        # Copy original cmt file and station file for each event.
        targetcmtdir = os.path.join(targetdir, "cmtfile")
        targetstadir = os.path.join(targetdir, "station")
        print("copy cmt:[%s --> %s]" % (cmtfolder, targetcmtdir))
        print("copy station:[%s --> %s]" % (stafolder, targetstadir))
        events = read_txt_into_list(eventlist_file)
        for _event in events:
            copy_cmtfiles(_event, cmtfolder, targetcmtdir,
                          generate_deriv_cmt, deriv_cmt_list)
            copy_stations(_event, stafolder, targetstadir)

        # Copy the scripts template tree into the job folder.
        print("Copy dir:[%s --> %s]" % (template_folder, targetdir))
        copytree(template_folder, targetdir)

        # Copy the shared config.yaml file.
        copyfile("config.yml", os.path.join(targetdir, "config.yml"))
def create_job_folder(template_folder, tag, eventlist_dict, cmtfolder,
                      stafolder, generate_deriv_cmt, deriv_cmt_list):
    """Build per-job working folders (``job_<tag>_NN``) containing the
    event list, CMT and station files, script templates, and config.

    Fix: corrected typos in printed messages ("Creat" -> "Create",
    "stattion" -> "station").

    :param template_folder: script-template directory copied to each job
    :param tag: tag embedded in the job folder names
    :param eventlist_dict: 1-based job index -> event-list file path
    :param cmtfolder: source CMT-file folder
    :param stafolder: source station-file folder
    :param generate_deriv_cmt: passed through to ``copy_cmtfiles``
    :param deriv_cmt_list: passed through to ``copy_cmtfiles``
    """
    targetdir_list = []
    print("*" * 20 + "\nCreate job sub folder")
    for _i in range(len(eventlist_dict)):
        idx = _i + 1
        targetdir = "job_" + tag + "_%02d" % idx
        targetdir_list.append(targetdir)

    # Refuse to overwrite pre-existing job folders.
    check_job_folder_exist(targetdir_list)

    for _i, targetdir in enumerate(targetdir_list):
        idx = _i + 1
        print("=" * 5 + "\nJob id: %d" % idx)

        # Install this job's eventlist as XEVENTID.
        eventlist_file = eventlist_dict[idx]
        targetfile = os.path.join(targetdir, "XEVENTID")
        copyfile(eventlist_file, targetfile)

        # Per-event CMT and station files.
        targetcmtdir = os.path.join(targetdir, "cmtfile")
        targetstadir = os.path.join(targetdir, "station")
        print("copy cmt:[%s --> %s]" % (cmtfolder, targetcmtdir))
        print("copy station:[%s --> %s]" % (stafolder, targetstadir))
        events = read_txt_into_list(eventlist_file)
        for _event in events:
            copy_cmtfiles(_event, cmtfolder, targetcmtdir,
                          generate_deriv_cmt, deriv_cmt_list)
            copy_stations(_event, stafolder, targetstadir)

        # Script templates and shared config.
        print("Copy dir:[%s --> %s]" % (template_folder, targetdir))
        copytree(template_folder, targetdir)
        copyfile("config.yml", os.path.join(targetdir, "config.yml"))
def construct_filelist(base, eventlist):
    """Return the adjoint-misfit json path for each event.

    Fix: replaced the manual append loop with a list comprehension (idiom;
    same output).

    :param base: directory holding the per-event misfit files
    :param eventlist: iterable of event name strings
    :return: list of ``<base>/<event>.adjoint.misfit.json`` paths, in
        event order
    """
    return [os.path.join(base, "%s.adjoint.misfit.json" % event)
            for event in eventlist]


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', action='store', dest='event_file',
                        required=True, help="event list file")
    args = parser.parse_args()

    base = "/lustre/atlas/proj-shared/geo111/rawdata/asdf/adjsrc/sum"
    eventlist = read_txt_into_list(args.event_file)
    print("Number of event: %d" % len(eventlist))

    filelist = construct_filelist(base, eventlist)
    print("filelist: %s" % filelist)

    # Aggregate per-event adjoint misfits into one summary json.
    misfits = sum_adjoint_misfits(filelist)
    outputfile = os.path.join(base, "adjoint_misfit.summary.json")
    print("output json file: %s" % outputfile)
    dump_json(misfits, outputfile)
return misfits def construct_filelist(base, eventlist): filelist = [] for event in eventlist: filelist.append(os.path.join(base, "%s.adjoint.misfit.json" % event)) return filelist if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-f', action='store', dest='event_file', required=True, help="event list file") args = parser.parse_args() base = "/lustre/atlas/proj-shared/geo111/rawdata/asdf/adjsrc/sum" eventlist = read_txt_into_list(args.event_file) print("Number of event: %d" % len(eventlist)) filelist = construct_filelist(base, eventlist) print("filelist: %s" % filelist) misfits = sum_adjoint_misfits(filelist) outputfile = os.path.join(base, "adjoint_misfit.summary.json") print("output json file: %s" % outputfile) dump_json(misfits, outputfile)