def main(iargs=None):
    """Create ISCE topsStack run files and write the run_files_list index.

    Either submits itself as a scheduler job (inps.submit_flag) and exits,
    or builds the stack workflow run files in inps.work_dir.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    :raises SystemExit: if no DEM/*.wgs84 file is found.
    """
    inps = putils.cmd_line_parse(iargs)
    config = putils.get_config_defaults(config_file='job_defaults.cfg')
    os.chdir(inps.work_dir)

    job_file_name = 'create_runfiles'
    job_name = job_file_name
    # 'None' is the literal string the parser uses for "not set".
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template['topsStack.demDir'] = dem_file
    except IndexError:
        # glob returned no match: the DEM has not been created/downloaded yet.
        raise SystemExit('DEM does not exist')

    inps.topsStack_template = pathObj.correct_for_isce_naming_convention(inps)

    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)
    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.write(item + '\n')  # one run-file path per line

    if inps.template['topsStack.workflow'] in ['interferogram', 'slc']:
        runObj.run_post_stack()

    return None
def main(iargs=None):
    """Run MintPy smallbaselineApp for the project and optionally email results.

    Either submits itself as a scheduler job (inps.submit_flag) and exits,
    or runs smallbaselineApp with stdout captured to out_mintpy.o.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs, script='smallbaseline_wrapper')
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'smallbaseline_wrapper'
    job_name = job_file_name
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    os.chdir(inps.work_dir)
    time.sleep(wait_seconds)

    try:
        with open('out_mintpy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main([inps.customTemplateFile])
    except Exception:
        # First attempt failed: rerun once, capturing stderr to out_mintpy.e.
        with open('out_mintpy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main([inps.customTemplateFile])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Mintpy results
    if inps.email:
        email_results.main([inps.customTemplateFile])

    return None
def main(iargs=None):
    """Download SLCs with the ASF serial downloader into the project SLC directory.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        work_dir = inps.work_dir
        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']
        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir, inps.wall_time)
        # NOTE(review): unlike the ssara variant there is no sys.exit(0) here,
        # so execution continues after submitting the job — confirm intended.

    os.chdir(inps.work_dir)

    # Honor an explicit slcDir from the template; default to <work_dir>/SLC.
    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')
    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    os.chdir(inps.slc_dir)

    # Remove a stale download cookie jar, if present.
    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    generate_files_csv(project_slc_dir, inps.custom_template_file)
    succesful = run_download_asf_serial(project_slc_dir, logger)
    change_file_permissions()

    logger.log(loglevel.INFO, "SUCCESS: %s", str(succesful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
def main(iargs=None):
    """Download SLCs with ssara_federated_query into the project SLC directory.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    logfile_name = inps.work_dir + '/ssara_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    # Honor an explicit slcDir from the template; default to <work_dir>/SLC.
    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')
    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, inps.wall_time)
        sys.exit(0)

    if not os.path.isdir(project_slc_dir):
        os.makedirs(project_slc_dir)
    os.chdir(inps.slc_dir)

    logger.log(loglevel.INFO, "DATASET: %s",
               str(inps.custom_template_file.split('/')[-1].split(".")[0]))
    logger.log(loglevel.INFO, "DATE: %s",
               datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"))

    succesful = run_ssara(project_slc_dir, inps.custom_template_file, inps.delta_lat, logger)
    logger.log(loglevel.INFO, "SUCCESS: %s", str(succesful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
def main(iargs=None):
    """Parse arguments, then either submit process_rsmas as a job or run its steps.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    t0 = time.time()

    inps = check_directories_and_inputs(process_rsmas_cmd_line_parse(iargs))

    cmd = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    message_rsmas.log(inps.work_dir, '##### NEW RUN #####')
    message_rsmas.log(inps.work_dir, cmd)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'process_rsmas'
        job = js.submit_script(inps.project_name, job_file_name, sys.argv[:], inps.work_dir)
        # run_operations.py needs this print statement for now.
        # This is not for debugging purposes.
        # DO NOT REMOVE.
        print(job)
    else:
        insar_obj = RsmasInsar(inps)
        insar_obj.run(steps=inps.runSteps)

    # Timing
    minutes, seconds = divmod(time.time() - t0, 60)
    print('\nTotal time: {:02.0f} mins {:02.1f} secs'.format(minutes, seconds))
    return
def main(iargs=None):
    """Run MintPy smallbaselineApp (wrapper variant) and optionally email results.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs, script='smallbaseline_wrapper')
    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'smallbaseline_wrapper'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    os.chdir(inps.work_dir)

    try:
        with open('out_mintpy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main([inps.custom_template_file])
    except Exception:
        # First attempt failed: rerun once, capturing stderr to out_mintpy.e.
        with open('out_mintpy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main([inps.custom_template_file])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Mintpy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--mintpy'])

    return None
def main(iargs=None):
    """Calculate horz and vert.

    :param iargs: list of command-line arguments; the first element is logged.
    """
    inps = command_line_parse(iargs)
    inps.project_name = get_project_name(inps.customTemplateFile)
    inps.work_dir = get_work_directory(None, inps.project_name)

    if not os.path.isdir(inps.work_dir):
        os.mkdir(inps.work_dir)
    os.chdir(inps.work_dir)

    # NOTE(review): logs only the first argument, and passes a single argument
    # to message_rsmas.log while sibling scripts pass (work_dir, msg) — confirm
    # this matches the log() signature used here.
    command = os.path.basename(__file__) + ' ' + iargs[0]
    message_rsmas.log(command)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_rsmas'
        work_dir = os.getcwd()
        job_name = inps.customTemplateFile.split(os.sep)[-1].split('.')[0]
        wall_time = '24:00'
        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir, wall_time)
        sys.exit(0)

    inps = putils.create_or_update_template(inps)
    if not os.path.isdir(inps.work_dir):
        os.makedirs(inps.work_dir)

    # Removed leftover debugging breakpoint (import pdb; pdb.set_trace()).
    if bool(inps.template['horzvert']):
        generate_verthorz(inps.customTemplateFile)

    #if not os.path.isdir(slc_dir):
    #    os.makedirs(slc_dir)
    os.chdir(inps.work_dir)
def main(iargs=None):
    """Entry point for process_rsmas: print template, submit a job, or run the steps.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    :raises SystemExit: when --print-template is requested (prints the template).
    """
    start_time = time.time()
    inps = process_rsmas_cmd_line_parse(iargs)

    template_file = pathObj.auto_template

    # print default template
    if inps.print_template:
        # Use a context manager so the file is closed before exiting.
        with open(template_file, 'r') as template:
            raise SystemExit(template.read())

    inps = check_directories_and_inputs(inps)

    command_line = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    message_rsmas.log(inps.work_dir, '##### NEW RUN #####')
    message_rsmas.log(inps.work_dir, command_line)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')
    job_file_name = 'process_rsmas'
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job = js.submit_script(inps.project_name, job_file_name, sys.argv[:],
                               inps.work_dir, new_wall_time)
        # run_operations.py needs this print statement for now.
        # This is not for debugging purposes.
        # DO NOT REMOVE.
        print(job)
    else:
        time.sleep(wait_seconds)
        objInsar = RsmasInsar(inps)
        objInsar.run(steps=inps.runSteps)

    # Timing
    m, s = divmod(time.time() - start_time, 60)
    print('\nTotal time: {:02.0f} mins {:02.1f} secs'.format(m, s))
    return
def main(iargs=None):
    """Downloads data with ssara and asfserial scripts.

    For Sentinel projects (SenDT/SenAT), runs both downloaders and retries up
    to twice after check_download reports bad files. For other satellites it
    invokes ssara_federated_query.py directly and returns.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'download_rsmas'
    job_name = job_file_name
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    # Honor an explicit slcDir from the template; default to <work_dir>/SLC.
    if inps.template['topsStack.slcDir'] is not None:
        slc_dir = inps.template['topsStack.slcDir']
    else:
        slc_dir = os.path.join(inps.work_dir, 'SLC')

    if not os.path.isdir(inps.work_dir):
        os.makedirs(inps.work_dir)
    if not os.path.isdir(slc_dir):
        os.makedirs(slc_dir)

    # if satellite is not Sentinel (not tried yet)
    if 'SenDT' not in inps.project_name and 'SenAT' not in inps.project_name:
        ssara_call = ['ssara_federated_query.py'] + inps.ssaraopt + ['--print', '--download']
        # BUG FIX: the original used Popen(ssara_call, shell=True) — with
        # shell=True and an argv list only the first element is executed —
        # and then called .poll() on the int returned by .wait(). Run the
        # argv list without a shell and take the exit status from wait().
        completion_status = subprocess.Popen(ssara_call).wait()
        return

    download('ssara', inps.custom_template_file, slc_dir, outnum=1)
    download('asfserial', inps.custom_template_file, slc_dir, outnum=1)

    # Re-download up to twice if check_download finds corrupt files.
    for i_download in [2, 3]:
        download_success = run_check_download(slc_dir=slc_dir)

        if not download_success:
            print('check_download.py: There were bad files, download again')
            message_rsmas.log(inps.work_dir, 'check_download.py: there were bad files, download again')
            download('ssara', inps.custom_template_file, slc_dir, outnum=i_download)
            download('asfserial', inps.custom_template_file, slc_dir, outnum=i_download)
def main(iargs=None):
    """Execute the topsStack run files, via the job scheduler or sequentially.

    With a supported scheduler (LSF/PBS/SLURM) each run file is submitted as a
    batch job (SLURM/sge via the launcher) and its outputs post-processed;
    otherwise commands are executed locally with os.system.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')
    os.chdir(inps.work_dir)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'execute_runfiles'
    job_name = job_file_name
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    run_file_list = putils.read_run_list(inps.work_dir)

    # end_run == 0 means "run to the end"; start_run is 1-based on the CLI.
    if inps.end_run == 0:
        inps.end_run = len(run_file_list)
    if inps.start_run != 0:
        inps.start_run = inps.start_run - 1

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    run_file_list = run_file_list[inps.start_run:inps.end_run]

    supported_schedulers = ['LSF', 'PBS', 'SLURM']

    if os.getenv('JOBSCHEDULER') in supported_schedulers:

        for item in run_file_list:
            # Step name is everything after the third underscore of the file name.
            step_name = '_'
            step_name = step_name.join(item.split('_')[3::])

            # Fall back to DEFAULT when the step has no dedicated config entry.
            try:
                memorymax = config[step_name]['memory']
            except KeyError:
                memorymax = config['DEFAULT']['memory']

            try:
                # FA 26 Dec commented out as it seemed wrong
                #if config[step_name]['adjust'] == 'True':
                #    walltimelimit = putils.walltime_adjust(inps, config[step_name]['walltime'])
                #else:
                #    walltimelimit = config[step_name]['walltime']
                walltimelimit = config[step_name]['walltime']
            except KeyError:
                walltimelimit = config['DEFAULT']['walltime']

            queuename = os.getenv('QUEUENAME')

            putils.remove_last_job_running_products(run_file=item)

            if os.getenv('JOBSCHEDULER') in ['SLURM', 'sge']:
                js.submit_job_with_launcher(batch_file=item,
                                            work_dir=os.path.join(inps.work_dir, 'run_files'),
                                            memory=memorymax, walltime=walltimelimit,
                                            queue=queuename)
            else:
                jobs = js.submit_batch_jobs(batch_file=item,
                                            out_dir=os.path.join(inps.work_dir, 'run_files'),
                                            work_dir=inps.work_dir,
                                            memory=memorymax, walltime=walltimelimit,
                                            queue=queuename)

            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

            date_str = datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d:%H%M%S')
            print(date_str + ' * Job {} completed'.format(item))

        date_str = datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d:%H%M%S')
        print(date_str + ' * all jobs from {} to {} have been completed'.format(
            os.path.basename(run_file_list[0]), os.path.basename(run_file_list[-1])))

    else:
        # No scheduler: run each command of each run file locally, in order.
        for item in run_file_list:
            with open(item, 'r') as f:
                command_lines = f.readlines()
                for command_line in command_lines:
                    os.system(command_line)

    return None
if __name__ == "__main__":
    # Parse CLI arguments and derive project paths.
    inps = command_line_parse(sys.argv[1:])
    inps.project_name = putils.get_project_name(custom_template_file=inps.template)
    inps.work_dir = putils.get_work_directory(None, inps.project_name)
    inps.slc_dir = inps.work_dir + "/SLC"

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        script_name = 'download_ssara_rsmas'
        job_label = inps.template.split(os.sep)[-1].split('.')[0]
        js.submit_script(job_label, script_name, sys.argv[:], inps.work_dir, '24:00')
        sys.exit(0)

    os.chdir(inps.work_dir)
    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    # Make sure the SLC directory exists, then download into it.
    if not os.path.isdir(inps.slc_dir):
        os.makedirs(inps.slc_dir)
    os.chdir(inps.slc_dir)

    succesful = run_ssara(inps.work_dir, inps.template, inps.delta_lat)
def main(iargs=None):
    """Download SLCs with the ASF serial downloader, with optional seasonal windows.

    When --seasonalStartDate/--seasonalEndDate are given (MMDD), one CSV is
    generated per year of the template date range, restricted to the season;
    otherwise a single CSV covering the whole range is generated.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        work_dir = inps.work_dir
        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']
        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir, inps.wall_time)
        # NOTE(review): no sys.exit(0) after submission (the ssara variant
        # exits here) — confirm intended.

    os.chdir(inps.work_dir)

    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')
    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    os.chdir(inps.slc_dir)

    # Remove a stale download cookie jar, if present.
    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    dataset_template = Template(inps.custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))

    # Best-effort removal of a previous CSV (rm prints a warning if absent).
    subprocess.Popen("rm new_files.csv", shell=True).wait()

    standardTuple = (inps, dataset_template)
    if inps.seasonalStartDate is not None and inps.seasonalEndDate is not None:
        ogStartYearInt = int(dataset_template.options['ssaraopt.startDate'][:4])
        # y == 1 marks a season that wraps past year end (start MMDD > end MMDD).
        if int(inps.seasonalStartDate) > int(inps.seasonalEndDate):
            y = 1
        else:
            y = 0
        YearRange = int(dataset_template.options['ssaraopt.endDate'][:4]) - ogStartYearInt + 1
        if YearRange > 1 and y == 1:
            YearRange = YearRange - 1
        # MMDD -> '-MM-DD' suffixes appended to each year.
        seasonalStartDateAddOn = '-' + inps.seasonalStartDate[:2] + '-' + inps.seasonalStartDate[2:]
        seasonalEndDateAddOn = '-' + inps.seasonalEndDate[:2] + '-' + inps.seasonalEndDate[2:]
        ogEndDate = dataset_template.options['ssaraopt.endDate']
        for x in range(YearRange):
            seasonalTuple = standardTuple + (x, ogStartYearInt, y, YearRange,
                                             seasonalStartDateAddOn,
                                             seasonalEndDateAddOn, ogEndDate)
            generate_files_csv(project_slc_dir, inps.custom_template_file, seasonalTuple)
            y += 1
    else:
        generate_files_csv(project_slc_dir, inps.custom_template_file, standardTuple)

    succesful = run_download_asf_serial(project_slc_dir, logger)
    change_file_permissions()

    logger.log(loglevel.INFO, "SUCCESS: %s", str(succesful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
def main(iargs=None):
    """ generates interferograms and coherence images in GeoTiff format

    Modifies the MintPy network to a single connection, geocodes the
    ifgramStack, then writes one coherence and one interferogram GeoTiff
    per date pair into <work_dir>/<out_dir>.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    :raises Exception: if mintpy/inputs/ifgramStack.h5 is not found.
    """
    inps = putils.cmd_line_parse(iargs)
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'ifgramStack_to_ifgram_and_coherence'
    job_name = job_file_name
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    # Log the exact command line that was run (guard against iargs=None,
    # which crashed the original ' '.join(iargs[:]) call).
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    out_dir = inps.work_dir + '/' + inps.out_dir
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    try:
        ifgram_file = glob.glob(inps.work_dir + '/mintpy/inputs/ifgramStack.h5')[0]
    except IndexError:
        raise Exception('ERROR in ' + os.path.basename(__file__) + ': file ifgramStack.h5 not found')

    # modify network so that only one connection left
    arg_string = ifgram_file + ' --max-conn-num 1'
    print('modify_network.py', arg_string)
    mintpy.modify_network.main(arg_string.split())

    if not os.path.isdir(inps.work_dir + '/mintpy/geo'):
        os.makedirs(inps.work_dir + '/mintpy/geo')

    # geocode ifgramStack
    geo_file = os.path.dirname(os.path.dirname(ifgram_file)) + '/geo/geo_' + os.path.basename(ifgram_file)
    lookup_file = os.path.dirname(os.path.dirname(ifgram_file)) + '/inputs/geometryRadar.h5'
    template_file = os.path.dirname(os.path.dirname(ifgram_file)) + '/smallbaselineApp_template.txt'
    arg_string = ifgram_file + ' -t ' + template_file + ' -l ' + lookup_file + ' -o ' + geo_file
    print('geocode.py', arg_string)
    mintpy.geocode.main(arg_string.split())

    # loop over all interferograms
    obj = ifgramStack(geo_file)
    obj.open()
    date12_list = obj.get_date12_list()
    # dummy_data, atr = readfile.read(geo_file)

    for date_str in date12_list:
        print('Working on ... ' + date_str)
        data_coh = readfile.read(ifgram_file, datasetName='coherence-' + date_str)[0]
        data_unw = readfile.read(ifgram_file, datasetName='unwrapPhase-' + date_str)[0]

        fname_coh = out_dir + '/coherence_' + date_str + '.tif'
        fname_unw = out_dir + '/interferogram_' + date_str + '.tif'

        create_geotiff(obj, data=data_coh, outfile=fname_coh, type='coherence',
                       work_dir=inps.work_dir)
        create_geotiff(obj, data=data_unw, outfile=fname_unw, type='interferogram',
                       work_dir=inps.work_dir)
    return
def main(iargs=None):
    """Download SLCs with the ASF serial downloader, optionally seasonal/parallel.

    Seasonal dates may come from the template (seasonalStartDate/EndDate) or the
    CLI (CLI wins). If the template sets parallel: yes, downloads are run in
    parallel with a configurable thread count (default os.cpu_count()).

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    global logger
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        work_dir = inps.work_dir
        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']
        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir, inps.wall_time)
        # NOTE(review): no sys.exit(0) after submission (the ssara variant
        # exits here) — confirm intended.

    os.chdir(inps.work_dir)

    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')
    global project_slc_dir
    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    os.chdir(inps.slc_dir)

    # Remove a stale download cookie jar, if present.
    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    dataset_template = Template(inps.custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))

    # Best-effort removal of previous CSVs (rm prints a warning if absent).
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv", shell=True).wait()

    # Seasonal window: template values first, CLI values override.
    seasonal_start_date = None
    seasonal_end_date = None
    try:
        if dataset_template.options['seasonalStartDate'] is not None \
                and dataset_template.options['seasonalEndDate'] is not None:
            seasonal_start_date = dataset_template.options['seasonalStartDate']
            seasonal_end_date = dataset_template.options['seasonalEndDate']
    except KeyError:
        pass

    if inps.seasonalStartDate is not None and inps.seasonalEndDate is not None:
        seasonal_start_date = inps.seasonalStartDate
        seasonal_end_date = inps.seasonalEndDate

    if seasonal_start_date is not None and seasonal_end_date is not None:
        generate_seasonal_files_csv(dataset_template, seasonal_start_date, seasonal_end_date)
    else:
        generate_files_csv(project_slc_dir, dataset_template)

    parallel = False
    try:
        if dataset_template.options['parallel'] == 'yes':
            parallel = True
    except KeyError:
        pass
    # (disabled alternative) if inps.parallel == 'yes': parallel = True

    threads = os.cpu_count()
    try:
        if dataset_template.options['threads'] is not None:
            threads = int(dataset_template.options['threads'])
    except KeyError:
        pass
    # (disabled alternative) if inps.processes is not None: processes = inps.processes

    if parallel:
        run_parallel_download_asf_serial(project_slc_dir, threads)
    else:
        succesful = run_download_asf_serial(project_slc_dir, logger)
        logger.log(loglevel.INFO, "SUCCESS: %s", str(succesful))

    change_file_permissions()
    logger.log(loglevel.INFO, "------------------------------------")
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv", shell=True).wait()

    return None
def main(iargs=None):
    """Convert the HDF-EOS5 product to JSON/mbtiles and upload it to insarmaps.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    :raises Exception: if either conversion/upload command exits non-zero.
    """
    inps = putils.cmd_line_parse(iargs, script='ingest_insarmaps')
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'ingest_insarmaps'
    job_name = job_file_name
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        # NOTE(review): no sys.exit(0) here, so execution continues after
        # submitting the job — sibling scripts exit; confirm intended.

    time.sleep(wait_seconds)
    os.chdir(inps.work_dir)

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    # Prefer the top-level product; fall back to SUBSET_* products.
    # BUG FIX: the original used append(), which nested the second glob's
    # list as a single element instead of extending the candidate list.
    hdfeos_file = glob.glob(inps.work_dir + '/mintpy/S1*.he5')
    hdfeos_file.extend(glob.glob(inps.work_dir + '/mintpy/SUBSET_*/S1*.he5'))
    hdfeos_file = hdfeos_file[0]

    json_folder = inps.work_dir + '/mintpy/JSON'
    mbtiles_file = json_folder + '/' + os.path.splitext(os.path.basename(hdfeos_file))[0] + '.mbtiles'

    if os.path.isdir(json_folder):
        shutil.rmtree(json_folder)

    command1 = 'hdfeos5_2json_mbtiles.py ' + hdfeos_file + ' ' + json_folder + ' |& tee out_insarmaps.log'
    command2 = 'json_mbtiles2insarmaps.py -u ' + password.insaruser + ' -p ' + password.insarpass + ' --host ' + \
               'insarmaps.miami.edu -P rsmastest -U rsmas\\@gmail.com --json_folder ' + \
               json_folder + ' --mbtiles_file ' + mbtiles_file + ' |& tee -a out_insarmaps.log'

    # Record both commands for manual re-runs.
    with open(inps.work_dir + '/mintpy/run_insarmaps', 'w') as f:
        f.write(command1 + '\n')
        f.write(command2 + '\n')

    out_file = 'out_insarmaps'
    message_rsmas.log(inps.work_dir, command1)
    # Swap stdout/stderr so each stream is teed to its own file.
    command1 = '(' + command1 + ' | tee ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee ' + out_file + '.e'
    status = subprocess.Popen(command1, shell=True).wait()
    # BUG FIX: original compared `status is not 0` (identity, unreliable for ints).
    if status != 0:
        raise Exception('ERROR in hdfeos5_2json_mbtiles.py')

    # TODO: Change subprocess call to get back error code and send error code to logger
    message_rsmas.log(inps.work_dir, command2)
    command2 = '(' + command2 + ' | tee -a ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee -a ' + out_file + '.e'
    status = subprocess.Popen(command2, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in json_mbtiles2insarmaps.py')

    # Email insarmaps results:
    if inps.email:
        email_results.main([inps.custom_template_file, '--insarmap'])

    return None
def main(iargs=None):
    """ create orth and geo rectifying run jobs and submit them.

    Builds geo-rectified lat/lon products, merges/multilooks them, then
    submits one amplitude_ortho_geo batch job per generated run file.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs)

    inps.geom_masterDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.master = os.path.join(inps.work_dir, pathObj.masterdir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except IndexError:
        # glob returned no match: no DEM to rectify against.
        print('DEM not exists!')
        sys.exit(1)

    if not os.path.exists(inps.geom_masterDir):
        os.mkdir(inps.geom_masterDir)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'export_ortho_geo'
    job_name = job_file_name
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    demZero = create_demZero(inps.dem, inps.geom_masterDir)
    swathList = ut.getSwathList(inps.master)
    create_georectified_lat_lon(swathList, inps.master, inps.geom_masterDir, demZero)
    merge_burst_lat_lon(inps)
    multilook_images(inps)

    run_file_list = make_run_list_amplitude(inps)

    for item in run_file_list:
        step_name = 'amplitude_ortho_geo'
        # Fall back to DEFAULT when the step has no dedicated config entry.
        try:
            memorymax = config[step_name]['memory']
        except KeyError:
            memorymax = config['DEFAULT']['memory']

        try:
            if config[step_name]['adjust'] == 'True':
                walltimelimit = putils.walltime_adjust(config[step_name]['walltime'])
            else:
                walltimelimit = config[step_name]['walltime']
        except KeyError:
            walltimelimit = config['DEFAULT']['walltime']

        queuename = os.getenv('QUEUENAME')

        putils.remove_last_job_running_products(run_file=item)

        jobs = js.submit_batch_jobs(batch_file=item,
                                    out_dir=os.path.join(inps.work_dir, 'run_files'),
                                    work_dir=inps.work_dir,
                                    memory=memorymax, walltime=walltimelimit,
                                    queue=queuename)

        putils.remove_zero_size_or_length_error_files(run_file=item)
        putils.raise_exception_if_job_exited(run_file=item)
        putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
        putils.move_out_job_files_to_stdout(run_file=item)

    return
def main(iargs=None):
    """Execute the run files, via the LSF/PBS scheduler or sequentially.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')
    os.chdir(inps.work_dir)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'execute_runfiles'
    job_name = job_file_name
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    command_line = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    message_rsmas.log(inps.work_dir, command_line)

    run_file_list = putils.read_run_list(inps.work_dir)

    # endrun == 0 means "run to the end"; startrun is 1-based on the CLI.
    if inps.endrun == 0:
        inps.endrun = len(run_file_list)
    if inps.startrun != 0:
        inps.startrun = inps.startrun - 1

    run_file_list = run_file_list[inps.startrun:inps.endrun]

    if os.getenv('JOBSCHEDULER') == 'LSF' or os.getenv('JOBSCHEDULER') == 'PBS':

        for item in run_file_list:
            # Step name is everything after the third underscore of the file name.
            step_name = '_'
            step_name = step_name.join(item.split('_')[3::])

            # Fall back to DEFAULT when the step has no dedicated config entry.
            try:
                memorymax = config[step_name]['memory']
            except KeyError:
                memorymax = config['DEFAULT']['memory']

            try:
                if config[step_name]['adjust'] == 'True':
                    walltimelimit = putils.walltime_adjust(inps, config[step_name]['walltime'])
                else:
                    walltimelimit = config[step_name]['walltime']
            except KeyError:
                walltimelimit = config['DEFAULT']['walltime']

            queuename = os.getenv('QUEUENAME')

            putils.remove_last_job_running_products(run_file=item)

            jobs = js.submit_batch_jobs(batch_file=item,
                                        out_dir=os.path.join(inps.work_dir, 'run_files'),
                                        work_dir=inps.work_dir,
                                        memory=memorymax, walltime=walltimelimit,
                                        queue=queuename)

            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)
    else:
        # No scheduler: run each command of each run file locally, in order.
        for item in run_file_list:
            with open(item, 'r') as f:
                command_lines = f.readlines()
                for command_line in command_lines:
                    os.system(command_line)

    return None
def main(iargs=None):
    """Execute the run files as batch jobs and post-process their outputs.

    :param iargs: optional list of command-line arguments; None means use sys.argv.
    """
    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')
    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'execute_runfiles'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    run_file_list = putils.read_run_list(inps.work_dir)

    # end_run == 0 means "run to the end"; start_run is 1-based on the CLI.
    if inps.end_run == 0:
        inps.end_run = len(run_file_list)
    if inps.start_run != 0:
        inps.start_run = inps.start_run - 1

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    run_file_list = run_file_list[inps.start_run:inps.end_run]

    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)

        job_status = js.submit_batch_jobs(batch_file=item,
                                          out_dir=os.path.join(inps.work_dir, 'run_files'),
                                          work_dir=inps.work_dir)

        if job_status:
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

            date_str = datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d:%H%M%S')
            print(date_str + ' * Job {} completed'.format(item))

    date_str = datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d:%H%M%S')
    print(date_str + ' * all jobs from {} to {} have been completed'.format(
        os.path.basename(run_file_list[0]), os.path.basename(run_file_list[-1])))

    return None