def main(iargs=None):
    """Run the RSMAS InSAR workflow, or submit it as a batch job.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv`` is used when None.
    """
    start_time = time.time()

    inps = process_rsmas_cmd_line_parse(iargs)
    inps = check_directories_and_inputs(inps)

    # Record the exact invocation in the project log.
    command_line = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    message_rsmas.log(inps.work_dir, '##### NEW RUN #####')
    message_rsmas.log(inps.work_dir, command_line)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job = job_obj.submit_script(inps.project_name, 'process_rsmas', sys.argv[:])
        # run_operations.py needs this print statement for now.
        # This is not for debugging purposes.
        # DO NOT REMOVE.
        print(job)
    else:
        insar_obj = RsmasInsar(inps)
        insar_obj.run(steps=inps.runSteps)

    # Report elapsed wall-clock time.
    minutes, seconds = divmod(time.time() - start_time, 60)
    print('\nTotal time: {:02.0f} mins {:02.1f} secs'.format(minutes, seconds))
    return
def main(iargs=None):
    """Create topsStack run files and stage local precise orbits.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)
    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        # Re-submit this script (without --submit) as a batch job and stop.
        job_obj = JOB_SUBMIT(inps)
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template['topsStack.demDir'] = dem_file
    except IndexError:
        # glob found no *.wgs84 file; bare except would also have hidden real errors
        raise SystemExit('DEM does not exist')

    # check for orbits
    orbit_dir = os.getenv('SENTINEL_ORBITS')

    # make run file
    inps.topsStack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)
    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.write(item + '\n')

    # Copy any locally-downloaded precise orbit files into the shared orbit dir.
    local_orbit = os.path.join(inps.work_dir, 'orbits')
    precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
    if len(precise_orbits_in_local) > 0:
        for orbit_file in precise_orbits_in_local:
            os.system('cp {} {}'.format(orbit_file, orbit_dir))

    return None
def main(iargs=None):
    """Run the MiNoPy wrapper around smallbaselineApp, or submit it as a job.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs, script='minopy_wrapper')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'minopy_wrapper'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        # Stop here so the work is done only by the submitted job
        # (sibling scripts exit here too; previously the app also ran locally).
        sys.exit(0)

    os.chdir(inps.work_dir)

    try:
        with open('out_minopy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main(
                    [inps.custom_template_file, '--dir', pathObj.mintpydir])
    except Exception:
        # First attempt failed: rerun with stderr captured for diagnosis.
        # (bare except would also have trapped SystemExit/KeyboardInterrupt)
        with open('out_minopy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main(
                    [inps.custom_template_file, '--dir', pathObj.mintpydir])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Minopy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--minopy'])

    return None
def main(iargs=None):
    """Create topsStack run files (legacy ``js``-based job submission).

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs)

    os.chdir(inps.work_dir)
    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'create_runfiles'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    # Log the invocation (explicit arguments when given, sys.argv otherwise).
    if iargs is not None:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1:]))

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template['topsStack.demDir'] = dem_file
    except IndexError:
        # glob found no *.wgs84 file; bare except would also have hidden real errors
        raise SystemExit('DEM does not exist')

    inps.topsStack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)
    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.write(item + '\n')

    # Extra post-stack step only for these workflows.
    if inps.template['topsStack.workflow'] in ['interferogram', 'slc']:
        runObj.run_post_stack()

    return None
def main(iargs=None):
    """Run smallbaselineApp (legacy ``js``-based job submission).

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs, script='smallbaseline_wrapper')

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'smallbaseline_wrapper'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    # Log the invocation (explicit arguments when given, sys.argv otherwise).
    if iargs is not None:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1:]))

    os.chdir(inps.work_dir)

    try:
        with open('out_mintpy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main([inps.custom_template_file])
    except Exception:
        # First attempt failed: rerun with stderr captured for diagnosis.
        # (bare except would also have trapped SystemExit/KeyboardInterrupt)
        with open('out_mintpy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main([inps.custom_template_file])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Mintpy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--mintpy'])

    return None
def main(iargs=None):
    """Generate interferogram and coherence images in GeoTIFF format.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'ifgramStack_to_ifgram_and_coherence'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    out_dir = inps.work_dir + '/' + pathObj.tiffdir
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    try:
        # renamed from `file` to avoid shadowing the builtin
        stack_file = glob.glob(inps.work_dir + '/mintpy/inputs/ifgramStack.h5')[0]
    except IndexError:
        raise Exception('ERROR in ' + os.path.basename(__file__) +
                        ': file ifgramStack.h5 not found')

    # modify network so that only one connection left
    arg_string = stack_file + ' --max-conn-num 1'
    print('modify_network.py', arg_string)
    mintpy.modify_network.main(arg_string.split())

    if not os.path.isdir(inps.work_dir + '/mintpy/geo'):
        os.makedirs(inps.work_dir + '/mintpy/geo')

    # geocode ifgramStack
    geo_file = os.path.dirname(
        os.path.dirname(stack_file)) + '/geo/geo_' + os.path.basename(stack_file)
    lookup_file = os.path.dirname(
        os.path.dirname(stack_file)) + '/inputs/geometryRadar.h5'
    template_file = os.path.dirname(
        os.path.dirname(stack_file)) + '/smallbaselineApp_template.txt'
    arg_string = stack_file + ' -t ' + template_file + ' -l ' + lookup_file + ' -o ' + geo_file
    print('geocode.py', arg_string)
    mintpy.geocode.main(arg_string.split())

    # loop over all interferograms
    obj = ifgramStack(geo_file)
    obj.open()
    date12_list = obj.get_date12_list()
    # dummy_data, atr = readfile.read(geo_file)

    # Iterate the dates directly instead of indexing by range(len(...)).
    for date_str in date12_list:
        print('Working on ... ' + date_str)
        data_coh = readfile.read(stack_file, datasetName='coherence-' + date_str)[0]
        data_unw = readfile.read(stack_file, datasetName='unwrapPhase-' + date_str)[0]

        fname_coh = out_dir + '/coherence_' + date_str + '.tif'
        fname_unw = out_dir + '/interferogram_' + date_str + '.tif'

        create_geotiff(obj, data=data_coh, outfile=fname_coh, type='coherence',
                       work_dir=inps.work_dir)
        create_geotiff(obj, data=data_unw, outfile=fname_unw, type='interferogram',
                       work_dir=inps.work_dir)
    return
def main(iargs=None):
    """Downloads data with ssara and asfserial scripts.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'download_rsmas'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    # Choose the download directory based on the acquisition mode.
    if inps.prefix == 'tops':
        if inps.template[inps.prefix + 'Stack.slcDir'] is not None:
            download_dir = inps.template[inps.prefix + 'Stack.slcDir']
        else:
            download_dir = os.path.join(inps.work_dir, 'SLC')
    else:
        if inps.template['raw_image_dir'] not in [None, 'None']:
            download_dir = inps.template['raw_image_dir']
        else:
            download_dir = os.path.join(inps.work_dir, 'RAW_data')

    os.makedirs(inps.work_dir, exist_ok=True)
    os.makedirs(download_dir, exist_ok=True)

    # Non-Sentinel projects, or SSARA_ASF disabled: single federated query.
    # Parentheses make the and/or precedence of the original condition explicit.
    if ('SenDT' not in inps.project_name and 'SenAT' not in inps.project_name) \
            or os.getenv('SSARA_ASF') == 'False':
        try:
            inps.template['ssaraopt.intersectsWithPoint']
            inps.ssaraopt = ' '.join(
                add_point_to_ssaraopt(inps.template, inps.ssaraopt.split(' ')))
        except KeyError:
            # No intersectsWithPoint key in the template: fall back to a polygon.
            inps.ssaraopt = ' '.join(
                add_polygon_to_ssaraopt(inps.template, inps.ssaraopt.split(' '),
                                        delta_lat=inps.delta_lat))

        command = 'ssara_federated_query.py ' + inps.ssaraopt + ' --print' + ' --download'
        os.chdir(download_dir)
        message_rsmas.log(download_dir, command)
        status = subprocess.Popen(command, shell=True).wait()
        if status != 0:  # was `is not 0`: identity test on an int literal
            raise Exception('ERROR in ssara_federated_query.py')
        os.chdir(inps.work_dir)
        return

    if os.getenv('SSARA_ASF') == 'False':
        return

    download('ssara', inps.custom_template_file, download_dir, outnum=1)
    # download('asfserial', inps.custom_template_file, download_dir, outnum=1)

    # Retry up to twice if check_download reports corrupt files.
    for i_download in [2, 3]:
        download_success = run_check_download(download_dir=download_dir)

        if not download_success:
            print('check_download.py: There were bad files, download again')
            message_rsmas.log(inps.work_dir,
                              'check_download.py: there were bad files, download again')
            download('ssara', inps.custom_template_file, download_dir, outnum=i_download)
def main(iargs=None):
    """Create orth and geo rectifying run jobs and submit them.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs)

    # Deferred project imports: only available once ISCE paths are set up.
    import s1a_isce_utils as ut
    import mergeBursts as mb

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    inps.geom_masterDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.master = os.path.join(inps.work_dir, pathObj.masterdir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except IndexError:
        # glob found no *.wgs84 file
        print('DEM does not exist!')
        sys.exit(1)

    if not os.path.exists(inps.geom_masterDir):
        os.mkdir(inps.geom_masterDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)
    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    demZero = create_demZero(inps.dem, inps.geom_masterDir)
    swathList = ut.getSwathList(inps.master)

    create_georectified_lat_lon(swathList, inps.master, inps.geom_masterDir, demZero)
    merge_burst_lat_lon(inps)
    multilook_images(inps)

    run_file_list = make_run_list(inps)

    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)
        job_status = job_obj.submit_batch_jobs(batch_file=item)
        if job_status:
            # Post-run housekeeping: prune empty error files, retry timeouts,
            # surface failures, and consolidate job output.
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    # upload_to_s3(pic_dir)
    minsar.upload_data_products.main([inps.custom_template_file, '--imageProducts'])

    return
def main(iargs=None):
    """Execute (a slice of) the workflow run files as batch jobs.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)
    time.sleep(putils.pause_seconds(inps.wait_time))

    if inps.prefix == 'stripmap':
        inps.num_bursts = 1

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'execute_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    run_file_list = putils.read_run_list(inps.work_dir)

    # Normalize the requested range: 0 means "to the end" / "from the start";
    # start_run is 1-based on the command line, 0-based in the list slice.
    if inps.end_run == 0:
        inps.end_run = len(run_file_list)
    if inps.start_run != 0:
        inps.start_run = inps.start_run - 1
    if inps.step:
        # --step runs exactly one run file.
        inps.start_run = inps.step - 1
        inps.end_run = inps.step

    run_file_list = run_file_list[inps.start_run:inps.end_run]

    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)
        job_status = job_obj.submit_batch_jobs(batch_file=item)
        if job_status:
            # Post-run housekeeping: prune empty error files, retry timeouts,
            # surface failures, and consolidate job output.
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

        date_str = datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d:%H%M%S')
        print(date_str + ' * Job {} completed'.format(item))

    date_str = datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d:%H%M%S')
    print(date_str + ' * all jobs from {} to {} have been completed'.format(
        os.path.basename(run_file_list[0]), os.path.basename(run_file_list[-1])))

    return None
def main(iargs=None):
    """Ingest MintPy HDF-EOS5 results into insarmaps and upload products.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs, script='ingest_insarmaps')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'ingest_insarmaps'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    os.chdir(inps.work_dir)

    # Find the HDF-EOS5 product: top-level mintpy dir first, then SUBSET dirs.
    # Was `hdfeos_file.append(glob.glob(...))`, which nested a *list* into the
    # list and broke `hdfeos_file[0]` whenever the first glob was empty.
    hdfeos_file = glob.glob(inps.work_dir + '/mintpy/S1*.he5')
    hdfeos_file += glob.glob(inps.work_dir + '/mintpy/SUBSET_*/S1*.he5')
    hdfeos_file = hdfeos_file[0]

    json_folder = inps.work_dir + '/mintpy/JSON'
    mbtiles_file = json_folder + '/' + os.path.splitext(
        os.path.basename(hdfeos_file))[0] + '.mbtiles'

    if os.path.isdir(json_folder):
        shutil.rmtree(json_folder)

    # Commands are logged/written plain; tee-redirection is added just before
    # each run. (A dead duplicate set of these assignments was removed.)
    command1 = 'hdfeos5_2json_mbtiles.py ' + hdfeos_file + ' ' + json_folder
    command2 = 'json_mbtiles2insarmaps.py -u ' + password.insaruser + ' -p ' + password.insarpass + ' --host ' + \
               'insarmaps.miami.edu -P rsmastest -U rsmas\@gmail.com --json_folder ' + \
               json_folder + ' --mbtiles_file ' + mbtiles_file
    command3 = 'upload_data_products.py --mintpyProducts ' + ' ' + inps.custom_template_file
    command4 = 'summarize_job_run_times.py ' + ' ' + inps.custom_template_file

    with open(inps.work_dir + '/run_ingest_insarmaps', 'w') as f:
        f.write(command1 + '\n')
        f.write(command2 + '\n')
        f.write(command3 + '\n')
        f.write(command4 + '\n')

    out_file = 'out_insarmaps'
    message_rsmas.log(inps.work_dir, command1)
    command1 = '(' + command1 + ' | tee ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee ' + out_file + '.e'
    status = subprocess.Popen(command1, shell=True).wait()
    if status != 0:  # was `is not 0`: identity test on an int literal
        raise Exception('ERROR in hdfeos5_2json_mbtiles.py')

    # TODO: Change subprocess call to get back error code and send error code to logger
    message_rsmas.log(inps.work_dir, command2)
    command2 = '(' + command2 + ' | tee -a ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee -a ' + out_file + '.e'
    status = subprocess.Popen(command2, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in json_mbtiles2insarmaps.py')

    out_file = 'out_upload_data_products'
    message_rsmas.log(inps.work_dir, command3)
    command3 = '(' + command3 + ' | tee -a ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee -a ' + out_file + '.e'
    status = subprocess.Popen(command3, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in upload_data_products.py')

    message_rsmas.log(inps.work_dir, command4)
    status = subprocess.Popen(command4, shell=True).wait()
    if status != 0:
        # message previously named a non-existent "summarize_job_run_times_products.py"
        raise Exception('ERROR in summarize_job_run_times.py')

    # Email insarmaps results:
    if inps.email:
        message_rsmas.log(inps.work_dir,
                          'email_results.py --insarmaps ' + inps.custom_template_file)
        email_results.main([inps.custom_template_file, '--insarmaps'])

    return None
def main(iargs=None):
    """Run smallbaselineApp, preparing stripmap ISCE products first if needed.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs, script='smallbaseline_wrapper')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # stripmap prep to isce
    #########################################
    if inps.template['acquisition_mode'] == 'stripmap':
        inps.dsetDir = inps.work_dir + '/Igrams'
        inps.slcDir = inps.work_dir + '/merged/SLC'
        inps.geometryDir = inps.work_dir + '/geom_master'
        inps.baselineDir = inps.work_dir + '/baselines'

        # Copy the masterShelve of the configured master date (or the first
        # available SLC when none is configured) into the work dir.
        masterDate = inps.template['stripmapStack.master']
        if masterDate == 'None':
            command1 = 'cp -r ' + inps.slcDir + '/' + os.listdir(inps.slcDir)[0] + '/' + 'masterShelve ' + inps.work_dir + '/.'
        else:
            command1 = 'cp -r ' + inps.slcDir + '/' + masterDate + '/' + 'masterShelve ' + inps.work_dir + '/.'
        print(command1)
        subprocess.Popen(command1, shell=True).wait()

        inps.metaFile = inps.work_dir + '/' + 'masterShelve/data.dat'
        command2 = 'prep_isce.py -d ' + inps.dsetDir + ' -m ' + inps.metaFile + ' -b ' + inps.baselineDir + ' -g ' + inps.geometryDir
        print(command2)
        subprocess.Popen(command2, shell=True).wait()

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'smallbaseline_wrapper'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        # Stop here so the work is done only by the submitted job
        # (sibling scripts exit here too; previously the app also ran locally).
        sys.exit(0)

    os.chdir(inps.work_dir)

    try:
        with open('out_mintpy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main([inps.custom_template_file])
    except Exception:
        # First attempt failed: rerun with stderr captured for diagnosis.
        # (bare except would also have trapped SystemExit/KeyboardInterrupt)
        with open('out_mintpy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main([inps.custom_template_file])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Mintpy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--mintpy'])

    return None
def main(iargs=None):
    """Create run files (and optional job files) for the ISCE stack workflow.

    Unpacks raw data into the SLC directory first when it looks empty, then
    generates run files, stages precise orbits (tops only), and optionally
    writes batch job files plus the post-processing job scripts.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs, script='create_runfiles')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)
    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = inps.work_dir
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template[inps.prefix + 'Stack.demDir'] = dem_file
    except IndexError:
        # glob found no *.wgs84 file; bare except would also have hidden real errors
        raise SystemExit('DEM does not exist')

    slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    os.makedirs(slc_dir, exist_ok=True)

    # Heuristic: fewer than 500 MB in slc_dir means no SLCs yet -> unpack raw data.
    if int(get_size(slc_dir) / 1024**2) < 500:
        # Unpack Raw data:
        if not inps.template['raw_image_dir'] in [None, 'None']:
            raw_image_dir = inps.template['raw_image_dir']
        else:
            raw_image_dir = os.path.join(inps.work_dir, 'RAW_data')

        if os.path.exists(raw_image_dir):
            unpackObj = Sensors(raw_image_dir, slc_dir, remove_file='False',
                                multiple_raw_frame=inps.template['multiple_raw_frame'])
            unpack_run_file = unpackObj.start()
            unpackObj.close()

            job_obj.write_batch_jobs(batch_file=unpack_run_file)
            job_status = job_obj.submit_batch_jobs(batch_file=unpack_run_file)

            if not job_status:
                raise Exception('ERROR: Unpacking failed')
        else:
            raise Exception('ERROR: No data (SLC or Raw) available')

    # make run file: start from clean run_files/configs directories.
    run_dir = os.path.join(inps.work_dir, 'run_files')
    config_dir = os.path.join(inps.work_dir, 'configs')
    for directory in [run_dir, config_dir]:
        if os.path.exists(directory):
            shutil.rmtree(directory)

    inps.Stack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)
    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.write(item + '\n')

    if inps.prefix == 'tops':
        # check for orbits: copy any local precise orbits to the shared dir.
        orbit_dir = os.getenv('SENTINEL_ORBITS')
        local_orbit = os.path.join(inps.work_dir, 'orbits')
        precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
        if len(precise_orbits_in_local) > 0:
            for orbit_file in precise_orbits_in_local:
                os.system('cp {} {}'.format(orbit_file, orbit_dir))

    # Writing job files
    if inps.write_jobs:
        for item in run_file_list:
            job_obj.write_batch_jobs(batch_file=item)

        # Also pre-write the post-processing job scripts (not submitted here).
        if inps.template['processingMethod'] == 'smallbaseline':
            job_name = 'smallbaseline_wrapper'
            job_file_name = job_name
            command = ['smallbaselineApp.py', inps.custom_template_file, '--dir', 'mintpy']
            job_obj.submit_script(job_name, job_file_name, command, writeOnly='True')
        else:
            job_name = 'minopy_wrapper'
            job_file_name = job_name
            command = ['minopyApp.py', inps.custom_template_file, '--dir', 'minopy']
            job_obj.submit_script(job_name, job_file_name, command, writeOnly='True')

        job_name = 'insarmaps'
        job_file_name = job_name
        command = ['ingest_insarmaps.py', inps.custom_template_file]
        job_obj.submit_script(job_name, job_file_name, command, writeOnly='True')

    return None
def main(iargs=None):
    """Create orth and geo rectifying run jobs and submit them (legacy ``js``).

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs)

    inps.geom_masterDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.master = os.path.join(inps.work_dir, pathObj.masterdir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except IndexError:
        # glob found no *.wgs84 file
        print('DEM does not exist!')
        sys.exit(1)

    if not os.path.exists(inps.geom_masterDir):
        os.mkdir(inps.geom_masterDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)
    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    # Log the invocation (explicit arguments when given, sys.argv otherwise).
    if iargs is not None:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1:]))

    demZero = create_demZero(inps.dem, inps.geom_masterDir)
    swathList = ut.getSwathList(inps.master)

    create_georectified_lat_lon(swathList, inps.master, inps.geom_masterDir, demZero)
    merge_burst_lat_lon(inps)
    multilook_images(inps)

    run_file_list = make_run_list(inps)

    for item in run_file_list:
        step_name = 'amplitude_ortho_geo'
        # Per-step scheduler settings with DEFAULT fallbacks from the config.
        try:
            memorymax = config[step_name]['memory']
        except KeyError:
            memorymax = config['DEFAULT']['memory']

        try:
            if config[step_name]['adjust'] == 'True':
                walltimelimit = putils.walltime_adjust(config[step_name]['walltime'])
            else:
                walltimelimit = config[step_name]['walltime']
        except KeyError:
            walltimelimit = config['DEFAULT']['walltime']

        queuename = os.getenv('QUEUENAME')

        putils.remove_last_job_running_products(run_file=item)
        jobs = js.submit_batch_jobs(batch_file=item,
                                    out_dir=os.path.join(inps.work_dir, 'run_files'),
                                    work_dir=inps.work_dir,
                                    memory=memorymax,
                                    walltime=walltimelimit,
                                    queue=queuename)

        # Post-run housekeeping: prune empty error files, surface failures,
        # and consolidate job output.
        putils.remove_zero_size_or_length_error_files(run_file=item)
        putils.raise_exception_if_job_exited(run_file=item)
        putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
        putils.move_out_job_files_to_stdout(run_file=item)

    # upload_to_s3(pic_dir)
    minsar.upload_data_products.main([inps.custom_template_file, '--image_products'])

    return
def main(iargs=None):
    """Create orth and geo rectifying run jobs and submit them.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs)

    # Make the right ISCE stack processor importable for this prefix.
    if 'stripmap' in inps.prefix:
        sys.path.append(os.path.join(os.getenv('ISCE_STACK'), 'stripmapStack'))
    else:
        sys.path.append(os.path.join(os.getenv('ISCE_STACK'), 'topsStack'))

    from s1a_isce_utils import loadProduct, getSwathList
    import mergeBursts

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    inps.geom_referenceDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.reference = os.path.join(inps.work_dir, pathObj.referencedir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except IndexError:
        # glob found no *.wgs84 file
        print('DEM does not exist!')
        sys.exit(1)

    if not os.path.exists(inps.geom_referenceDir):
        os.mkdir(inps.geom_referenceDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        # Stop here so the work is done only by the submitted job; previously
        # execution fell through and ran the full workflow locally as well.
        sys.exit(0)

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)
    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    demZero = create_demZero(inps.dem, inps.geom_referenceDir)
    swathList = getSwathList(inps.reference)

    create_georectified_lat_lon(swathList, inps.reference, inps.geom_referenceDir,
                                demZero, loadProduct)
    merge_burst_lat_lon(inps, mergeBursts)
    multilook_images(inps, mergeBursts)

    run_file_list = make_run_list(inps)

    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)
        job_obj.write_batch_jobs(batch_file=item)
        job_status = job_obj.submit_batch_jobs(batch_file=item)
        if job_status:
            # Post-run housekeeping: prune empty error files, retry timeouts,
            # surface failures, and consolidate job output.
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    return
def main(iargs=None):
    """Execute (a slice of) the workflow run files (legacy ``js`` submission).

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')

    os.chdir(inps.work_dir)
    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'execute_runfiles'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    run_file_list = putils.read_run_list(inps.work_dir)

    # Normalize the requested range: 0 means "to the end" / "from the start";
    # start_run is 1-based on the command line, 0-based in the list slice.
    if inps.end_run == 0:
        inps.end_run = len(run_file_list)
    if inps.start_run != 0:
        inps.start_run = inps.start_run - 1

    # Log the invocation (explicit arguments when given, sys.argv otherwise).
    if iargs is not None:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1:]))

    run_file_list = run_file_list[inps.start_run:inps.end_run]

    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)
        job_status = js.submit_batch_jobs(batch_file=item,
                                          out_dir=os.path.join(inps.work_dir, 'run_files'),
                                          work_dir=inps.work_dir)
        if job_status:
            # Post-run housekeeping: prune empty error files, retry timeouts,
            # surface failures, and consolidate job output.
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

        date_str = datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d:%H%M%S')
        print(date_str + ' * Job {} completed'.format(item))

    date_str = datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d:%H%M%S')
    print(date_str + ' * all jobs from {} to {} have been completed'.format(
        os.path.basename(run_file_list[0]), os.path.basename(run_file_list[-1])))

    return None
def main(iargs=None):
    """Downloads data with ssara and asfserial scripts.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; ``sys.argv[1:]`` is used when None.
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'download_rsmas'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    # Log the invocation (explicit arguments when given, sys.argv otherwise).
    if iargs is not None:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1:]))

    if inps.template['topsStack.slcDir'] is not None:
        slc_dir = inps.template['topsStack.slcDir']
    else:
        slc_dir = os.path.join(inps.work_dir, 'SLC')

    if not os.path.isdir(inps.work_dir):
        os.makedirs(inps.work_dir)
    if not os.path.isdir(slc_dir):
        os.makedirs(slc_dir)

    # if satellite is not Sentinel (not tried yet)
    if 'SenDT' not in inps.project_name and 'SenAT' not in inps.project_name:
        ssara_call = ['ssara_federated_query.py'] + inps.ssaraopt + ['--print', '--download']
        # Run with an argv list (shell=False). The old code used shell=True
        # with a list (which drops all arguments on POSIX) and then called
        # .poll() on the int returned by .wait(), raising AttributeError.
        ssara_process = subprocess.Popen(ssara_call)
        completion_status = ssara_process.wait()
        return

    download('ssara', inps.custom_template_file, slc_dir, outnum=1)
    download('asfserial', inps.custom_template_file, slc_dir, outnum=1)

    # Retry up to twice if check_download reports corrupt files.
    for i_download in [2, 3]:
        download_success = run_check_download(slc_dir=slc_dir)

        if not download_success:
            print('check_download.py: There were bad files, download again')
            message_rsmas.log(inps.work_dir,
                              'check_download.py: there were bad files, download again')
            download('ssara', inps.custom_template_file, slc_dir, outnum=i_download)
            download('asfserial', inps.custom_template_file, slc_dir, outnum=i_download)