def main(iargs=None):
    """Execute the project run files through the LSF/PBS scheduler, or locally.

    Parses command-line arguments, optionally re-submits this script itself as
    a batch job, then submits each selected run file to the scheduler.  When no
    supported scheduler is configured, the commands of every run file are
    executed sequentially with os.system.

    :param iargs: optional list of command-line arguments (defaults to sys.argv)
    :return: None
    """
    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')
    os.chdir(inps.work_dir)
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'execute_runfiles'
    job_name = job_file_name

    # fall back to the configured walltime when none was given on the command line
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(
        inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        # re-submit this script as a scheduler job and quit the foreground process
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    # log the exact invocation for reproducibility
    command_line = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    message_rsmas.log(inps.work_dir, command_line)

    run_file_list = putils.read_run_list(inps.work_dir)

    if inps.endrun == 0:
        inps.endrun = len(run_file_list)
    if not inps.startrun == 0:
        # user input is 1-based; list slicing is 0-based
        inps.startrun = inps.startrun - 1
    run_file_list = run_file_list[inps.startrun:inps.endrun]

    if os.getenv('JOBSCHEDULER') == 'LSF' or os.getenv('JOBSCHEDULER') == 'PBS':
        for item in run_file_list:
            # step name is everything after the third '_' of the run-file name
            step_name = '_'
            step_name = step_name.join(item.split('_')[3::])
            # fall back to the DEFAULT section when the step has no entry;
            # was a bare `except:` — missing config sections/options raise KeyError
            try:
                memorymax = config[step_name]['memory']
            except KeyError:
                memorymax = config['DEFAULT']['memory']
            try:
                if config[step_name]['adjust'] == 'True':
                    walltimelimit = putils.walltime_adjust(
                        inps, config[step_name]['walltime'])
                else:
                    walltimelimit = config[step_name]['walltime']
            except KeyError:
                walltimelimit = config['DEFAULT']['walltime']
            queuename = os.getenv('QUEUENAME')

            putils.remove_last_job_running_products(run_file=item)
            # return value of submit_batch_jobs was unused; drop the binding
            js.submit_batch_jobs(batch_file=item,
                                 out_dir=os.path.join(inps.work_dir, 'run_files'),
                                 work_dir=inps.work_dir,
                                 memory=memorymax,
                                 walltime=walltimelimit,
                                 queue=queuename)
            # post-process scheduler output: drop empty *.e files, abort on
            # exited jobs, merge error files, archive stdout files
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)
    else:
        # no supported scheduler: run every command of every run file locally
        for item in run_file_list:
            with open(item, 'r') as f:
                command_lines = f.readlines()
                for command_line in command_lines:
                    os.system(command_line)

    return None
def main(iargs=None):
    """Scan the *.e / *.o files of the given jobs for known error strings.

    Writes matched errors to <run_file_base>_error_matches.e and matched data
    problems to <run_file_base>_data_problem_matches.e, then raises
    RuntimeError if anything matched.  On success the job stdout files are
    moved aside.

    :param iargs: optional list of command-line arguments (defaults to sys.argv)
    """
    inps = cmd_line_parser(iargs)
    work_dir = os.path.dirname(os.path.abspath(inps.job_files[0]))
    project_dir = os.path.dirname(work_dir)
    known_issues_file = os.path.join(os.getenv('RSMASINSAR_HOME'),
                                     'docs/known_issues.md')
    error_happened = False

    # substrings whose presence in a *.e/*.o file marks the job as failed;
    # checked via check_words_in_file
    error_strings = [
        'No annotation xml file found in zip file',
        'There appears to be a gap between slices. Cannot stitch them successfully',
        'no element found: line',
        'Exiting ...',
        'Segmentation fault',
        'Bus',
        'Aborted',
        'ERROR',
        'Error',
        'FileNotFoundError',
        'IOErr',
        'Traceback'
    ]
    # data problem (bad input) rather than a code error
    different_number_of_bursts_string = [
        'has different number of bursts',
    ]

    # strip the trailing '.job' extension from each job file name
    job_names = []
    for job_file in inps.job_files:
        tmp = job_file.split('.')
        job_names.append('.'.join(tmp[0:-1]))

    job_file = inps.job_files[0]
    job_name = job_names[0]
    # for run_* jobs the base is the name without the step suffix
    if 'run_' in job_name:
        run_file_base = '_'.join(job_name.split('_')[:-1])
    else:
        run_file_base = job_name

    matched_error_strings = []
    matched_data_problem_strings = []
    for job_name in job_names:
        print('checking *.e, *.o from ' + job_name + '.job')
        # preprocess *.e files
        if 'filter_coherence' in job_name or 'run_09_igram' in job_name:
            # run_09_igram is for stripmap
            putils.remove_line_counter_lines_from_error_files(
                run_file=job_name)
        putils.remove_zero_size_or_length_error_files(run_file=job_name)
        putils.remove_launcher_message_from_error_file(run_file=job_name)
        # second pass: launcher-message removal may have emptied files
        putils.remove_zero_size_or_length_error_files(run_file=job_name)
        putils.remove_timeout_error_files(run_file=job_name)

        # analyze *.e and *.o files
        error_files = natsorted(glob.glob(job_name + '*.e'))
        out_files = natsorted(glob.glob(job_name + '*.o'))

        if 'extract_stack_valid_region' in job_name:
            for file in out_files:
                string = different_number_of_bursts_string[0]
                if check_words_in_file(file, string):
                    matched_data_problem_strings.append(
                        'Warning: \"' + string + '\" found in ' + file + '\n')
                    print('Warning: \"' + string + '\" found in ' + file)

        for file in error_files + out_files:
            for error_string in error_strings:
                if check_words_in_file(file, error_string):
                    # skip_error() whitelists known-benign occurrences
                    if skip_error(file, error_string):
                        break
                    matched_error_strings.append(
                        'Error: \"' + error_string + '\" found in ' + file + '\n')
                    print('Error: \"' + error_string + '\" found in ' + file)

    if len(matched_error_strings) != 0:
        with open(run_file_base + '_error_matches.e', 'w') as f:
            f.write(''.join(matched_error_strings))
    else:
        print("no known error found")

    if len(matched_data_problem_strings) != 0:
        with open(run_file_base + '_data_problem_matches.e', 'w') as f:
            f.write(''.join(matched_data_problem_strings))
    else:
        print("no known data problem found")

    if 'run_' in job_name:
        putils.concatenate_error_files(run_file=run_file_base, work_dir=project_dir)
    else:
        out_error_file = os.path.dirname(
            error_files[-1]) + '/out_' + os.path.basename(error_files[-1])
        #Path(out_error_file)
        shutil.copy(error_files[-1], out_error_file)

    if len(matched_error_strings) + len(matched_data_problem_strings) != 0:
        print(
            'For known issues see https://github.com/geodesymiami/rsmas_insar/tree/master/docs/known_issues.md'
        )
        raise RuntimeError('Error in run_file: ' + run_file_base)

    # move only if there was no error
    if len(os.path.dirname(run_file_base)) == 0:
        run_file = os.getcwd() + '/' + run_file_base
    putils.move_out_job_files_to_stdout(run_file=run_file_base)
    return
def main(iargs=None):
    """Execute the project run files through LSF/PBS/SLURM, or locally.

    Like the older execute_runfiles variant, but with SLURM support (via
    submit_job_with_launcher), exit-code-140 reruns, and per-job progress
    messages.

    :param iargs: optional list of command-line arguments (defaults to sys.argv)
    :return: None
    """
    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')
    os.chdir(inps.work_dir)
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'execute_runfiles'
    job_name = job_file_name

    # fall back to the configured walltime when none was given on the command line
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(
        inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    run_file_list = putils.read_run_list(inps.work_dir)

    if inps.end_run == 0:
        inps.end_run = len(run_file_list)
    if not inps.start_run == 0:
        # user input is 1-based; list slicing is 0-based
        inps.start_run = inps.start_run - 1

    # log the invocation (iargs when called programmatically, argv otherwise)
    if not iargs is None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    run_file_list = run_file_list[inps.start_run:inps.end_run]

    supported_schedulers = ['LSF', 'PBS', 'SLURM']
    if os.getenv('JOBSCHEDULER') in supported_schedulers:
        for item in run_file_list:
            # step name is everything after the third '_' of the run-file name
            step_name = '_'
            step_name = step_name.join(item.split('_')[3::])
            # fall back to DEFAULT when the step has no config entry
            try:
                memorymax = config[step_name]['memory']
            except:
                memorymax = config['DEFAULT']['memory']
            try:
                # FA 26 Dec commented out as it seemed wrong
                #if config[step_name]['adjust'] == 'True':
                #    walltimelimit = putils.walltime_adjust(inps, config[step_name]['walltime'])
                #else:
                #    walltimelimit = config[step_name]['walltime']
                walltimelimit = config[step_name]['walltime']
            except:
                walltimelimit = config['DEFAULT']['walltime']
            queuename = os.getenv('QUEUENAME')

            putils.remove_last_job_running_products(run_file=item)

            # SLURM/sge go through the launcher; LSF/PBS through batch jobs
            if os.getenv('JOBSCHEDULER') in ['SLURM', 'sge']:
                js.submit_job_with_launcher(batch_file=item,
                                            work_dir=os.path.join(
                                                inps.work_dir, 'run_files'),
                                            memory=memorymax,
                                            walltime=walltimelimit,
                                            queue=queuename)
            else:
                jobs = js.submit_batch_jobs(batch_file=item,
                                            out_dir=os.path.join(
                                                inps.work_dir, 'run_files'),
                                            work_dir=inps.work_dir,
                                            memory=memorymax,
                                            walltime=walltimelimit,
                                            queue=queuename)

            # post-process scheduler output for this run file
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

            date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                                  '%Y%m%d:%H%M%S')
            print(date_str + ' * Job {} completed'.format(item))

        date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                              '%Y%m%d:%H%M%S')
        print(date_str + ' * all jobs from {} to {} have been completed'.format(
            os.path.basename(run_file_list[0]),
            os.path.basename(run_file_list[-1])))
    else:
        # no scheduler configured: run each command sequentially on this host
        for item in run_file_list:
            with open(item, 'r') as f:
                command_lines = f.readlines()
                for command_line in command_lines:
                    os.system(command_line)

    return None
def main(iargs=None):
    """Create ortho- and geo-rectifying run jobs and submit them.

    Builds georectified lat/lon layers for the master geometry, merges and
    multilooks the bursts, then submits the generated run files through the
    JOB_SUBMIT object.  Finally uploads the image products.

    :param iargs: optional list of command-line arguments (defaults to sys.argv)
    """
    inps = putils.cmd_line_parse(iargs)

    # local imports: these modules come from the ISCE stack on sys.path
    import s1a_isce_utils as ut
    import mergeBursts as mb

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    inps.geom_masterDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.master = os.path.join(inps.work_dir, pathObj.masterdir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except:
        # no DEM file present — nothing to rectify against
        print('DEM not exists!')
        sys.exit(1)

    if not os.path.exists(inps.geom_masterDir):
        os.mkdir(inps.geom_masterDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        # drop --submit so the resubmitted job does not resubmit itself again
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)
    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    # build a zero-height DEM, georectified lat/lon grids, then merge/multilook
    demZero = create_demZero(inps.dem, inps.geom_masterDir)
    swathList = ut.getSwathList(inps.master)
    create_georectified_lat_lon(swathList, inps.master, inps.geom_masterDir, demZero)
    merge_burst_lat_lon(inps)
    multilook_images(inps)

    run_file_list = make_run_list(inps)
    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)
        job_status = job_obj.submit_batch_jobs(batch_file=item)
        if job_status:
            # post-process scheduler output for this run file
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    #upload_to_s3(pic_dir)
    minsar.upload_data_products.main(
        [inps.custom_template_file, '--imageProducts'])
    return
def main(iargs=None):
    """Scan the *.e / *.o files of the given jobs for known error strings.

    Oldest variant: matched errors are written to <run_file>_error_matches.e
    and a RuntimeError is raised if any matched; on success the job stdout
    files are moved aside.

    :param iargs: optional list of command-line arguments (defaults to sys.argv)
    """
    inps = cmd_line_parser(iargs)
    work_dir = os.path.dirname(os.path.abspath(inps.job_files[0]))
    project_dir = os.path.dirname(work_dir)
    known_issues_file = os.path.join(os.getenv('RSMASINSAR_HOME'),
                                     'docs/known_issues.md')
    error_happened = False

    # substrings whose presence in a *.e/*.o file marks the job as failed
    error_strings = [
        'No annotation xml file found in zip file',
        'There appears to be a gap between slices. Cannot stitch them successfully',
        'no element found: line',
        'Exiting ...',
        'Segmentation fault',
        'Bus',
        'Aborted',
        'ERROR',
        'Error',
        'FileNotFoundError',
        'IOErr',
        'Traceback'
    ]

    job_file = inps.job_files[0]
    # strip the '.job' extension
    job_name = job_file.split('.')[0]
    job_files = inps.job_files

    # for run_* jobs the base is the name without the step suffix
    if 'run_' in job_name:
        run_file = '_'.join(job_name.split('_')[:-1])
    else:
        run_file = job_name

    matched_error_strings = []
    for job_file in job_files:
        print('checking: ' + job_file)
        job_name = job_file.split('.')[0]

        # preprocess *.e files before scanning them
        if 'filter_coherence' in job_name:
            putils.remove_line_counter_lines_from_error_files(run_file=job_name)
        if 'run_' in job_name:
            putils.remove_zero_size_or_length_error_files(run_file=job_name)

        error_files = glob.glob(job_name + '*.e')
        out_files = glob.glob(job_name + '*.o')
        error_files = natsorted(error_files)
        out_files = natsorted(out_files)

        for file in error_files + out_files:
            for error_string in error_strings:
                if check_words_in_file(file, error_string):
                    matched_error_strings.append(
                        'Error: \"' + error_string + '\" found in ' + file + '\n')
                    print(
                        'Error: \"' + error_string + '\" found in ' + file
                    )

    if len(matched_error_strings) != 0:
        with open(run_file + '_error_matches.e', 'w') as f:
            f.write(''.join(matched_error_strings))
    else:
        print("no error found")

    if 'run_' in job_name:
        putils.concatenate_error_files(run_file=run_file, work_dir=project_dir)
    else:
        out_error_file = os.path.dirname(error_files[-1]) + '/out_' + os.path.basename(error_files[-1])
        shutil.copy(error_files[-1], out_error_file)

    if len(matched_error_strings) != 0:
        print('For known issues see https://github.com/geodesymiami/rsmas_insar/tree/master/docs/known_issues.md')
        raise RuntimeError('Error in run_file: ' + run_file)

    # move only if there was no error
    if len(os.path.dirname(run_file))==0:
        run_file = os.getcwd() + '/' + run_file
    putils.move_out_job_files_to_stdout(run_file=run_file)
    return
def main(iargs=None):
    """Execute the project run files through the JOB_SUBMIT job object.

    Selects run files via --start/--end or --step, submits each as a batch
    job, and post-processes scheduler output after every job.

    :param iargs: optional list of command-line arguments (defaults to sys.argv)
    :return: None
    """
    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    # stripmap stacks have no bursts; force the count to 1
    if inps.prefix == 'stripmap':
        inps.num_bursts = 1

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'execute_runfiles'
        job_file_name = job_name
        # drop --submit so the resubmitted job does not resubmit itself again
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    run_file_list = putils.read_run_list(inps.work_dir)

    if inps.end_run == 0:
        inps.end_run = len(run_file_list)
    if not inps.start_run == 0:
        # user input is 1-based; list slicing is 0-based
        inps.start_run = inps.start_run - 1
    # --step selects exactly one run file
    if inps.step:
        inps.start_run = inps.step - 1
        inps.end_run = inps.step
    run_file_list = run_file_list[inps.start_run:inps.end_run]

    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)
        job_status = job_obj.submit_batch_jobs(batch_file=item)
        if job_status:
            # post-process scheduler output for this run file
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

        date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                              '%Y%m%d:%H%M%S')
        print(date_str + ' * Job {} completed'.format(item))

    date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                          '%Y%m%d:%H%M%S')
    print(date_str + ' * all jobs from {} to {} have been completed'.format(
        os.path.basename(run_file_list[0]), os.path.basename(
            run_file_list[-1])))

    return None
def main(iargs=None):
    """ create orth and geo rectifying run jobs and submit them.

    Old js.submit_batch_jobs-based variant: builds georectified lat/lon
    layers, merges/multilooks bursts, then submits amplitude ortho/geo run
    files with memory/walltime taken from job_defaults.cfg.

    :param iargs: optional list of command-line arguments (defaults to sys.argv)
    """
    inps = putils.cmd_line_parse(iargs)

    inps.geom_masterDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.master = os.path.join(inps.work_dir, pathObj.masterdir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except:
        # no DEM file present — nothing to rectify against
        print('DEM not exists!')
        sys.exit(1)

    if not os.path.exists(inps.geom_masterDir):
        os.mkdir(inps.geom_masterDir)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'export_ortho_geo'
    job_name = job_file_name

    # fall back to the configured walltime when none was given on the command line
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']
    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    # log the invocation (iargs when called programmatically, argv otherwise)
    if not iargs is None:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    # build a zero-height DEM, georectified lat/lon grids, then merge/multilook
    demZero = create_demZero(inps.dem, inps.geom_masterDir)
    swathList = ut.getSwathList(inps.master)
    create_georectified_lat_lon(swathList, inps.master, inps.geom_masterDir, demZero)
    merge_burst_lat_lon(inps)
    multilook_images(inps)

    run_file_list = make_run_list_amplitude(inps)

    for item in run_file_list:
        step_name = 'amplitude_ortho_geo'
        # fall back to DEFAULT when the step has no config entry
        try:
            memorymax = config[step_name]['memory']
        except:
            memorymax = config['DEFAULT']['memory']
        try:
            if config[step_name]['adjust'] == 'True':
                # NOTE(review): other variants call walltime_adjust(inps, walltime);
                # here it is called with a single argument — confirm the signature
                walltimelimit = putils.walltime_adjust(config[step_name]['walltime'])
            else:
                walltimelimit = config[step_name]['walltime']
        except:
            walltimelimit = config['DEFAULT']['walltime']
        queuename = os.getenv('QUEUENAME')

        putils.remove_last_job_running_products(run_file=item)
        jobs = js.submit_batch_jobs(batch_file=item,
                                    out_dir=os.path.join(inps.work_dir, 'run_files'),
                                    work_dir=inps.work_dir,
                                    memory=memorymax,
                                    walltime=walltimelimit,
                                    queue=queuename)
        # post-process scheduler output for this run file
        putils.remove_zero_size_or_length_error_files(run_file=item)
        putils.raise_exception_if_job_exited(run_file=item)
        putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
        putils.move_out_job_files_to_stdout(run_file=item)

    return
def main(iargs=None):
    """Scan scheduler *.e / *.o files of the given jobs for known problems.

    Data problems (bad SLC zips, missing bursts, missing frames) cause the
    offending date to be removed from the remaining run_files and recorded in
    run_files*/removed_dates.txt; processing aborts when too many dates were
    removed.  Generic error strings (segfaults, tracebacks, ...) are collected
    and raise a RuntimeError after a *_error_matches.e summary is written.

    :param iargs: optional list of command-line arguments (defaults to sys.argv)
    :raises RuntimeError: on too many removed dates or any matched error string
    """
    inps = cmd_line_parser(iargs)
    work_dir = os.path.dirname(os.path.abspath(inps.job_files[0]))
    project_dir = os.path.dirname(work_dir)
    if inps.copy_to_tmp:
        run_files_dir = project_dir + '/run_files_tmp'
    else:
        run_files_dir = project_dir + '/run_files'
    # for minopy sub-workdirs the project dir is one level further up
    if 'minopy' in project_dir:
        project_dir = os.path.dirname(os.path.abspath(project_dir))

    # strings indicating a data problem (bad input data, not a code bug)
    data_problems_strings_out_files = [
        'There appears to be a gap between slices. Cannot stitch them successfully',
        'SLCs are sliced differently with different versions of the processor',
        'No annotation xml file found in zip file',
        'mismatched tag: line 77, column 6',
        'no element found: line',
        'not well-formed (invalid token)'
    ]
    data_problems_strings_error_files = [
        'does not exist in the file system, and is not recognized as a supported dataset name'
    ]
    data_problems_strings_run_04 = [
        'FileNotFoundError: [Errno 2] No such file or directory:'
    ]
    different_number_of_bursts_string = [
        'has different number of bursts',
    ]
    # generic error strings; ' Error' carries a leading blank on purpose
    error_strings = [
        'Segmentation fault',
        'Bus',
        'Aborted',
        #'ERROR',   # FA 11/21: commented, see explanation below
        ' Error',
        'FileNotFoundError',
        'IOErr',
        'Traceback'
    ]
    #'Exiting ...',   # remove if above works fine (5/21)
    # Explanation 11/2021 for 'ERROR' removal from error_strings:
    # a data-problem message ("ERROR 4: `/vsizip/S1A_..zip/...tiff' does not
    # exist in the file system, and is not recognized as a supported dataset
    # name.") appeared in a run_02*.e file, so the lists were split into
    # data_problems_strings_out_files and data_problems_strings_error_files;
    # 'ERROR' had to go because a leftover `ERROR 4` in a run_*.e file would
    # still raise an exception.

    # strip the trailing '.job' extension from each job file name
    job_names = []
    for job_file in inps.job_files:
        tmp = job_file.split('.')
        job_names.append('.'.join(tmp[0:-1]))

    job_name = job_names[0]
    # for run_* jobs the base is the name without the step suffix
    if 'run_' in job_name:
        run_file_base = '_'.join(job_name.split('_')[:-1])
    else:
        run_file_base = job_name

    matched_error_strings = []
    matched_data_problem_strings = []
    for job_name in job_names:
        print('checking *.e, *.o from ' + job_name + '.job')

        # preprocess *.e files
        if 'filter_coherence' in job_name or 'run_09_igram' in job_name or 'minopy_generate_ifgram' in job_name:
            # run_09_igram is for stripmap
            putils.remove_line_counter_lines_from_error_files(run_file=job_name)
            # 5/21: sometimes not working. Move before loop using run_file_base ??
        putils.remove_zero_size_or_length_error_files(run_file=job_name)
        putils.remove_launcher_message_from_error_file(run_file=job_name)
        putils.remove_ssh_warning_message_from_error_file(run_file=job_name)
        # second pass: message removal may have emptied files
        putils.remove_zero_size_or_length_error_files(run_file=job_name)
        putils.remove_timeout_error_files(run_file=job_name)

        # analyze *.e and *.o files
        error_files = natsorted(glob.glob(job_name + '*.e'))
        out_files = natsorted(glob.glob(job_name + '*.o'))

        if 'unpack_secondary_slc' in job_name:
            for file in out_files:
                for string in data_problems_strings_out_files:
                    if check_words_in_file(file, string):
                        date = file.split("_")[-2]
                        print('WARNING: \"' + string + '\" found in ' +
                              os.path.basename(file) + ': removing ' + date +
                              ' from run_files ')
                        putils.run_remove_date_from_run_files(
                            run_files_dir=run_files_dir, date=date, start_run_file=3)
                        with open(run_files_dir + '/removed_dates.txt', 'a') as rd:
                            rd.writelines('run_02: removing ' + date + ', \"' +
                                          string + '\" found in ' +
                                          os.path.basename(file) + ' \n')
            for file in error_files:
                for string in data_problems_strings_error_files:
                    if check_words_in_file(file, string):
                        date = file.split("_")[-2]
                        print('WARNING: \"' + string + '\" found in ' +
                              os.path.basename(file) + ': removing ' + date +
                              ' from run_files ')
                        putils.run_remove_date_from_run_files(
                            run_files_dir=run_files_dir, date=date, start_run_file=3)
                        with open(run_files_dir + '/removed_dates.txt', 'a') as rd:
                            rd.writelines('run_02: removing ' + date + ', \"' +
                                          string + '\" found in ' +
                                          os.path.basename(file) + ' \n')
                        # FIX: this used to be `try: ... raise RuntimeError(...)
                        # except: pass`, which swallowed the very abort it
                        # raised (the analogous run_06 limit-30 check below
                        # raises unconditionally).  Only the missing-file case
                        # is tolerated now.
                        try:
                            num_lines = sum(
                                1 for line in open(run_files_dir + '/removed_dates.txt'))
                        except FileNotFoundError:
                            num_lines = 0
                        if num_lines >= 10:
                            shutil.copy(
                                run_files_dir + '/removed_dates.txt',
                                project_dir + '/out_' + os.path.basename(job_name) + '.e')
                            raise RuntimeError('Too many bad data: ', num_lines)

        # this covers missing frames: run_files are generated although a frame
        # in the middle is missing
        if 'fullBurst_geo2rdr' in job_name:
            # FIX: iterate over a copy — the loop removes items from error_files
            for file in list(error_files):
                for string in data_problems_strings_run_04:
                    if check_words_in_file(file, string):
                        date = file.split("_")[-2]
                        print('WARNING: \"' + string + '\" found in ' +
                              os.path.basename(file) + ': removing ' + date +
                              ' from run_files ')
                        putils.run_remove_date_from_run_files(
                            run_files_dir=run_files_dir, date=date, start_run_file=5)
                        secondary_date_dir = project_dir + '/coreg_secondarys/' + date
                        # FIX: was a bare `except:`; only filesystem errors are expected
                        try:
                            shutil.rmtree(secondary_date_dir)
                        except OSError:
                            pass
                        with open(run_files_dir + '/removed_dates.txt', 'a') as rd:
                            rd.writelines('run_04: removing ' + date + ', \"' +
                                          string + '\" found in ' +
                                          os.path.basename(file) + ' \n')
                            rd.writelines('run_04: removing directory ' +
                                          secondary_date_dir + ' \n')
                        # park the offending *.e file so later scans skip it
                        out_dir = run_files_dir + '/stdout_run_04_fullBurst_geo2rdr'
                        os.makedirs(out_dir, exist_ok=True)
                        shutil.move(file, out_dir + '/' + os.path.basename(file))
                        error_files.remove(file)

        if 'extract_stack_valid_region' in job_name:
            for file in out_files:
                string = different_number_of_bursts_string[0]
                if check_words_in_file(file, string):
                    print('Warning: \"' + string + '\" found in ' + file)
                    # collect the unique dates flagged with WARNING in the *.o file
                    with open(file) as fo:
                        problem_dates = []
                        lines = fo.readlines()
                        for line in lines:
                            if 'WARNING:' in line:
                                date = line.split(' ')[1]
                                problem_dates.append(date)
                    problem_dates = natsorted(list(set(problem_dates)))
                    for date in problem_dates:
                        print('WARNING: \"' + string + '\" found in ' +
                              os.path.basename(file) + ': removing ' + date +
                              ' from run_files ')
                        putils.run_remove_date_from_run_files(
                            run_files_dir=run_files_dir, date=date, start_run_file=7)
                        with open(run_files_dir + '/removed_dates.txt', 'a') as rd:
                            rd.writelines('run_06: removing ' + date + ', \"' +
                                          string + '\" found in ' +
                                          os.path.basename(file) + ' \n')
                    # exit if too many removed dates
                    num_lines = sum(
                        1 for line in open(run_files_dir + '/removed_dates.txt'))
                    if num_lines >= 30:
                        shutil.copy(
                            run_files_dir + '/removed_dates.txt',
                            project_dir + '/out_' + os.path.basename(job_name) + '.e')
                        raise RuntimeError(
                            'Too many dates with missing bursts (limit is 30): ',
                            num_lines)

        # generic error-string scan over the remaining *.e and *.o files
        for file in error_files + out_files:
            for error_string in error_strings:
                if check_words_in_file(file, error_string):
                    # skip_error() whitelists known-benign occurrences
                    if skip_error(file, error_string):
                        break
                    matched_error_strings.append(
                        'Error: \"' + error_string + '\" found in ' + file + '\n')
                    print('Error: \"' + error_string + '\" found in ' + file)

    if len(matched_data_problem_strings) != 0:
        with open(run_file_base + '_data_problem_matches.e', 'w') as f:
            f.write(''.join(matched_data_problem_strings))
    elif len(matched_error_strings) != 0:
        with open(run_file_base + '_error_matches.e', 'w') as f:
            f.write(''.join(matched_error_strings))
    else:
        print("no error found")

    if 'run_' in job_name:
        putils.concatenate_error_files(run_file=run_file_base, work_dir=project_dir)
    else:
        out_error_file = work_dir + '/out_' + os.path.basename(job_name) + '.e'
        if len(error_files) == 0:
            Path(out_error_file).touch()
        else:
            shutil.copy(error_files[-1], out_error_file)

    # exit for errors
    if len(matched_error_strings) + len(matched_data_problem_strings) != 0:
        print(
            'For known issues see https://github.com/geodesymiami/rsmas_insar/tree/master/docs/known_issues.md'
        )
        raise RuntimeError('Error in run_file: ' + run_file_base)

    # move only if there was no error
    if len(os.path.dirname(run_file_base)) == 0:
        run_file = os.getcwd() + '/' + run_file_base
    putils.move_out_job_files_to_stdout(run_file=run_file_base)
    return
def main(iargs=None):
    """ create orth and geo rectifying run jobs and submit them.

    Newest variant: picks the stripmap or tops ISCE stack from inps.prefix,
    builds georectified lat/lon layers for the reference geometry, merges and
    multilooks bursts, then writes and submits the generated run files.

    :param iargs: optional list of command-line arguments (defaults to sys.argv)
    """
    inps = putils.cmd_line_parse(iargs)

    # choose the ISCE stack matching the processing prefix before importing from it
    if 'stripmap' in inps.prefix:
        sys.path.append(os.path.join(os.getenv('ISCE_STACK'), 'stripmapStack'))
    else:
        sys.path.append(os.path.join(os.getenv('ISCE_STACK'), 'topsStack'))

    from s1a_isce_utils import loadProduct, getSwathList
    import mergeBursts

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    inps.geom_referenceDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.reference = os.path.join(inps.work_dir, pathObj.referencedir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except:
        # no DEM file present — nothing to rectify against
        print('DEM not exists!')
        sys.exit(1)

    if not os.path.exists(inps.geom_referenceDir):
        os.mkdir(inps.geom_referenceDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        # drop --submit so the resubmitted job does not resubmit itself again
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        # NOTE(review): unlike sibling variants there is no sys.exit(0) here,
        # so execution continues locally after submitting — confirm intended

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)
    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    # build a zero-height DEM, georectified lat/lon grids, then merge/multilook
    demZero = create_demZero(inps.dem, inps.geom_referenceDir)
    swathList = getSwathList(inps.reference)
    create_georectified_lat_lon(swathList, inps.reference,
                                inps.geom_referenceDir, demZero, loadProduct)
    merge_burst_lat_lon(inps, mergeBursts)
    multilook_images(inps, mergeBursts)

    run_file_list = make_run_list(inps)
    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)
        job_obj.write_batch_jobs(batch_file=item)
        job_status = job_obj.submit_batch_jobs(batch_file=item)
        if job_status:
            # post-process scheduler output for this run file
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    return
def main(iargs=None):
    """Execute the project run files through js.submit_batch_jobs.

    Transitional variant: no per-step memory/walltime configuration; submits
    each selected run file and post-processes its scheduler output.

    :param iargs: optional list of command-line arguments (defaults to sys.argv)
    :return: None
    """
    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')
    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'execute_runfiles'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    run_file_list = putils.read_run_list(inps.work_dir)

    if inps.end_run == 0:
        inps.end_run = len(run_file_list)
    if not inps.start_run == 0:
        # user input is 1-based; list slicing is 0-based
        inps.start_run = inps.start_run - 1

    # log the invocation (iargs when called programmatically, argv otherwise)
    if not iargs is None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    run_file_list = run_file_list[inps.start_run:inps.end_run]

    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)
        job_status = js.submit_batch_jobs(batch_file=item,
                                          out_dir=os.path.join(
                                              inps.work_dir, 'run_files'),
                                          work_dir=inps.work_dir)
        if job_status:
            # post-process scheduler output for this run file
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

        date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                              '%Y%m%d:%H%M%S')
        print(date_str + ' * Job {} completed'.format(item))

    date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                          '%Y%m%d:%H%M%S')
    print(date_str + ' * all jobs from {} to {} have been completed'.format(
        os.path.basename(run_file_list[0]),
        os.path.basename(run_file_list[-1])))

    return None