Example #1
def rerun_job_if_exit_code_140(run_file, inps_dict):
    """Find files that exited because walltime exceeed and run again with twice the walltime"""
    from minsar.job_submission import JOB_SUBMIT

    inps = inps_dict

    search_string = 'Exited with exit code 140.'
    files, job_files = find_completed_jobs_matching_search_string(
        run_file, search_string)

    if len(files) == 0:
        return

    rerun_file = create_rerun_run_file(job_files)

    wall_time = extract_walltime_from_job_file(job_files[0])
    memory = extract_memory_from_job_file(job_files[0])
    new_wall_time = multiply_walltime(wall_time, factor=2)

    print('new wall time: {}'.format(new_wall_time))
    print('memory: {}'.format(memory))

    # remove the error file matching each timed-out job's output file
    for file in files:
        os.remove(file.replace('.o*', '.e*'))

    move_out_job_files_to_stdout(run_file)

    # rename the stdout dir; otherwise it would be deleted by the later move_out_job_files_to_stdout step
    stdout_dir = os.path.dirname(run_file) + '/stdout_' + os.path.basename(
        run_file)
    os.rename(stdout_dir, stdout_dir + '_pre_rerun')

    remove_last_job_running_products(run_file)
    #queuename = os.getenv('QUEUENAME')

    inps.wall_time = new_wall_time
    inps.work_dir = os.path.dirname(os.path.dirname(rerun_file))
    inps.out_dir = os.path.dirname(rerun_file)
    #inps.queue = queuename

    job_obj = JOB_SUBMIT(inps)
    job_obj.write_batch_jobs(batch_file=rerun_file)
    job_obj.submit_batch_jobs(batch_file=rerun_file)

    remove_zero_size_or_length_error_files(run_file=rerun_file)
    raise_exception_if_job_exited(run_file=rerun_file)
    concatenate_error_files(run_file=rerun_file,
                            work_dir=os.path.dirname(
                                os.path.dirname(run_file)))
    move_out_job_files_to_stdout(rerun_file)
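
The walltime doubling above relies on a multiply_walltime helper that is not shown in this snippet. A minimal sketch, assuming HH:MM:SS walltime strings (the format and rounding are assumptions, not necessarily minsar's actual implementation):

# Hypothetical sketch of multiply_walltime; assumes 'HH:MM:SS' strings.
def multiply_walltime(wall_time, factor):
    hours, minutes, seconds = (int(part) for part in wall_time.split(':'))
    total_seconds = (hours * 3600 + minutes * 60 + seconds) * factor
    hours, remainder = divmod(int(total_seconds), 3600)
    minutes, seconds = divmod(remainder, 60)
    return '{:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)

print(multiply_walltime('01:30:00', factor=2))  # -> '03:00:00'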
Example #2
def main(iargs=None):
    inps = putils.cmd_line_parse(iargs, script='create_runfiles')
    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    #time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = inps.work_dir
    inps.num_data = 1

    job_obj = JOB_SUBMIT(inps)
    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_dir = inps.template[inps.prefix + 'Stack.demDir']
    except KeyError:
        dem_dir = 'DEM'

    try:
        dem_file = glob.glob(dem_dir + '/*.wgs84')[0]
        inps.template[inps.prefix + 'Stack.demDir'] = dem_file
    except IndexError:
        raise SystemExit('DEM does not exist')

    slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    os.makedirs(slc_dir, exist_ok=True)

    # if slc_dir holds less than 500 MB, assume no SLCs are present and unpack raw data
    if int(get_size(slc_dir) / 1024**2) < 500:

        # Unpack Raw data:
        if inps.template['raw_image_dir'] not in [None, 'None']:
            raw_image_dir = inps.template['raw_image_dir']
        else:
            raw_image_dir = os.path.join(inps.work_dir, 'RAW_data')

        if os.path.exists(raw_image_dir):
            unpackObj = Sensors(
                raw_image_dir,
                slc_dir,
                remove_file='False',
                multiple_raw_frame=inps.template['multiple_raw_frame'])
            unpack_run_file = unpackObj.start()
            unpackObj.close()

            job_obj.write_batch_jobs(batch_file=unpack_run_file)
            job_status = job_obj.submit_batch_jobs(batch_file=unpack_run_file)

            if not job_status:
                raise Exception('ERROR: Unpacking failed')
        else:
            raise Exception('ERROR: No data (SLC or Raw) available')

    # make run file:
    run_files_dirname = "run_files"
    config_dirname = "configs"

    if inps.copy_to_tmp:
        run_files_dirname += "_tmp"
        config_dirname += "_tmp"

    run_dir = os.path.join(inps.work_dir, run_files_dirname)
    config_dir = os.path.join(inps.work_dir, config_dirname)

    for directory in [run_dir, config_dir]:
        if os.path.exists(directory):
            shutil.rmtree(directory)

    inps.Stack_template = pathObj.correct_for_isce_naming_convention(inps)
    if inps.ignore_stack and os.path.exists(inps.work_dir +
                                            '/coreg_secondarys'):
        shutil.rmtree(inps.work_dir + '/tmp_coreg_secondarys',
                      ignore_errors=True)
        shutil.move(inps.work_dir + '/coreg_secondarys',
                    inps.work_dir + '/tmp_coreg_secondarys')

    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    if inps.ignore_stack and os.path.exists(inps.work_dir +
                                            '/tmp_coreg_secondarys'):
        shutil.move(inps.work_dir + '/tmp_coreg_secondarys',
                    inps.work_dir + '/coreg_secondarys')

    # prepend removal of any stale ./stack directory to the valid-region run file
    if os.path.isfile(run_dir + '/run_06_extract_stack_valid_region'):
        with open(run_dir + '/run_06_extract_stack_valid_region', 'r') as f:
            lines = f.readlines()
        with open(run_dir + '/run_06_extract_stack_valid_region', 'w') as f:
            f.writelines(['rm -rf ./stack; '] + lines)

    run_file_list = putils.make_run_list(inps.work_dir)
    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.write(item + '\n')

    if inps.copy_to_tmp:
        run_file_list = [
            item.replace("/run_files/", "/run_files_tmp/")
            for item in run_file_list
        ]
        with open(inps.work_dir + '/run_files_tmp_list', 'w') as run_file:
            for item in run_file_list:
                run_file.write(item + '\n')
        shutil.copytree(pathObj.rundir, run_dir)

    if inps.prefix == 'tops':
        # check for orbits
        orbit_dir = os.getenv('SENTINEL_ORBITS')
        local_orbit = os.path.join(inps.work_dir, 'orbits')
        precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
        if precise_orbits_in_local:
            for orbit_file in precise_orbits_in_local:
                os.system('cp {} {}'.format(orbit_file, orbit_dir))

    # Writing job files
    if inps.write_jobs:
        for item in run_file_list:
            job_obj.write_batch_jobs(batch_file=item)

        if inps.template['processingMethod'] == 'smallbaseline':
            job_name = 'smallbaseline_wrapper'
            job_file_name = job_name
            command = [
                'smallbaselineApp.py', inps.custom_template_file, '--dir',
                'mintpy'
            ]

            # pre_command = ["""[[ $(ls mintpy/time* | wc -l) -eq 1 ]] && rm mintpy/time*"""]
            pre_command = ["clean_mintpy_dir.bash;"]
            command = pre_command + command

            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')
        else:
            job_name = 'minopy_wrapper'
            job_file_name = job_name
            command = [
                'minopyApp.py', inps.custom_template_file, '--dir', 'minopy'
            ]
            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')

        job_name = 'insarmaps'
        job_file_name = job_name
        command = ['ingest_insarmaps.py', inps.custom_template_file]
        job_obj.submit_script(job_name,
                              job_file_name,
                              command,
                              writeOnly='True')

    print("copy_to_tmp: {}".format(inps.copy_to_tmp))
    if inps.copy_to_tmp:
        #run_dir_tmp = os.path.join(inps.work_dir, 'run_files_tmp')
        config_dir_tmp = os.path.join(inps.work_dir, 'configs_tmp')
        shutil.copytree(os.path.join(inps.work_dir, "configs"), config_dir_tmp)

        cmd = "update_configs_for_tmp.bash {}".format(inps.work_dir)
        subprocess.Popen(cmd, shell=True)

    return None
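
The 500 MB check above depends on a get_size helper that is not defined in this snippet. A plausible sketch (an assumption, not necessarily minsar's code) that walks the directory tree and sums file sizes in bytes, so dividing by 1024**2 yields megabytes:

import os

# Hypothetical sketch of get_size; returns the total size in bytes of all
# regular files under start_path, skipping symlinks to avoid double counting.
def get_size(start_path='.'):
    total = 0
    for dirpath, _dirnames, filenames in os.walk(start_path):
        for name in filenames:
            file_path = os.path.join(dirpath, name)
            if not os.path.islink(file_path):
                total += os.path.getsize(file_path)
    return total

print('{:.1f} MB'.format(get_size('.') / 1024**2))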
Example #3
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    if inps.prefix == 'stripmap':
        inps.num_bursts = 1

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'execute_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    run_file_list = putils.read_run_list(inps.work_dir)

    if inps.end_run == 0:
        inps.end_run = len(run_file_list)

    if inps.start_run != 0:
        inps.start_run -= 1

    if inps.step:
        inps.start_run = inps.step - 1
        inps.end_run = inps.step

    run_file_list = run_file_list[inps.start_run:inps.end_run]

    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)

        job_obj.write_batch_jobs(batch_file=item)
        job_status = job_obj.submit_batch_jobs(batch_file=item)

        if job_status:

            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item,
                                           work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

            date_str = datetime.datetime.now().strftime('%Y%m%d:%H%M%S')
            print(date_str + ' * Job {} completed'.format(item))

    date_str = datetime.datetime.now().strftime('%Y%m%d:%H%M%S')
    print(date_str + ' * all jobs from {} to {} have been completed'.format(
        os.path.basename(run_file_list[0]),
        os.path.basename(run_file_list[-1])))

    return
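
The start/end/step bookkeeping above converts one-based user input into a zero-based Python slice over the run files. A small worked example with hypothetical run-file names:

# Hypothetical run files; mirrors the slicing logic in main() above.
run_file_list = ['run_01_unpack', 'run_02_average', 'run_03_geocode']
start_run, end_run, step = 2, 0, 0   # user asks to start at run 2

if end_run == 0:
    end_run = len(run_file_list)     # default: run through the last file
if start_run != 0:
    start_run -= 1                   # one-based -> zero-based index
if step:
    start_run, end_run = step - 1, step  # --step selects exactly one file

print(run_file_list[start_run:end_run])  # ['run_02_average', 'run_03_geocode']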
Example #4
def main(iargs=None):
    inps = putils.cmd_line_parse(iargs, script='create_runfiles')
    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = inps.work_dir
    job_obj = JOB_SUBMIT(inps)
    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template[inps.prefix + 'Stack.demDir'] = dem_file
    except IndexError:
        raise SystemExit('DEM does not exist')

    slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    os.makedirs(slc_dir, exist_ok=True)

    # if slc_dir holds less than 500 MB, assume no SLCs are present and unpack raw data
    if int(get_size(slc_dir) / 1024**2) < 500:

        # Unpack Raw data:
        if inps.template['raw_image_dir'] not in [None, 'None']:
            raw_image_dir = inps.template['raw_image_dir']
        else:
            raw_image_dir = os.path.join(inps.work_dir, 'RAW_data')

        if os.path.exists(raw_image_dir):
            unpackObj = Sensors(
                raw_image_dir,
                slc_dir,
                remove_file='False',
                multiple_raw_frame=inps.template['multiple_raw_frame'])
            unpack_run_file = unpackObj.start()
            unpackObj.close()

            job_obj.write_batch_jobs(batch_file=unpack_run_file)
            job_status = job_obj.submit_batch_jobs(batch_file=unpack_run_file)

            if not job_status:
                raise Exception('ERROR: Unpacking failed')
        else:
            raise Exception('ERROR: No data (SLC or Raw) available')

    # make run file:
    run_dir = os.path.join(inps.work_dir, 'run_files')
    config_dir = os.path.join(inps.work_dir, 'configs')
    for directory in [run_dir, config_dir]:
        if os.path.exists(directory):
            shutil.rmtree(directory)

    inps.Stack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)

    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.write(item + '\n')

    if inps.prefix == 'tops':
        # check for orbits
        orbit_dir = os.getenv('SENTINEL_ORBITS')
        local_orbit = os.path.join(inps.work_dir, 'orbits')
        precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
        if precise_orbits_in_local:
            for orbit_file in precise_orbits_in_local:
                os.system('cp {} {}'.format(orbit_file, orbit_dir))

    # Writing job files
    if inps.write_jobs:
        for item in run_file_list:
            job_obj.write_batch_jobs(batch_file=item)

        if inps.template['processingMethod'] == 'smallbaseline':
            job_name = 'smallbaseline_wrapper'
            job_file_name = job_name
            command = [
                'smallbaselineApp.py', inps.custom_template_file, '--dir',
                'mintpy'
            ]
            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')
        else:
            job_name = 'minopy_wrapper'
            job_file_name = job_name
            command = [
                'minopyApp.py', inps.custom_template_file, '--dir', 'minopy'
            ]
            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')

        job_name = 'insarmaps'
        job_file_name = job_name
        command = ['ingest_insarmaps.py', inps.custom_template_file]
        job_obj.submit_script(job_name,
                              job_file_name,
                              command,
                              writeOnly='True')

    return None
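
The orbit-copy step above shells out to cp via os.system. An equivalent pure-Python sketch using shutil (the work_dir value is hypothetical):

import glob
import os
import shutil

# Hypothetical equivalent of the os.system('cp ...') loop above.
orbit_dir = os.getenv('SENTINEL_ORBITS')
local_orbit = os.path.join('/scratch/project', 'orbits')  # hypothetical work_dir
if orbit_dir:  # only copy when SENTINEL_ORBITS is set
    for orbit_file in glob.glob(local_orbit + '/*/*POEORB*'):
        shutil.copy(orbit_file, orbit_dir)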
Example #5
def main(iargs=None):
    """ create orth and geo rectifying run jobs and submit them. """

    inps = putils.cmd_line_parse(iargs)

    if 'stripmap' in inps.prefix:
        sys.path.append(os.path.join(os.getenv('ISCE_STACK'), 'stripmapStack'))
    else:
        sys.path.append(os.path.join(os.getenv('ISCE_STACK'), 'topsStack'))

    from s1a_isce_utils import loadProduct, getSwathList
    import mergeBursts

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    inps.geom_referenceDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.reference = os.path.join(inps.work_dir, pathObj.referencedir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except IndexError:
        print('DEM does not exist!')
        sys.exit(1)

    if not os.path.exists(inps.geom_referenceDir):
        os.mkdir(inps.geom_referenceDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)

    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    demZero = create_demZero(inps.dem, inps.geom_referenceDir)

    swathList = getSwathList(inps.reference)

    create_georectified_lat_lon(swathList, inps.reference,
                                inps.geom_referenceDir, demZero, loadProduct)

    merge_burst_lat_lon(inps, mergeBursts)

    multilook_images(inps, mergeBursts)

    run_file_list = make_run_list(inps)

    for item in run_file_list:

        putils.remove_last_job_running_products(run_file=item)

        job_obj.write_batch_jobs(batch_file=item)
        job_status = job_obj.submit_batch_jobs(batch_file=item)

        if job_status:
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item,
                                           work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    return
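
The DEM lookup in this and the earlier examples indexes a glob result inside a try/except. An equivalent sketch that checks the glob result explicitly instead of catching the IndexError (the work_dir value is hypothetical):

import glob
import sys

work_dir = '/scratch/project'  # hypothetical
dem_candidates = glob.glob('{}/DEM/*.wgs84'.format(work_dir))
if not dem_candidates:
    sys.exit('DEM does not exist in {}/DEM'.format(work_dir))
dem_file = dem_candidates[0]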