Example 1
def main(iargs=None):
    """ email mintpy or insarmaps results """

    inps = putils.cmd_line_parse(iargs, script='email_results')

    email_address = os.getenv('NOTIFICATIONEMAIL')

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    if inps.email_insarmaps_flag:
        email_insarmaps_results(email_address)

        if int(inps.template['cleanopt']) == 4:
            cleanlist = pathObj.isce_clean_list
            putils.remove_directories(cleanlist[4])

        return

    if inps.email_mintpy_flag:
        email_mintpy_results(email_address)
        return

    return None
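
Nearly every main() in these examples repeats the same preamble: log the call with iargs when the function was invoked programmatically, otherwise with sys.argv[1:]. A minimal stand-alone sketch of that pattern as a helper (the function names are illustrative, not part of message_rsmas, and the logged script name comes from sys.argv[0] here rather than __file__):

import os
import sys


def resolve_input_arguments(iargs=None):
    """Return the argument list to log: iargs when given, else the command-line arguments."""
    return list(iargs) if iargs is not None else sys.argv[1:]


def log_call(work_dir, iargs=None, log_name='log'):
    """Append a one-line record of this invocation to <work_dir>/<log_name> (illustrative only)."""
    line = os.path.basename(sys.argv[0]) + ' ' + ' '.join(resolve_input_arguments(iargs))
    with open(os.path.join(work_dir, log_name), 'a') as f:
        f.write(line + '\n')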
Example 2
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template['topsStack.demDir'] = dem_file
    except:
        raise SystemExit('DEM does not exist')

    # check for orbits
    orbit_dir = os.getenv('SENTINEL_ORBITS')

    # make run file
    inps.topsStack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)

    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.write(item + '\n')

    local_orbit = os.path.join(inps.work_dir, 'orbits')
    precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
    if len(precise_orbits_in_local) > 0:
        for orbit_file in precise_orbits_in_local:
            os.system('cp {} {}'.format(orbit_file, orbit_dir))

    return None
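
The final loop above copies precise-orbit files with os.system('cp ...'); a sketch of the same step using shutil, assuming the same orbits/ layout and the SENTINEL_ORBITS environment variable used in the example:

import glob
import os
import shutil


def copy_precise_orbits(work_dir, orbit_dir=None):
    """Copy locally downloaded POEORB orbit files into the shared orbit directory."""
    orbit_dir = orbit_dir or os.getenv('SENTINEL_ORBITS')
    local_orbit = os.path.join(work_dir, 'orbits')
    for orbit_file in glob.glob(os.path.join(local_orbit, '*', '*POEORB*')):
        shutil.copy(orbit_file, orbit_dir)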
Example 3
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    os.chdir(inps.work_dir)

    job_file_name = 'create_runfiles'
    job_name = job_file_name
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(
        inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:

        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir,
                         new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template['topsStack.demDir'] = dem_file
    except:
        raise SystemExit('DEM does not exist')

    inps.topsStack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)

    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.write(item + '\n')

    if inps.template['topsStack.workflow'] in ['interferogram', 'slc']:
        runObj.run_post_stack()

    return None
Example 4
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        work_dir = inps.work_dir

        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']

        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir,
                         inps.wall_time)

    os.chdir(inps.work_dir)

    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    os.chdir(inps.slc_dir)

    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    generate_files_csv(project_slc_dir, inps.custom_template_file)
    successful = run_download_asf_serial(project_slc_dir, logger)
    change_file_permissions()
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
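
The cookie-jar cleanup above wraps os.remove in try/except OSError; an equivalent sketch with pathlib (requires Python 3.8+ for missing_ok, and the helper name is illustrative):

from pathlib import Path


def remove_bulk_download_cookiejar():
    """Delete ~/.bulk_download_cookiejar.txt if present; a missing file is not an error."""
    Path.home().joinpath('.bulk_download_cookiejar.txt').unlink(missing_ok=True)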
Example 5
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='smallbaseline_wrapper')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'smallbaseline_wrapper'
    job_name = job_file_name

    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(
        inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:

        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir,
                         new_wall_time)
        sys.exit(0)

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    os.chdir(inps.work_dir)

    time.sleep(wait_seconds)

    try:
        with open('out_mintpy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main([inps.customTemplateFile])
    except:
        with open('out_mintpy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main([inps.customTemplateFile])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Mintpy results
    if inps.email:
        email_results.main([inps.customTemplateFile])

    return None
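
The try/except above redirects stdout to out_mintpy.o and, only if the first call raises, reruns the application with stderr redirected to out_mintpy.e. A sketch that captures both streams in a single run instead (the wrapped callable is a placeholder, not smallbaselineApp itself):

import contextlib


def run_with_captured_output(func, args, out_file='out_mintpy.o', err_file='out_mintpy.e'):
    """Call func(args) with stdout and stderr written to the given files."""
    with open(out_file, 'w') as out, open(err_file, 'w') as err:
        with contextlib.redirect_stdout(out), contextlib.redirect_stderr(err):
            return func(args)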
Example 6
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    logfile_name = inps.work_dir + '/ssara_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')
    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]

        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']

        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir,
                         inps.wall_time)
        sys.exit(0)

    if not os.path.isdir(project_slc_dir):
        os.makedirs(project_slc_dir)
    os.chdir(inps.slc_dir)

    logger.log(loglevel.INFO, "DATASET: %s",
               str(inps.custom_template_file.split('/')[-1].split(".")[0]))
    logger.log(loglevel.INFO, "DATE: %s",
               datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"))
    successful = run_ssara(project_slc_dir, inps.custom_template_file,
                           inps.delta_lat, logger)
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
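
RsmasLogger is project code; for reference, a rough standard-library equivalent of a file logger carrying the same INFO-level messages could look like the sketch below (the logger name and message format are assumptions):

import logging


def make_file_logger(logfile_name, name='ssara_rsmas'):
    """Return a logger that appends timestamped messages to logfile_name."""
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler(logfile_name)
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    return logger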
Example 7
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='minopy_wrapper')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'minopy_wrapper'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)

    os.chdir(inps.work_dir)

    try:
        with open('out_minopy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main(
                    [inps.custom_template_file, '--dir', pathObj.mintpydir])
    except:
        with open('out_minopy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main(
                    [inps.custom_template_file, '--dir', pathObj.mintpydir])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Minopy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--minopy'])

    return None
Example 8
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/ssara_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')
    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if not os.path.isdir(project_slc_dir):
        os.makedirs(project_slc_dir)
    os.chdir(inps.slc_dir)

    logger.log(loglevel.INFO, "DATASET: %s",
               str(inps.custom_template_file.split('/')[-1].split(".")[0]))
    logger.log(loglevel.INFO, "DATE: %s",
               datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"))
    successful = run_ssara(project_slc_dir, inps.custom_template_file,
                           inps.delta_lat, logger)
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
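
The job name above is derived with custom_template_file.split(os.sep)[-1].split('.')[0]; for template files with a single extension, pathlib expresses the same idea more directly, as in this illustrative sketch:

from pathlib import Path


def job_name_from_template(custom_template_file):
    """Return the template file name without its directory or (last) extension."""
    return Path(custom_template_file).stem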
Example 9
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/ssara_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    #import pdb; pdb.set_trace()
    if inps.template[inps.prefix + 'Stack.slcDir'] is not None:
        inps.download_dir = inps.template[inps.prefix + 'Stack.slcDir']

    if 'COSMO' in inps.template['ssaraopt.platform']:
        inps.download_dir = os.path.join(inps.work_dir, 'RAW_data')
    else:
        inps.download_dir = os.path.join(inps.work_dir, 'SLC')

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if not os.path.isdir(inps.download_dir):
        os.makedirs(inps.download_dir)
    os.chdir(inps.download_dir)

    succesful = run_ssara(inps.download_dir, inps.custom_template_file,
                          inps.delta_lat, logger)

    return None
Example 10
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='smallbaseline_wrapper')

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'smallbaseline_wrapper'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    os.chdir(inps.work_dir)

    try:
        with open('out_mintpy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main([inps.custom_template_file])
    except:
        with open('out_mintpy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main([inps.custom_template_file])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Mintpy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--mintpy'])

    return None
Example 11
def main(iargs=None):
    """ create template files for chunk processing """

    inps = putils.cmd_line_parse(iargs, script='generate_chunk_template_files')

    os.chdir(inps.work_dir)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    run_generate_chunk_template_files(inps)

    return
Example 12
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')

    os.chdir(inps.work_dir)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'execute_runfiles'
    job_name = job_file_name

    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(
        inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:

        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir,
                         new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    run_file_list = putils.read_run_list(inps.work_dir)

    if inps.end_run == 0:
        inps.end_run = len(run_file_list)

    if inps.start_run != 0:
        inps.start_run = inps.start_run - 1

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    run_file_list = run_file_list[inps.start_run:inps.end_run]

    supported_schedulers = ['LSF', 'PBS', 'SLURM']

    if os.getenv('JOBSCHEDULER') in supported_schedulers:

        for item in run_file_list:
            step_name = '_'.join(item.split('_')[3:])
            try:
                memorymax = config[step_name]['memory']
            except:
                memorymax = config['DEFAULT']['memory']

            try:
                # FA 26 Dec commented out as it seemed wrong
                #if config[step_name]['adjust'] == 'True':
                #    walltimelimit = putils.walltime_adjust(inps, config[step_name]['walltime'])
                #else:
                #    walltimelimit = config[step_name]['walltime']
                walltimelimit = config[step_name]['walltime']
            except:
                walltimelimit = config['DEFAULT']['walltime']

            queuename = os.getenv('QUEUENAME')

            putils.remove_last_job_running_products(run_file=item)

            if os.getenv('JOBSCHEDULER') in ['SLURM', 'sge']:

                js.submit_job_with_launcher(batch_file=item,
                                            work_dir=os.path.join(
                                                inps.work_dir, 'run_files'),
                                            memory=memorymax,
                                            walltime=walltimelimit,
                                            queue=queuename)

            else:

                jobs = js.submit_batch_jobs(batch_file=item,
                                            out_dir=os.path.join(
                                                inps.work_dir, 'run_files'),
                                            work_dir=inps.work_dir,
                                            memory=memorymax,
                                            walltime=walltimelimit,
                                            queue=queuename)

            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item,
                                           work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

            date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                                  '%Y%m%d:%H%M%S')
            print(date_str + ' * Job {} completed'.format(item))

        date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                              '%Y%m%d:%H%M%S')
        print(date_str +
              ' * all jobs from {} to {} have been completed'.format(
                  os.path.basename(run_file_list[0]),
                  os.path.basename(run_file_list[-1])))

    else:
        for item in run_file_list:
            with open(item, 'r') as f:
                command_lines = f.readlines()
                for command_line in command_lines:
                    os.system(command_line)

    return None
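
When no supported scheduler is configured, the loop above executes each line of a run file with os.system and ignores the exit codes; a sketch of the same fallback with subprocess.run, which stops on the first failing command (illustrative only):

import subprocess


def run_file_serially(run_file):
    """Execute each non-empty line of a run file as a shell command; raise on the first failure."""
    with open(run_file) as f:
        for command_line in f:
            command_line = command_line.strip()
            if command_line:
                subprocess.run(command_line, shell=True, check=True)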
Example 13
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='ingest_insarmaps')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    #time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'ingest_insarmaps'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)

    os.chdir(inps.work_dir)

    hdfeos_file = glob.glob(inps.work_dir + '/mintpy/*.he5')
    hdfeos_file.extend(glob.glob(inps.work_dir + '/mintpy/SUBSET_*/*.he5'))
    hdfeos_file = hdfeos_file[0]

    json_folder = inps.work_dir + '/mintpy/JSON'
    mbtiles_file = json_folder + '/' + os.path.splitext(
        os.path.basename(hdfeos_file))[0] + '.mbtiles'

    if os.path.isdir(json_folder):
        shutil.rmtree(json_folder)

    command1 = 'hdfeos5_2json_mbtiles.py ' + hdfeos_file + ' ' + json_folder
    command2 = 'json_mbtiles2insarmaps.py -u ' + password.insaruser + ' -p ' + password.insarpass + ' --host ' + \
               'insarmaps.miami.edu -P rsmastest -U rsmas@gmail.com --json_folder ' + \
               json_folder + ' --mbtiles_file ' + mbtiles_file

    with open(inps.work_dir + '/run_insarmaps', 'w') as f:
        f.write(command1 + '\n')
        f.write(command2 + '\n')

    out_file = 'out_ingest_insarmaps'
    message_rsmas.log(inps.work_dir, command1)
    #command1 = '('+command1+' | tee '+out_file+'.o) 3>&1 1>&2 2>&3 | tee '+out_file+'.e'
    status = subprocess.Popen(command1, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in hdfeos5_2json_mbtiles.py')

    # TODO: Change subprocess call to get back error code and send error code to logger
    message_rsmas.log(inps.work_dir, command2)
    #command2 = '('+command2+' | tee -a '+out_file+'.o) 3>&1 1>&2 2>&3 | tee -a '+out_file+'.e'
    status = subprocess.Popen(command2, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in json_mbtiles2insarmaps.py')

    # Email insarmaps results:
    if inps.email:
        message_rsmas.log(
            inps.work_dir,
            'email_results.py --insarmaps ' + inps.custom_template_file)
        email_results.main([inps.custom_template_file, '--insarmaps'])

    return None
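
The HDF-EOS lookup above collects *.he5 files from mintpy/ and from the SUBSET_* subdirectories and then uses the first match; a small sketch of that search with an explicit error when nothing is found (the helper name is illustrative):

import glob
import os


def find_hdfeos_file(work_dir):
    """Return the first *.he5 product under mintpy/ or mintpy/SUBSET_*/."""
    candidates = glob.glob(os.path.join(work_dir, 'mintpy', '*.he5'))
    candidates += glob.glob(os.path.join(work_dir, 'mintpy', 'SUBSET_*', '*.he5'))
    if not candidates:
        raise FileNotFoundError('no HDF-EOS (*.he5) file found under ' + work_dir)
    return candidates[0]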
Example 14
def main(iargs=None):

    # set defaults: ssara=True is set in dem_parser; use the custom template field if given
    inps = cmd_line_parse(iargs, script='dem_rsmas')

    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    if not inps.flag_boundingBox and not inps.flag_ssara:
        if 'demMethod' in list(inps.template.keys()):
            if inps.template['demMethod'] == 'ssara':
                inps.flag_ssara = True
                inps.flag_boundingBox = False
            if inps.template['demMethod'] == 'boundingBox':
                inps.flag_ssara = False
                inps.flag_boundingBox = True
    elif inps.flag_boundingBox:
        inps.flag_ssara = False
    else:
        inps.flag_ssara = True

    dem_dir = make_dem_dir(inps.work_dir)

    if dem_dir:

        if inps.flag_ssara:

            call_ssara_dem(inps, dem_dir)

            print('You have finished SSARA!')
        elif inps.flag_boundingBox:
            print('DEM generation using ISCE')
            bbox = inps.template['topsStack.boundingBox'].strip("'")
            bbox = [val for val in bbox.split()]
            south = bbox[0]
            north = bbox[1]
            west = bbox[2]
            east = bbox[3].split('\'')[0]

            south = math.floor(float(south) - 0.5)
            north = math.ceil(float(north) + 0.5)
            west = math.floor(float(west) - 0.5)
            east = math.ceil(float(east) + 0.5 )

            demBbox = str(int(south)) + ' ' + str(int(north)) + ' ' + str(int(west)) + ' ' + str(int(east))
            command = 'dem.py -a stitch -b ' + demBbox + ' -c -u https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/'
            message_rsmas.log(os.getcwd(), command)

            if os.getenv('DOWNLOADHOST') == 'local':
                try:
                    proc = subprocess.Popen(command,  stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True, universal_newlines=True)
                    output, error = proc.communicate()
                    if proc.returncode != 0:
                        raise Exception('ERROR starting dem.py subprocess')   # FA 8/19: I don't think this happens, errors are in output
                except subprocess.CalledProcessError as exc:
                    print("Command failed. Exit code, StdErr:", exc.returncode, exc.output)
                    sys.exit('Error produced by dem.py')
                else:
                    if 'Could not create a stitched DEM. Some tiles are missing' in output:
                        os.chdir('..')
                        shutil.rmtree('DEM')
                        sys.exit('Error in dem.py: Tiles are missing. Ocean???')

            else:
                dem_dir = os.getcwd()
                ssh_command_list = ['s.bgood', 'cd {0}'.format(dem_dir), command]
                host = os.getenv('DOWNLOADHOST')
                try:
                    status = ssh_with_commands(host, ssh_command_list)
                except subprocess.CalledProcessError as exc:
                    print("Command failed. Exit code, StdErr:", exc.returncode, exc.output)
                    sys.exit('Error produced by dem.py using ' + host)

            #print('Exit status from dem.py: {0}'.format(status))

            xmlFile = glob.glob('demLat_*.wgs84.xml')[0]

            fin = open(xmlFile, 'r')
            fout = open("tmp.txt", "wt")
            for line in fin:
                fout.write(line.replace('demLat', dem_dir + '/demLat'))
            fin.close()
            fout.close()
            os.rename('tmp.txt', xmlFile)

        else:
            sys.exit('Error: unsupported demMethod option: ' + inps.template['topsStack.demMethod'])

        print('\n###############################################')
        print('End of dem_rsmas.py')
        print('################################################\n')

    return None
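
The ISCE branch above pads the template bounding box by half a degree and rounds outward before building the dem.py call; the padding step on its own, assuming the same 'south north west east' ordering used by the template, is sketched below:

import math


def padded_dem_bbox(bounding_box, pad=0.5):
    """Return integer 'S N W E' bounds padded outward by pad degrees."""
    south, north, west, east = [float(val) for val in bounding_box.strip("'").split()[:4]]
    return '{} {} {} {}'.format(int(math.floor(south - pad)),
                                int(math.ceil(north + pad)),
                                int(math.floor(west - pad)),
                                int(math.ceil(east + pad)))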
Example 15
def main(iargs=None):
    inps = putils.cmd_line_parse(iargs, script='create_runfiles')
    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    #time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = inps.work_dir
    inps.num_data = 1

    job_obj = JOB_SUBMIT(inps)
    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_dir = inps.template[inps.prefix + 'Stack.demDir']
    except:
        dem_dir = 'DEM'

    try:
        dem_file = glob.glob(dem_dir + '/*.wgs84')[0]
        inps.template[inps.prefix + 'Stack.demDir'] = dem_file
    except:
        raise SystemExit('DEM does not exist')

    slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    os.makedirs(slc_dir, exist_ok=True)

    # slc_dir size in MB; if smaller than 500 MB, assume there are no SLCs yet
    if int(get_size(slc_dir) / 1024**2) < 500:

        # Unpack Raw data:
        if inps.template['raw_image_dir'] not in [None, 'None']:
            raw_image_dir = inps.template['raw_image_dir']
        else:
            raw_image_dir = os.path.join(inps.work_dir, 'RAW_data')

        if os.path.exists(raw_image_dir):
            unpackObj = Sensors(
                raw_image_dir,
                slc_dir,
                remove_file='False',
                multiple_raw_frame=inps.template['multiple_raw_frame'])
            unpack_run_file = unpackObj.start()
            unpackObj.close()

            job_obj.write_batch_jobs(batch_file=unpack_run_file)
            job_status = job_obj.submit_batch_jobs(batch_file=unpack_run_file)

            if not job_status:
                raise Exception('ERROR: Unpacking failed')
        else:
            raise Exception('ERROR: No data (SLC or Raw) available')

    # make run file:
    run_files_dirname = "run_files"
    config_dirname = "configs"

    if inps.copy_to_tmp:
        run_files_dirname += "_tmp"
        config_dirname += "_tmp"

    run_dir = os.path.join(inps.work_dir, run_files_dirname)
    config_dir = os.path.join(inps.work_dir, config_dirname)

    for directory in [run_dir, config_dir]:
        if os.path.exists(directory):
            shutil.rmtree(directory)

    inps.Stack_template = pathObj.correct_for_isce_naming_convention(inps)
    if inps.ignore_stack and os.path.exists(inps.work_dir +
                                            '/coreg_secondarys'):
        shutil.rmtree(inps.work_dir + '/tmp_coreg_secondarys',
                      ignore_errors=True)
        shutil.move(inps.work_dir + '/coreg_secondarys',
                    inps.work_dir + '/tmp_coreg_secondarys')

    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    if inps.ignore_stack and os.path.exists(inps.work_dir +
                                            '/tmp_coreg_secondarys'):
        shutil.move(inps.work_dir + '/tmp_coreg_secondarys',
                    inps.work_dir + '/coreg_secondarys')

    if os.path.isfile(run_dir + '/run_06_extract_stack_valid_region'):
        with open(run_dir + '/run_06_extract_stack_valid_region', 'r') as f:
            line = f.readlines()
        with open(run_dir + '/run_06_extract_stack_valid_region', 'w') as f:
            f.writelines(['rm -rf ./stack; '] + line)

    run_file_list = putils.make_run_list(inps.work_dir)
    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.write(item + '\n')

    if inps.copy_to_tmp:
        run_file_list = [
            item.replace("/run_files/", "/run_files_tmp/")
            for item in run_file_list
        ]
        with open(inps.work_dir + '/run_files_tmp_list', 'w') as run_file:
            for item in run_file_list:
                run_file.write(item + '\n')
        shutil.copytree(pathObj.rundir, run_dir)

    if inps.prefix == 'tops':
        # check for orbits
        orbit_dir = os.getenv('SENTINEL_ORBITS')
        local_orbit = os.path.join(inps.work_dir, 'orbits')
        precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
        if len(precise_orbits_in_local) > 0:
            for orbit_file in precise_orbits_in_local:
                os.system('cp {} {}'.format(orbit_file, orbit_dir))

    # Writing job files
    if inps.write_jobs:
        for item in run_file_list:
            job_obj.write_batch_jobs(batch_file=item)

        if inps.template['processingMethod'] == 'smallbaseline':
            job_name = 'smallbaseline_wrapper'
            job_file_name = job_name
            command = [
                'smallbaselineApp.py', inps.custom_template_file, '--dir',
                'mintpy'
            ]

            # pre_command = ["""[[ $(ls mintpy/time* | wc -l) -eq 1 ]] && rm mintpy/time*"""]
            pre_command = ["clean_mintpy_dir.bash;"]
            command = pre_command + command

            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')
        else:
            job_name = 'minopy_wrapper'
            job_file_name = job_name
            command = [
                'minopyApp.py', inps.custom_template_file, '--dir', 'minopy'
            ]
            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')

        job_name = 'insarmaps'
        job_file_name = job_name
        command = ['ingest_insarmaps.py', inps.custom_template_file]
        job_obj.submit_script(job_name,
                              job_file_name,
                              command,
                              writeOnly='True')

    print("copy_to_tmp: {}".format(inps.copy_to_tmp))
    if inps.copy_to_tmp:
        #run_dir_tmp = os.path.join(inps.work_dir, 'run_files_tmp')
        config_dir_tmp = os.path.join(inps.work_dir, 'configs_tmp')
        shutil.copytree(os.path.join(inps.work_dir, "configs"), config_dir_tmp)

        cmd = "update_configs_for_tmp.bash {}".format(inps.work_dir)
        subprocess.Popen(cmd, shell=True)

    return None
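
get_size is imported from project code; a directory-size helper with the same intent (total size of the files under slc_dir, in bytes) might look like the following sketch:

import os


def get_size_bytes(directory):
    """Return the total size in bytes of all regular files under directory."""
    total = 0
    for root, _, files in os.walk(directory):
        for name in files:
            path = os.path.join(root, name)
            if os.path.isfile(path):
                total += os.path.getsize(path)
    return total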
Example 16
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='upload_data_products')

    if inps.image_products_flag:
        inps.mintpy_products_flag = False
    
    os.chdir(inps.work_dir)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    # get DATA_SERVER and return if it does not exist

    DATA_SERVER = '[email protected]'

    #try:
    #    DATA_SERVER = os.getenv('DATA_SERVER')
    #except:
    #    return

    project_name = putils.get_project_name(inps.custom_template_file)

    if inps.mintpy_products_flag:

        REMOTE_DIR = '/data/HDF5EOS/'
        destination = DATA_SERVER + ':' + REMOTE_DIR

        scp_list = [
                '/mintpy/pic',
                '/mintpy/*.he5',
                '/mintpy/inputs',
                '/remora_*'
                ]
        
        if inps.mintpy_products_all_flag:
            scp_list = [ '/mintpy' ]

        command = 'ssh ' + DATA_SERVER + ' mkdir -p ' + REMOTE_DIR + project_name + '/mintpy'
        print (command)
        status = subprocess.Popen(command, shell=True).wait()
        if status != 0:
            raise Exception('ERROR in upload_data_products.py')

        for pattern in scp_list:
            if ( len(glob.glob(inps.work_dir + '/' + pattern)) >= 1 ):
                command = 'scp -r ' + inps.work_dir + pattern + ' ' + destination + project_name + '/'.join(pattern.split('/')[0:-1])
                print (command)
                status = subprocess.Popen(command, shell=True).wait()
                if status != 0:
                    raise Exception('ERROR in upload_data_products.py')

                print ('\nAdjusting permissions:')
                command = 'ssh ' + DATA_SERVER + ' chmod -R u=rwX,go=rX ' + REMOTE_DIR + project_name 
                print (command)
                status = subprocess.Popen(command, shell=True).wait()
                if status != 0:
                    raise Exception('ERROR in upload_data_products.py')

    if inps.image_products_flag:
        REMOTE_DIR = '/data/image_products/'
        destination = DATA_SERVER + ':' + REMOTE_DIR

        rsync_list = [
                '/image_products/*',
                ]

        command = 'ssh ' + DATA_SERVER + ' mkdir -p ' + REMOTE_DIR + project_name
        print (command)
        status = subprocess.Popen(command, shell=True).wait()
        if status != 0:
            raise Exception('ERROR in upload_data_products.py')


        for pattern in rsync_list:
            command = 'rsync -avuz -e ssh --chmod=Du=rwx,Dg=rx,Do=rx,Fu=rw,Fg=r,Fo=r ' + inps.work_dir + pattern + ' ' + destination + project_name + '/'.join(pattern.split('/')[0:-1])
            print (command)
            status = subprocess.Popen(command, shell=True).wait()
            if status != 0:
                raise Exception('ERROR in upload_data_products.py')

        return None

    return None
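
The upload loops above build scp and rsync command strings by concatenation; the pieces are easier to see when assembled in one place, as in this sketch (server, remote directory, project name, and pattern are passed in rather than hard-coded, and the option string is copied from the example):

def build_rsync_command(work_dir, pattern, data_server, remote_dir, project_name):
    """Assemble the rsync command that mirrors one pattern to the data server (illustrative)."""
    destination = data_server + ':' + remote_dir + project_name + '/'.join(pattern.split('/')[:-1])
    return ('rsync -avuz -e ssh --chmod=Du=rwx,Dg=rx,Do=rx,Fu=rw,Fg=r,Fo=r '
            + work_dir + pattern + ' ' + destination)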
Example 17
def main(iargs=None):
    """Downloads data with ssara and asfserial scripts."""

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'download_rsmas'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if inps.prefix == 'tops':
        if inps.template[inps.prefix + 'Stack.slcDir'] is not None:
            download_dir = inps.template[inps.prefix + 'Stack.slcDir']
        else:
            download_dir = os.path.join(inps.work_dir, 'SLC')
    else:
        if inps.template['raw_image_dir'] not in [None, 'None']:
            download_dir = inps.template['raw_image_dir']
        else:
            download_dir = os.path.join(inps.work_dir, 'RAW_data')

    os.makedirs(inps.work_dir, exist_ok=True)
    os.makedirs(download_dir, exist_ok=True)

    if ('SenDT' not in inps.project_name and 'SenAT' not in inps.project_name) or os.getenv('SSARA_ASF') == 'False':
        
        try:
            inps.template['ssaraopt.intersectsWithPoint']
            inps.ssaraopt = ' '.join(add_point_to_ssaraopt(inps.template, inps.ssaraopt.split(' ')))
        except:
            inps.ssaraopt = ' '.join(add_polygon_to_ssaraopt(inps.template, inps.ssaraopt.split(' '), delta_lat=inps.delta_lat))
        command = 'ssara_federated_query.py ' + inps.ssaraopt + ' --print' + ' --download'

        os.chdir(download_dir)
        message_rsmas.log(download_dir, command)

        status = subprocess.Popen(command, shell=True).wait()

        if status != 0:
            raise Exception('ERROR in ssara_federated_query.py')

        os.chdir(inps.work_dir)
        return

    if os.getenv('SSARA_ASF') == 'False':
        return

    download('ssara', inps.custom_template_file, download_dir, outnum=1)
    #download('asfserial', inps.custom_template_file, download_dir, outnum = 1)

    for i_download in [2, 3]:
        download_success = run_check_download(download_dir=download_dir)

        if not download_success:
            print('check_download.py: There were bad files, download again')
            message_rsmas.log(inps.work_dir, 'check_download.py: there were bad files, download again')

            download('ssara', inps.custom_template_file, download_dir, outnum=i_download)
Example 18
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='upload_data_products')

    os.chdir(inps.work_dir)

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    # get DATA_SERVER and return if it does not exist

    DATA_SERVER = '[email protected]'

    #try:
    #    DATA_SERVER = os.getenv('DATA_SERVER')
    #except:
    #    return

    project_name = putils.get_project_name(inps.custom_template_file)

    if inps.flag_mintpy_products:

        REMOTE_DIR = '/data/HDF5EOS/'
        destination = DATA_SERVER + ':' + REMOTE_DIR

        rsync_list = ['/mintpy/inputs', '/mintpy/pic', '/mintpy/*.he5']

        command = 'ssh ' + DATA_SERVER + ' mkdir -p ' + REMOTE_DIR + project_name
        print(command)
        status = subprocess.Popen(command, shell=True).wait()
        if status != 0:
            raise Exception('ERROR in upload_data_products.py')

        for pattern in rsync_list:
            command = 'rsync -avuz -e ssh --chmod=Du=rwx,Dg=rx,Do=rx,Fu=rw,Fg=r,Fo=r ' + inps.work_dir + pattern + ' ' + destination + project_name + '/'.join(
                pattern.split('/')[0:-1])
            print(command)
            status = subprocess.Popen(command, shell=True).wait()
            if status != 0:
                raise Exception('ERROR in upload_data_products.py')

        # temporary rsync of full mintpy folder
        # text for test PR 2
        command = 'rsync -avuz -e ssh --chmod=Du=rwx,Dg=rx,Do=rx,Fu=rw,Fg=r,Fo=r ' + inps.work_dir + '/mintpy ' + destination + project_name + '/full_mintpy'
        print(command)
        status = subprocess.Popen(command, shell=True).wait()
        if status != 0:
            raise Exception('ERROR in upload_data_products.py')
        return None

    if inps.flag_image_products:
        REMOTE_DIR = '/data/image_products/'
        destination = DATA_SERVER + ':' + REMOTE_DIR

        rsync_list = [
            '/image_products/*',
        ]

        command = 'ssh ' + DATA_SERVER + ' mkdir -p ' + REMOTE_DIR + project_name
        print(command)
        status = subprocess.Popen(command, shell=True).wait()
        if status != 0:
            raise Exception('ERROR in upload_data_products.py')

        for pattern in rsync_list:
            command = 'rsync -avuz -e ssh --chmod=Du=rwx,Dg=rx,Do=rx,Fu=rw,Fg=r,Fo=r ' + inps.work_dir + pattern + ' ' + destination + project_name + '/'.join(
                pattern.split('/')[0:-1])
            print(command)
            status = subprocess.Popen(command, shell=True).wait()
            if status != 0:
                raise Exception('ERROR in upload_data_products.py')

        return None

    return None
Example 19
def main(iargs=None):
    """
    Crops SLC images from Isce merged/SLC directory and creates georectified and orthorectified products.
    """

    inps = cmd_line_parse(iargs, script='export_amplitude_tif')

    secondary_dir = os.path.join(inps.work_dir, pathObj.mergedslcdir)
    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)

    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(pic_dir, os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(secondary_dir)

    try:
        os.system('rm '+ inps.input_file + '/geo*')
    except:
        print('geocoding ...')

    slc = inps.input_file

    if inps.im_type == 'ortho':
        inps.geo_reference_dir = os.path.join(inps.work_dir, pathObj.georeferencedir)
    else:
        inps.geo_reference_dir = os.path.join(inps.work_dir, pathObj.geomlatlondir)

    os.chdir(os.path.join(secondary_dir, inps.input_file))

    geocode_file(inps)

    gfile = 'geo_' + slc + '.slc.ml'
    ds = gdal.Open(gfile + '.vrt', gdal.GA_ReadOnly)
    array = np.abs(ds.GetRasterBand(1).ReadAsArray())
    del ds

    ##
    array = np.where(array > 0, 10.0 * np.log10(pow(array, 2)) - 83.0, array)

    if inps.im_type == 'ortho':
        dst_file = 'orthorectified_' + slc + '_backscatter.tif'
    else:
        dst_file = 'georectified_' + slc + '_backscatter.tif'

    data = gdal.Open(gfile, gdal.GA_ReadOnly)
    transform = data.GetGeoTransform()

    ##
    xmlfile = glob.glob(os.path.join(inps.work_dir, pathObj.referencedir, '*.xml'))[0]
    attributes = xmlread(xmlfile)
    Metadata = {'SAT': attributes['missionname'], 'Mode': attributes['passdirection'],
                'Image_Type': '{}_BackScatter'.format(inps.im_type), 'Date': slc}

    raster2geotiff(dst_file, transform, array, Metadata)

    print('Find the output in {}'.format(pic_dir))

    os.system('mv *.tif {}'.format(pic_dir))
    os.system('rm geo*')

    return
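
The conversion above turns amplitude into backscatter in decibels via 10*log10(amplitude**2) - 83, leaving zero-valued pixels untouched; the same step as a standalone sketch (the -83 dB calibration offset is copied from the example, not independently verified):

import numpy as np


def amplitude_to_backscatter_db(amplitude, calibration_offset_db=-83.0):
    """Convert an amplitude array to backscatter in dB; zero-valued pixels keep their value."""
    amplitude = np.asarray(amplitude, dtype=float)
    with np.errstate(divide='ignore'):
        converted = 10.0 * np.log10(amplitude ** 2) + calibration_offset_db
    return np.where(amplitude > 0, converted, amplitude)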
Example 20
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    global logger
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    os.chdir(inps.work_dir)

    if inps.template[inps.prefix + 'Stack.slcDir'] is not None:
        inps.slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    global project_slc_dir
    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    if not os.path.exists(inps.slc_dir):
        os.mkdir(inps.slc_dir)

    os.chdir(inps.slc_dir)

    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    dataset_template = Template(inps.custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv",
                     shell=True).wait()
    seasonal_start_date = None
    seasonal_end_date = None

    try:
        if dataset_template.options[
                'seasonalStartDate'] is not None and dataset_template.options[
                    'seasonalEndDate'] is not None:
            seasonal_start_date = dataset_template.options['seasonalStartDate']
            seasonal_end_date = dataset_template.options['seasonalEndDate']
    except:
        pass

    if inps.seasonalStartDate is not None and inps.seasonalEndDate is not None:
        seasonal_start_date = inps.seasonalStartDate
        seasonal_end_date = inps.seasonalEndDate

    if seasonal_start_date is not None and seasonal_end_date is not None:
        generate_seasonal_files_csv(dataset_template, seasonal_start_date,
                                    seasonal_end_date)
    else:
        generate_files_csv(project_slc_dir, dataset_template)

    parallel = False

    try:
        if dataset_template.options['parallel'] == 'yes':
            parallel = True
    except:
        pass
    """if inps.parallel == 'yes':
        parallel = True"""

    threads = os.cpu_count()

    try:
        if dataset_template.options['threads'] is not None:
            threads = int(dataset_template.options['threads'])
    except:
        pass
    """if inps.processes is not None:
        processes = inps.processes"""

    if parallel:
        run_parallel_download_asf_serial(project_slc_dir, threads)
    else:
        successful = run_download_asf_serial(project_slc_dir, logger)
        logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))

    change_file_permissions()
    logger.log(loglevel.INFO, "------------------------------------")
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv",
                     shell=True).wait()

    return None
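
Several of the try/except blocks above only read optional keys from dataset_template.options with a fallback; if those options behave like a dictionary, the same lookups can be written with .get(), as in this sketch (key names are copied from the example, defaults are assumptions):

import os


def read_download_options(options):
    """Return (parallel, threads, seasonal_start, seasonal_end) from a template-options mapping."""
    parallel = options.get('parallel', 'no') == 'yes'
    threads = int(options.get('threads') or os.cpu_count())
    seasonal_start = options.get('seasonalStartDate')
    seasonal_end = options.get('seasonalEndDate')
    return parallel, threads, seasonal_start, seasonal_end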
Example 21
def main(iargs=None):
    inps = putils.cmd_line_parse(iargs, script='create_runfiles')
    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = inps.work_dir
    job_obj = JOB_SUBMIT(inps)
    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template[inps.prefix + 'Stack.demDir'] = dem_file
    except:
        raise SystemExit('DEM does not exist')

    slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    os.makedirs(slc_dir, exist_ok=True)

    # slc_dir size in MB; if smaller than 500 MB, assume there are no SLCs yet
    if int(get_size(slc_dir) / 1024**2) < 500:

        # Unpack Raw data:
        if inps.template['raw_image_dir'] not in [None, 'None']:
            raw_image_dir = inps.template['raw_image_dir']
        else:
            raw_image_dir = os.path.join(inps.work_dir, 'RAW_data')

        if os.path.exists(raw_image_dir):
            unpackObj = Sensors(
                raw_image_dir,
                slc_dir,
                remove_file='False',
                multiple_raw_frame=inps.template['multiple_raw_frame'])
            unpack_run_file = unpackObj.start()
            unpackObj.close()

            job_obj.write_batch_jobs(batch_file=unpack_run_file)
            job_status = job_obj.submit_batch_jobs(batch_file=unpack_run_file)

            if not job_status:
                raise Exception('ERROR: Unpacking failed')
        else:
            raise Exception('ERROR: No data (SLC or Raw) available')

    # make run file:
    run_dir = os.path.join(inps.work_dir, 'run_files')
    config_dir = os.path.join(inps.work_dir, 'configs')
    for directory in [run_dir, config_dir]:
        if os.path.exists(directory):
            shutil.rmtree(directory)

    inps.Stack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)

    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.write(item + '\n')

    if inps.prefix == 'tops':
        # check for orbits
        orbit_dir = os.getenv('SENTINEL_ORBITS')
        local_orbit = os.path.join(inps.work_dir, 'orbits')
        precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
        if len(precise_orbits_in_local) > 0:
            for orbit_file in precise_orbits_in_local:
                os.system('cp {} {}'.format(orbit_file, orbit_dir))

    # Writing job files
    if inps.write_jobs:
        for item in run_file_list:
            job_obj.write_batch_jobs(batch_file=item)

        if inps.template['processingMethod'] == 'smallbaseline':
            job_name = 'smallbaseline_wrapper'
            job_file_name = job_name
            command = [
                'smallbaselineApp.py', inps.custom_template_file, '--dir',
                'mintpy'
            ]
            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')
        else:
            job_name = 'minopy_wrapper'
            job_file_name = job_name
            command = [
                'minopyApp.py', inps.custom_template_file, '--dir', 'minopy'
            ]
            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')

        job_name = 'insarmaps'
        job_file_name = job_name
        command = ['ingest_insarmaps.py', inps.custom_template_file]
        job_obj.submit_script(job_name,
                              job_file_name,
                              command,
                              writeOnly='True')

    return None
Example 22
def main(iargs=None):
    """ generates interferograms and coherence images in GeoTiff format """

    inps = putils.cmd_line_parse(iargs)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'ifgramStack_to_ifgram_and_coherence'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    out_dir = inps.work_dir + '/' + pathObj.tiffdir
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    try:
        file = glob.glob(inps.work_dir + '/mintpy/inputs/ifgramStack.h5')[0]
    except:
        raise Exception('ERROR in ' + os.path.basename(__file__) +
                        ': file ifgramStack.h5 not found')

    # modify network so that only one connection left
    arg_string = file + ' --max-conn-num 1'
    print('modify_network.py', arg_string)
    mintpy.modify_network.main(arg_string.split())

    if not os.path.isdir(inps.work_dir + '/mintpy/geo'):
        os.makedirs(inps.work_dir + '/mintpy/geo')

    # geocode ifgramStack
    geo_file = os.path.dirname(
        os.path.dirname(file)) + '/geo/geo_' + os.path.basename(file)
    lookup_file = os.path.dirname(
        os.path.dirname(file)) + '/inputs/geometryRadar.h5'
    template_file = os.path.dirname(
        os.path.dirname(file)) + '/smallbaselineApp_template.txt'
    arg_string = file + ' -t ' + template_file + ' -l ' + lookup_file + ' -o ' + geo_file
    print('geocode.py', arg_string)
    mintpy.geocode.main(arg_string.split())

    # loop over all interferograms
    obj = ifgramStack(geo_file)
    obj.open()
    date12_list = obj.get_date12_list()
    # dummy_data, atr = readfile.read(geo_file)

    for i in range(len(date12_list)):
        date_str = date12_list[i]
        print('Working on ... ' + date_str)
        data_coh = readfile.read(file, datasetName='coherence-' + date_str)[0]
        data_unw = readfile.read(file,
                                 datasetName='unwrapPhase-' + date_str)[0]

        fname_coh = out_dir + '/coherence_' + date_str + '.tif'
        fname_unw = out_dir + '/interferogram_' + date_str + '.tif'

        create_geotiff(obj,
                       data=data_coh,
                       outfile=fname_coh,
                       type='coherence',
                       work_dir=inps.work_dir)
        create_geotiff(obj,
                       data=data_unw,
                       outfile=fname_unw,
                       type='interferogram',
                       work_dir=inps.work_dir)
    return
Example 23
def main(iargs=None):
    """ create orth and geo rectifying run jobs and submit them. """

    inps = putils.cmd_line_parse(iargs)

    inps.geom_masterDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.master = os.path.join(inps.work_dir, pathObj.masterdir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except IndexError:
        print('DEM does not exist!')
        sys.exit(1)

    if not os.path.exists(inps.geom_masterDir):
        os.mkdir(inps.geom_masterDir)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'export_ortho_geo'
    job_name = job_file_name

    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:

        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    demZero = create_demZero(inps.dem, inps.geom_masterDir)

    swathList = ut.getSwathList(inps.master)

    create_georectified_lat_lon(swathList, inps.master, inps.geom_masterDir, demZero)

    merge_burst_lat_lon(inps)

    multilook_images(inps)

    run_file_list = make_run_list_amplitude(inps)

    for item in run_file_list:
        step_name = 'amplitude_ortho_geo'
        try:
            memorymax = config[step_name]['memory']
        except KeyError:
            memorymax = config['DEFAULT']['memory']

        try:
            if config[step_name]['adjust'] == 'True':
                walltimelimit = putils.walltime_adjust(config[step_name]['walltime'])
            else:
                walltimelimit = config[step_name]['walltime']
        except KeyError:
            walltimelimit = config['DEFAULT']['walltime']

        queuename = os.getenv('QUEUENAME')

        putils.remove_last_job_running_products(run_file=item)

        jobs = js.submit_batch_jobs(batch_file=item,
                                    out_dir=os.path.join(inps.work_dir, 'run_files'),
                                    work_dir=inps.work_dir, memory=memorymax,
                                    walltime=walltimelimit, queue=queuename)

        putils.remove_zero_size_or_length_error_files(run_file=item)
        putils.raise_exception_if_job_exited(run_file=item)
        putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
        putils.move_out_job_files_to_stdout(run_file=item)
    return
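
Both this example and Example No. 29 below adjust a per-step walltime with putils.walltime_adjust before submitting batch jobs; that helper is not shown. As an illustration only (the real function's logic and signature are not visible here), scaling an 'HH:MM:SS' walltime string by a factor could be done like this:

import datetime


def scale_walltime(walltime, factor):
    """Scale an 'HH:MM:SS' walltime string by a factor (hypothetical helper,
    illustrating the kind of adjustment walltime_adjust presumably performs)."""
    hours, minutes, seconds = (int(v) for v in walltime.split(':'))
    total = datetime.timedelta(hours=hours, minutes=minutes, seconds=seconds) * factor
    total_seconds = int(total.total_seconds())
    return '{:02d}:{:02d}:{:02d}'.format(total_seconds // 3600,
                                         (total_seconds % 3600) // 60,
                                         total_seconds % 60)

# e.g. scale_walltime('02:00:00', 1.5) returns '03:00:00'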
Example No. 24
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='smallbaseline_wrapper')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))
    
    #########################################
    # stripmap prep to isce
    #########################################
    if inps.template['acquisition_mode'] == 'stripmap':
        inps.dsetDir = inps.work_dir + '/Igrams'
        inps.slcDir = inps.work_dir + '/merged/SLC'
        inps.geometryDir = inps.work_dir + '/geom_master'
        inps.baselineDir = inps.work_dir + '/baselines'
        masterDate = inps.template['stripmapStack.master']
        if masterDate == 'None':
            masterDate = os.listdir(inps.slcDir)[0]
        command1 = 'cp -r ' + inps.slcDir + '/' + masterDate + '/masterShelve ' + inps.work_dir + '/.'
        print(command1)
        subprocess.Popen(command1, shell=True).wait()
        inps.metaFile = inps.work_dir + '/masterShelve/data.dat'
        command2 = ('prep_isce.py -d ' + inps.dsetDir + ' -m ' + inps.metaFile +
                    ' -b ' + inps.baselineDir + ' -g ' + inps.geometryDir)
        print(command2)
        subprocess.Popen(command2, shell=True).wait()
    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'smallbaseline_wrapper'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    os.chdir(inps.work_dir)

    try:
        with open('out_mintpy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main([inps.custom_template_file])
    except:
        with open('out_mintpy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main([inps.custom_template_file])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Mintpy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--mintpy'])

    return None
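
The try/except around smallbaselineApp.main above runs the workflow a second time when the first call fails, just to capture stderr. A sketch of an alternative that captures both streams in a single run is shown below; it assumes smallbaselineApp is importable as in this example (e.g. from mintpy import smallbaselineApp in older MintPy layouts).

import contextlib

from mintpy import smallbaselineApp   # assumed import path; matches the call above


def run_smallbaselineapp_logged(custom_template_file):
    """Run smallbaselineApp once, sending stdout to out_mintpy.o and stderr to out_mintpy.e."""
    with open('out_mintpy.o', 'w') as out, open('out_mintpy.e', 'w') as err, \
            contextlib.redirect_stdout(out), contextlib.redirect_stderr(err):
        smallbaselineApp.main([custom_template_file])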
Example No. 25
def main(iargs=None):
    """Downloads data with ssara and asfserial scripts."""

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'download_rsmas'
    job_name = job_file_name

    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    if inps.template['topsStack.slcDir'] is not None:
        slc_dir = inps.template['topsStack.slcDir']
    else:
        slc_dir = os.path.join(inps.work_dir, 'SLC')

    if not os.path.isdir(inps.work_dir):
        os.makedirs(inps.work_dir)

    if not os.path.isdir(slc_dir):
        os.makedirs(slc_dir)

    # if satellite is not Sentinel (not tried yet)
    if 'SenDT' not in inps.project_name and 'SenAT' not in inps.project_name:

        ssara_call = ['ssara_federated_query.py'] + inps.ssaraopt + ['--print', '--download']
        ssara_process = subprocess.Popen(ssara_call)
        completion_status = ssara_process.wait()

        return

    download('ssara', inps.custom_template_file, slc_dir, outnum=1)
    download('asfserial', inps.custom_template_file, slc_dir, outnum=1)

    for i_download in [2, 3]:
        download_success = run_check_download(slc_dir=slc_dir)

        if not download_success:
            print('check_download.py: There were bad files, download again')
            message_rsmas.log(inps.work_dir, 'check_download.py: there were bad files, download again')

            download('ssara', inps.custom_template_file, slc_dir, outnum=i_download)
            download('asfserial', inps.custom_template_file, slc_dir, outnum=i_download)
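
run_check_download above is expected to return False when the downloaded archives look broken, which triggers the extra download rounds. Its implementation is not shown; a hypothetical stand-in that only checks for empty or truncated Sentinel-1 zip files could look like this (the function name and the checks are assumptions):

import glob
import os
import zipfile


def downloads_look_complete(slc_dir):
    """Return False if any Sentinel-1 zip in slc_dir is empty or not a readable archive."""
    for zip_file in glob.glob(os.path.join(slc_dir, 'S1*.zip')):
        if os.path.getsize(zip_file) == 0:
            return False
        if not zipfile.is_zipfile(zip_file):   # truncated or corrupt download
            return False
    return True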
Example No. 26
def main(iargs=None):
    # set defaults: ssara=True is set in dem_parser; use the custom template field if given
    inps = cmd_line_parse(iargs, script='dem_rsmas')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    if not inps.flag_boundingBox and not inps.flag_ssara:
        if 'demMethod' in list(inps.template.keys()):
            if inps.template['demMethod'] == 'ssara':
                inps.flag_ssara = True
                inps.flag_boundingBox = False
            if inps.template['demMethod'] == 'boundingBox':
                inps.flag_ssara = False
                inps.flag_boundingBox = True
    elif inps.flag_boundingBox:
        inps.flag_ssara = False
    else:
        inps.flag_ssara = True

    dem_dir = os.path.join(inps.work_dir, 'DEM')
    if not exist_valid_dem_dir(dem_dir):
        os.mkdir(dem_dir)

    try:
        inps.slc_dir = inps.template['topsStack.slcDir']
    except KeyError:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    # 10/21: inps.template['topsStack.slcDir'] may contain ./SLC  (it would be better to change where topsStack.slcDir is assigned)
    if '.' in inps.slc_dir:
        inps.slc_dir = inps.slc_dir.replace(".", os.getcwd())

    if 'COSMO-SKYMED' in inps.ssaraopt:
        inps.slc_dir = inps.slc_dir.replace('SLC', 'RAW_data')

    # FA 10/2021: We probably should check here whether a DEM/*wgs84 file exists and exit if it does.
    # That could save time. On the other hand, most steps can be re-run even if data exist.
    os.chdir(dem_dir)

    if inps.flag_ssara:

        call_ssara_dem(inps, dem_dir)

        print('You have finished SSARA!')
        cmd = 'fixImageXml.py -f -i {}'.format(glob.glob(dem_dir + '/dem*.wgs84')[0])
        os.system(cmd)

    elif inps.flag_boundingBox or inps.flag_ssara_kml:
        print('DEM generation using ISCE')
        if inps.flag_boundingBox:
            bbox = inps.template[inps.prefix + 'Stack.boundingBox'].strip("'")
        if inps.flag_ssara_kml:
            # ssara_kml_file = sorted(glob.glob(inps.work_dir + '/SLC/ssara_search_*.kml'))[-1]
            try:
                ssara_kml_file = sorted(glob.glob(inps.slc_dir + '/ssara_search_*.kml'))[-1]
            except IndexError:
                raise FileNotFoundError('No SLC/ssara_search_*.kml found')
            bbox = get_boundingBox_from_kml.main([ssara_kml_file, '--delta_lon', '0'])
            bbox = bbox.split('SNWE:')[1]

        print('bbox:', bbox)
        bbox = bbox.split()

        south = bbox[0]
        north = bbox[1]
        west = bbox[2]
        east = bbox[3].split('\'')[0]

        south = math.floor(float(south) - 0.5)
        north = math.ceil(float(north) + 0.5)
        west = math.floor(float(west) - 0.5)
        east = math.ceil(float(east) + 0.5)

        demBbox = str(int(south)) + ' ' + str(int(north)) + ' ' + str(int(west)) + ' ' + str(int(east))
        command = 'dem.py -a stitch --filling --filling_value 0 -b ' + demBbox + ' -c -u https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/'

        message_rsmas.log(os.getcwd(), command)

        if os.getenv('DOWNLOADHOST') == 'local':
            try:
                proc = subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True,
                                        universal_newlines=True)
                output, error = proc.communicate()
                print(error)
                if proc.returncode != 0:
                    raise Exception(
                        'ERROR starting dem.py subprocess')  # FA 8/19: I don't think this happens, errors are in output
                print(output)
                print(error, file=sys.stderr)
            except subprocess.CalledProcessError as exc:
                print("Command failed. Exit code, StdErr:", exc.returncode, exc.output)
                sys.exit('Error produced by dem.py')
            else:
                if 'Could not create a stitched DEM. Some tiles are missing' in output:
                    os.chdir('..')
                    shutil.rmtree('DEM')
                    sys.exit('Error in dem.py: Tiles are missing. Ocean???')
        else:
            dem_dir = os.getcwd()
            ssh_command_list = ['s.bgood', 'cd {0}'.format(dem_dir), command]
            host = os.getenv('DOWNLOADHOST')
            try:
                status = ssh_with_commands(host, ssh_command_list)
            except subprocess.CalledProcessError as exc:
                print("Command failed. Exit code, StdErr:", exc.returncode, exc.output)
                sys.exit('Error produced by dem.py using ' + host)

        cmd = 'fixImageXml.py -f -i {}'.format(glob.glob(dem_dir + '/demLat_*.wgs84')[0])
        os.system(cmd)
        # print('Exit status from dem.py: {0}'.format(status))

        # xmlFile = glob.glob('demLat_*.wgs84.xml')[0]

        # fin = open(xmlFile, 'r')
        # fout = open("tmp.txt", "wt")
        # for line in fin:
        #    fout.write(line.replace('demLat', dem_dir + '/demLat'))
        # fin.close()
        # fout.close()
        # os.rename('tmp.txt', xmlFile)

    else:
        sys.exit('Error: unsupported demMethod option: ' + inps.template['demMethod'])

    print('\n###############################################')
    print('End of dem_rsmas.py')
    print('################################################\n')

    return None
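
For the boundingBox/KML branch above, the SNWE box is padded by half a degree and snapped outward to whole degrees before being passed to dem.py. A short worked example of that arithmetic, using a made-up SNWE string:

import math

snwe = '25.8 26.6 -80.5 -79.9'               # hypothetical 'S N W E' bounding box
south, north, west, east = (float(v) for v in snwe.split())

# pad by 0.5 degree and snap outward to whole degrees, as in dem_rsmas above
dem_bbox = '{} {} {} {}'.format(int(math.floor(south - 0.5)),
                                int(math.ceil(north + 0.5)),
                                int(math.floor(west - 0.5)),
                                int(math.ceil(east + 0.5)))
print(dem_bbox)                               # -> 25 28 -81 -79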
Example No. 27
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='ingest_insarmaps')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'ingest_insarmaps'
    job_name = job_file_name

    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    os.chdir(inps.work_dir)

    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    hdfeos_file = glob.glob(inps.work_dir + '/mintpy/S1*.he5')
    hdfeos_file.extend(glob.glob(inps.work_dir + '/mintpy/SUBSET_*/S1*.he5'))
    hdfeos_file = hdfeos_file[0]

    json_folder = inps.work_dir + '/mintpy/JSON'
    mbtiles_file = json_folder + '/' + os.path.splitext(os.path.basename(hdfeos_file))[0] + '.mbtiles'

    if os.path.isdir(json_folder):
        shutil.rmtree(json_folder)

    command1 = 'hdfeos5_2json_mbtiles.py ' + hdfeos_file + ' ' + json_folder + ' |& tee out_insarmaps.log'
    command2 = 'json_mbtiles2insarmaps.py -u ' + password.insaruser + ' -p ' + password.insarpass + ' --host ' + \
               'insarmaps.miami.edu -P rsmastest -U rsmas\@gmail.com --json_folder ' + \
               json_folder + ' --mbtiles_file ' + mbtiles_file + ' |& tee -a out_insarmaps.log'

    with open(inps.work_dir + '/mintpy/run_insarmaps', 'w') as f:
        f.write(command1 + '\n')
        f.write(command2 + '\n')

    out_file = 'out_insarmaps'
    message_rsmas.log(inps.work_dir, command1)
    command1 = '('+command1+' | tee '+out_file+'.o) 3>&1 1>&2 2>&3 | tee '+out_file+'.e'
    status = subprocess.Popen(command1, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in hdfeos5_2json_mbtiles.py')

    # TODO: Change subprocess call to get back error code and send error code to logger
    message_rsmas.log(inps.work_dir, command2)
    command2 = '('+command2+' | tee -a '+out_file+'.o) 3>&1 1>&2 2>&3 | tee -a '+out_file+'.e'
    status = subprocess.Popen(command2, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in json_mbtiles2insarmaps.py')

    # Email insarmaps results:
    if inps.email:
        email_results.main([inps.custom_template_file, '--insarmap'])

    return None
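
Both insarmaps commands above are wrapped in the '(cmd | tee file.o) 3>&1 1>&2 2>&3 | tee file.e' idiom, which swaps stdout and stderr so the two streams land in separate .o and .e logs. A small helper that builds that wrapper (same idiom, hypothetical function name) could look like this:

def tee_command(command, out_file, append=False):
    """Wrap a shell command so stdout is teed to <out_file>.o and stderr to <out_file>.e."""
    tee = 'tee -a' if append else 'tee'
    return '({cmd} | {tee} {out}.o) 3>&1 1>&2 2>&3 | {tee} {out}.e'.format(
        cmd=command, tee=tee, out=out_file)

# e.g. subprocess.Popen(tee_command(command1, 'out_insarmaps'), shell=True).wait()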
Example No. 28
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    if inps.prefix == 'stripmap':
        inps.num_bursts = 1

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'execute_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    run_file_list = putils.read_run_list(inps.work_dir)

    if inps.end_run == 0:
        inps.end_run = len(run_file_list)

    if inps.start_run != 0:
        inps.start_run = inps.start_run - 1

    if inps.step:
        inps.start_run = inps.step - 1
        inps.end_run = inps.step

    run_file_list = run_file_list[inps.start_run:inps.end_run]

    for item in run_file_list:

        putils.remove_last_job_running_products(run_file=item)

        job_status = job_obj.submit_batch_jobs(batch_file=item)

        if job_status:

            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item,
                                           work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

            date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                                  '%Y%m%d:%H%M%S')
            print(date_str + ' * Job {} completed'.format(item))

    date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                          '%Y%m%d:%H%M%S')
    print(date_str + ' * all jobs from {} to {} have been completed'.format(
        os.path.basename(run_file_list[0]), os.path.basename(
            run_file_list[-1])))

    return None
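
The start_run/end_run/step handling above converts the user's 1-based run numbers into a Python slice. A short worked example with a hypothetical eight-entry run list:

run_file_list = ['run_{:02d}'.format(i) for i in range(1, 9)]   # hypothetical run files

start_run, end_run, step = 3, 0, 0        # e.g. the user asked to start at run 3
if end_run == 0:
    end_run = len(run_file_list)          # default: run through the last file
if start_run != 0:
    start_run = start_run - 1             # user counts from 1, the slice from 0
if step:
    start_run, end_run = step - 1, step   # a single step overrides start/end

print(run_file_list[start_run:end_run])   # -> ['run_03', 'run_04', ..., 'run_08']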
Example No. 29
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')

    os.chdir(inps.work_dir)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'execute_runfiles'
    job_name = job_file_name

    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(
        inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:

        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir,
                         new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    command_line = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    message_rsmas.log(inps.work_dir, command_line)

    run_file_list = putils.read_run_list(inps.work_dir)

    if inps.endrun == 0:
        inps.endrun = len(run_file_list)

    if inps.startrun != 0:
        inps.startrun = inps.startrun - 1

    run_file_list = run_file_list[inps.startrun:inps.endrun]

    if os.getenv('JOBSCHEDULER') in ('LSF', 'PBS'):

        for item in run_file_list:
            step_name = '_'.join(item.split('_')[3:])
            try:
                memorymax = config[step_name]['memory']
            except KeyError:
                memorymax = config['DEFAULT']['memory']

            try:
                if config[step_name]['adjust'] == 'True':
                    walltimelimit = putils.walltime_adjust(
                        inps, config[step_name]['walltime'])
                else:
                    walltimelimit = config[step_name]['walltime']
            except KeyError:
                walltimelimit = config['DEFAULT']['walltime']

            queuename = os.getenv('QUEUENAME')

            putils.remove_last_job_running_products(run_file=item)

            jobs = js.submit_batch_jobs(batch_file=item,
                                        out_dir=os.path.join(
                                            inps.work_dir, 'run_files'),
                                        work_dir=inps.work_dir,
                                        memory=memorymax,
                                        walltime=walltimelimit,
                                        queue=queuename)

            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item,
                                           work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    else:
        for item in run_file_list:
            with open(item, 'r') as f:
                command_lines = f.readlines()
                for command_line in command_lines:
                    os.system(command_line)

    return None
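
When no LSF/PBS scheduler is configured, the example falls back to running every line of each run file with os.system, which silently ignores failures. A sketch of the same fallback with explicit error checking (an assumed improvement, not the original code):

import subprocess


def run_serially(run_file):
    """Execute every line of a run file locally, stopping at the first failing command."""
    with open(run_file) as f:
        for line_number, command in enumerate(f, start=1):
            command = command.strip()
            if not command:
                continue
            result = subprocess.run(command, shell=True)
            if result.returncode != 0:
                raise RuntimeError('{}: line {} exited with code {}'.format(
                    run_file, line_number, result.returncode))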
Example No. 30
def main(iargs=None):
    """ create orth and geo rectifying run jobs and submit them. """

    inps = putils.cmd_line_parse(iargs)

    import s1a_isce_utils as ut
    import mergeBursts as mb

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    inps.geom_masterDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.master = os.path.join(inps.work_dir, pathObj.masterdir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except IndexError:
        print('DEM does not exist!')
        sys.exit(1)

    if not os.path.exists(inps.geom_masterDir):
        os.mkdir(inps.geom_masterDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)

    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    demZero = create_demZero(inps.dem, inps.geom_masterDir)

    swathList = ut.getSwathList(inps.master)

    create_georectified_lat_lon(swathList, inps.master, inps.geom_masterDir,
                                demZero)

    merge_burst_lat_lon(inps)

    multilook_images(inps)

    run_file_list = make_run_list(inps)

    for item in run_file_list:

        putils.remove_last_job_running_products(run_file=item)

        job_status = job_obj.submit_batch_jobs(batch_file=item)

        if job_status:

            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item,
                                           work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    #upload_to_s3(pic_dir)
    minsar.upload_data_products.main(
        [inps.custom_template_file, '--imageProducts'])

    return