Example #1
def main(iargs=None):

    start_time = time.time()

    inps = process_rsmas_cmd_line_parse(iargs)

    inps = check_directories_and_inputs(inps)

    command_line = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    message_rsmas.log(inps.work_dir, '##### NEW RUN #####')
    message_rsmas.log(inps.work_dir, command_line)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
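    # With --submit, wrap this run into a scheduler job via JOB_SUBMIT; otherwise run the workflow steps directly.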
    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_file_name = 'process_rsmas'
        job = job_obj.submit_script(inps.project_name, job_file_name,
                                    sys.argv[:])
        # run_operations.py needs this print statement for now.
        # This is not for debugging purposes.
        # DO NOT REMOVE.
        print(job)

    else:
        objInsar = RsmasInsar(inps)
        objInsar.run(steps=inps.runSteps)

    # Timing
    m, s = divmod(time.time() - start_time, 60)
    print('\nTotal time: {:02.0f} mins {:02.1f} secs'.format(m, s))
    return
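The examples on this page repeat one submission pattern: parse the command line, log the call, and either re-submit the script through the scheduler with JOB_SUBMIT (when --submit is given) or run the work in place. Below is a minimal sketch of that pattern, assuming only the JOB_SUBMIT interface as it appears in these examples (constructor taking the parsed inps, and submit_script(job_name, job_file_name, command)); the helper name submit_self_and_exit is invented for illustration.

import os
import sys

from minsar.job_submission import JOB_SUBMIT


def submit_self_and_exit(inps, job_name, script_path, input_arguments):
    """Re-submit script_path as a scheduler job named job_name, then stop this process."""
    # Drop --submit so the scheduled copy of the script does the work directly.
    arguments = [arg for arg in input_arguments if arg != '--submit']
    command = [os.path.abspath(script_path)] + arguments
    job_obj = JOB_SUBMIT(inps)  # inps supplies work_dir, wall time, queue, ... (see the examples)
    job_obj.submit_script(job_name, job_name, command)
    sys.exit(0)


# Typical call from a script's main(), mirroring the examples above:
#   if inps.submit_flag:
#       submit_self_and_exit(inps, 'download_rsmas', __file__, sys.argv[1:])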
Example #2
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs)

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template['topsStack.demDir'] = dem_file
    except:
        raise SystemExit('DEM does not exist')

    # check for orbits
    orbit_dir = os.getenv('SENTINEL_ORBITS')

    # make run file
    inps.topsStack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)

    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.writelines(item + '\n')

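    # Copy any precise (POEORB) orbit files downloaded locally into the shared $SENTINEL_ORBITS directory.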
    local_orbit = os.path.join(inps.work_dir, 'orbits')
    precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
    if len(precise_orbits_in_local) > 0:
        for orbit_file in precise_orbits_in_local:
            os.system('cp {} {}'.format(orbit_file, orbit_dir))

    return None
Example #3
def rerun_job_if_exit_code_140(run_file, inps_dict):
    """Find files that exited because walltime exceeed and run again with twice the walltime"""
    from minsar.job_submission import JOB_SUBMIT

    inps = inps_dict

    search_string = 'Exited with exit code 140.'
    files, job_files = find_completed_jobs_matching_search_string(
        run_file, search_string)

    if len(files) == 0:
        return

    rerun_file = create_rerun_run_file(job_files)

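    # Exit code 140 indicates the walltime limit was hit, so rerun the failed jobs with twice the original walltime.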
    wall_time = extract_walltime_from_job_file(job_files[0])
    memory = extract_memory_from_job_file(job_files[0])
    new_wall_time = multiply_walltime(wall_time, factor=2)

    print(new_wall_time)
    print(memory)

    for file in files:
        os.remove(file.replace('.o*', '.e*'))

    move_out_job_files_to_stdout(run_file)

    # rename the stdout dir so it does not get deleted by the later move_out_job_files_to_stdout step
    stdout_dir = os.path.dirname(run_file) + '/stdout_' + os.path.basename(
        run_file)
    os.rename(stdout_dir, stdout_dir + '_pre_rerun')

    remove_last_job_running_products(run_file)
    #queuename = os.getenv('QUEUENAME')

    inps.wall_time = new_wall_time
    inps.work_dir = os.path.dirname(os.path.dirname(rerun_file))
    inps.out_dir = os.path.dirname(rerun_file)
    #inps.queue = queuename

    job_obj = JOB_SUBMIT(inps)
    job_obj.write_batch_jobs(batch_file=rerun_file)
    jobs = job_obj.submit_batch_jobs(batch_file=rerun_file)

    remove_zero_size_or_length_error_files(run_file=rerun_file)
    raise_exception_if_job_exited(run_file=rerun_file)
    concatenate_error_files(run_file=rerun_file,
                            work_dir=os.path.dirname(
                                os.path.dirname(run_file)))
    move_out_job_files_to_stdout(rerun_file)
Example #4
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/ssara_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    if not inps.template['topsStack.slcDir'] is None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')
    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

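    # Running without --submit (or inside the scheduled job): download the data with SSARA into the SLC directory.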
    if not os.path.isdir(project_slc_dir):
        os.makedirs(project_slc_dir)
    os.chdir(inps.slc_dir)

    logger.log(loglevel.INFO, "DATASET: %s",
               str(inps.custom_template_file.split('/')[-1].split(".")[0]))
    logger.log(loglevel.INFO, "DATE: %s",
               datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"))
    succesful = run_ssara(project_slc_dir, inps.custom_template_file,
                          inps.delta_lat, logger)
    logger.log(loglevel.INFO, "SUCCESS: %s", str(succesful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
Example #5
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='minopy_wrapper')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'minopy_wrapper'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)

    os.chdir(inps.work_dir)

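    # Run smallbaselineApp on the minopy directory, capturing stdout in out_minopy.o (stderr goes to out_minopy.e on failure).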
    try:
        with open('out_minopy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main(
                    [inps.custom_template_file, '--dir', pathObj.mintpydir])
    except:
        with open('out_minopy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main(
                    [inps.custom_template_file, '--dir', pathObj.mintpydir])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Minopy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--minopy'])

    return None
Example #6
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/ssara_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    #import pdb; pdb.set_trace()
    if not inps.template[inps.prefix + 'Stack.slcDir'] is None:
        inps.download_dir = inps.template[inps.prefix + 'Stack.slcDir']

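    # COSMO-SkyMed deliveries are raw data, so download into RAW_data; everything else goes to SLC.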
    if 'COSMO' in inps.template['ssaraopt.platform']:
        inps.download_dir = os.path.join(inps.work_dir, 'RAW_data')
    else:
        inps.download_dir = os.path.join(inps.work_dir, 'SLC')

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if not os.path.isdir(inps.download_dir):
        os.makedirs(inps.download_dir)
    os.chdir(inps.download_dir)

    succesful = run_ssara(inps.download_dir, inps.custom_template_file,
                          inps.delta_lat, logger)

    return None
Example #7
    def create_run_files(self):
        print('-' * 50)
        print('Create run files for all the steps')
        os.makedirs(self.run_dir, exist_ok=True)
        if self.write_job:
            from minsar.job_submission import JOB_SUBMIT
            inps = self.inps
            inps.custom_template_file = self.org_custom_template
            if self.org_custom_template is None:
                inps.custom_template_file = self.templateFile
            inps.work_dir = os.path.dirname(self.run_dir)
            inps.out_dir = self.run_dir
            inps.num_data = self.num_images
            job_obj = JOB_SUBMIT(inps)
        else:
            job_obj = None
        self.run_load_data('load_data', job_obj)
        self.run_phase_linking('phase_linking', job_obj)
        self.run_phase_linking('concatenate_patch', job_obj)
        self.run_interferogram('generate_ifgram', job_obj)
        self.run_unwrap('unwrap_ifgram', job_obj)
        self.run_load_ifg('load_ifgram', job_obj)
        self.run_ifgram_correction('ifgram_correction', job_obj)
        self.run_network_inversion('invert_network', job_obj)
        self.run_timeseries_correction('timeseries_correction', job_obj)

        del job_obj

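        # Record the ordered run files in run_files_list so later steps can execute them in sequence.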
        run_file_list = []
        for key, value in RUN_FILES.items():
            run_file_list.append(value)
        with open(self.workDir + '/run_files_list', 'w') as run_file:
            for item in run_file_list:
                run_file.writelines(item + '\n')

        return
Example #8
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='ingest_insarmaps')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    #time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'ingest_insarmaps'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)

    os.chdir(inps.work_dir)

    hdfeos_file = glob.glob(inps.work_dir + '/mintpy/*.he5')
    hdfeos_file += glob.glob(inps.work_dir + '/mintpy/SUBSET_*/*.he5')
    hdfeos_file = hdfeos_file[0]

    json_folder = inps.work_dir + '/mintpy/JSON'
    mbtiles_file = json_folder + '/' + os.path.splitext(
        os.path.basename(hdfeos_file))[0] + '.mbtiles'

    if os.path.isdir(json_folder):
        shutil.rmtree(json_folder)

    command1 = 'hdfeos5_2json_mbtiles.py ' + hdfeos_file + ' ' + json_folder
    command2 = 'json_mbtiles2insarmaps.py -u ' + password.insaruser + ' -p ' + password.insarpass + ' --host ' + \
               'insarmaps.miami.edu -P rsmastest -U rsmas\@gmail.com --json_folder ' + \
               json_folder + ' --mbtiles_file ' + mbtiles_file

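    # Save both ingest commands to run_insarmaps so they can be rerun by hand if needed.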
    with open(inps.work_dir + '/run_insarmaps', 'w') as f:
        f.write(command1 + '\n')
        f.write(command2 + '\n')

    out_file = 'out_ingest_insarmaps'
    message_rsmas.log(inps.work_dir, command1)
    #command1 = '('+command1+' | tee '+out_file+'.o) 3>&1 1>&2 2>&3 | tee '+out_file+'.e'
    status = subprocess.Popen(command1, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in hdfeos5_2json_mbtiles.py')

    # TODO: Change subprocess call to get back error code and send error code to logger
    message_rsmas.log(inps.work_dir, command2)
    #command2 = '('+command2+' | tee -a '+out_file+'.o) 3>&1 1>&2 2>&3 | tee -a '+out_file+'.e'
    status = subprocess.Popen(command2, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in json_mbtiles2insarmaps.py')

    # Email insarmaps results:
    if inps.email:
        message_rsmas.log(
            inps.work_dir,
            'email_results.py --insarmaps ' + inps.custom_template_file)
        email_results.main([inps.custom_template_file, '--insarmaps'])

    return None
Example #9
def main(iargs=None):
    inps = putils.cmd_line_parse(iargs, script='create_runfiles')
    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    #time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = inps.work_dir
    inps.num_data = 1

    job_obj = JOB_SUBMIT(inps)
    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_dir = inps.template[inps.prefix + 'Stack.demDir']
    except:
        dem_dir = 'DEM'

    try:
        dem_file = glob.glob(dem_dir + '/*.wgs84')[0]
        inps.template[inps.prefix + 'Stack.demDir'] = dem_file
    except:
        raise SystemExit('DEM does not exist')

    slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    os.makedirs(slc_dir, exist_ok=True)

    # slc_dir size in MB; below ~500 MB assume the SLCs are missing and unpack the raw data
    if int(get_size(slc_dir) / 1024**2) < 500:

        # Unpack Raw data:
        if not inps.template['raw_image_dir'] in [None, 'None']:
            raw_image_dir = inps.template['raw_image_dir']
        else:
            raw_image_dir = os.path.join(inps.work_dir, 'RAW_data')

        if os.path.exists(raw_image_dir):
            unpackObj = Sensors(
                raw_image_dir,
                slc_dir,
                remove_file='False',
                multiple_raw_frame=inps.template['multiple_raw_frame'])
            unpack_run_file = unpackObj.start()
            unpackObj.close()

            job_obj.write_batch_jobs(batch_file=unpack_run_file)
            job_status = job_obj.submit_batch_jobs(batch_file=unpack_run_file)

            if not job_status:
                raise Exception('ERROR: Unpacking failed')
        else:
            raise Exception('ERROR: No data (SLC or Raw) available')

    # make run file:
    run_files_dirname = "run_files"
    config_dirname = "configs"

    if inps.copy_to_tmp:
        run_files_dirname += "_tmp"
        config_dirname += "_tmp"

    run_dir = os.path.join(inps.work_dir, run_files_dirname)
    config_dir = os.path.join(inps.work_dir, config_dirname)

    for directory in [run_dir, config_dir]:
        if os.path.exists(directory):
            shutil.rmtree(directory)

    inps.Stack_template = pathObj.correct_for_isce_naming_convention(inps)
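    # When ignore_stack is set, stash coreg_secondarys while the stack workflow regenerates run files, then restore it below.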
    if inps.ignore_stack and os.path.exists(inps.work_dir +
                                            '/coreg_secondarys'):
        shutil.rmtree(inps.work_dir + '/tmp_coreg_secondarys',
                      ignore_errors=True)
        shutil.move(inps.work_dir + '/coreg_secondarys',
                    inps.work_dir + '/tmp_coreg_secondarys')

    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    if inps.ignore_stack and os.path.exists(inps.work_dir +
                                            '/tmp_coreg_secondarys'):
        shutil.move(inps.work_dir + '/tmp_coreg_secondarys',
                    inps.work_dir + '/coreg_secondarys')

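    # Prepend 'rm -rf ./stack' to run_06 so a stale stack directory is removed before the valid region is extracted.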
    if os.path.isfile(run_dir + '/run_06_extract_stack_valid_region'):
        with open(run_dir + '/run_06_extract_stack_valid_region', 'r') as f:
            line = f.readlines()
        with open(run_dir + '/run_06_extract_stack_valid_region', 'w') as f:
            f.writelines(['rm -rf ./stack; '] + line)

    run_file_list = putils.make_run_list(inps.work_dir)
    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.writelines(item + '\n')

    if inps.copy_to_tmp:
        run_file_list = [
            item.replace("/run_files/", "/run_files_tmp/")
            for item in run_file_list
        ]
        with open(inps.work_dir + '/run_files_tmp_list', 'w') as run_file:
            for item in run_file_list:
                run_file.writelines(item + '\n')
        shutil.copytree(pathObj.rundir, run_dir)

    if inps.prefix == 'tops':
        # check for orbits
        orbit_dir = os.getenv('SENTINEL_ORBITS')
        local_orbit = os.path.join(inps.work_dir, 'orbits')
        precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
        if len(precise_orbits_in_local) > 0:
            for orbit_file in precise_orbits_in_local:
                os.system('cp {} {}'.format(orbit_file, orbit_dir))

    # Writing job files
    if inps.write_jobs:
        for item in run_file_list:
            job_obj.write_batch_jobs(batch_file=item)

        if inps.template['processingMethod'] == 'smallbaseline':
            job_name = 'smallbaseline_wrapper'
            job_file_name = job_name
            command = [
                'smallbaselineApp.py', inps.custom_template_file, '--dir',
                'mintpy'
            ]

            # pre_command = ["""[[ $(ls mintpy/time* | wc -l) -eq 1 ]] && rm mintpy/time*"""]
            pre_command = ["clean_mintpy_dir.bash;"]
            command = pre_command + command

            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')
        else:
            job_name = 'minopy_wrapper'
            job_file_name = job_name
            command = [
                'minopyApp.py', inps.custom_template_file, '--dir', 'minopy'
            ]
            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')

        job_name = 'insarmaps'
        job_file_name = job_name
        command = ['ingest_insarmaps.py', inps.custom_template_file]
        job_obj.submit_script(job_name,
                              job_file_name,
                              command,
                              writeOnly='True')

    print("copy_to_tmp: {}".format(inps.copy_to_tmp))
    if inps.copy_to_tmp:
        #run_dir_tmp = os.path.join(inps.work_dir, 'run_files_tmp')
        config_dir_tmp = os.path.join(inps.work_dir, 'configs_tmp')
        shutil.copytree(os.path.join(inps.work_dir, "configs"), config_dir_tmp)

        cmd = "update_configs_for_tmp.bash {}".format(inps.work_dir)
        subprocess.Popen(cmd, shell=True)

    return None
Example #10
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/gep_download.log'
    logger = RsmasLogger(file_name=logfile_name)

    if not inps.template['raw_image_dir'] is None:
        inps.slc_dir = inps.template['raw_image_dir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'raw')

    project_slc_dir = os.path.join(inps.work_dir, 'raw')
    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_gep_csk'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if not os.path.isdir(project_slc_dir):
        os.makedirs(project_slc_dir)
    os.chdir(inps.slc_dir)

    logger.log(loglevel.INFO, "DATASET: %s",
               str(inps.custom_template_file.split('/')[-1].split(".")[0]))
    logger.log(loglevel.INFO, "DATE: %s",
               datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"))

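    # Limit the catalogue query to the template's ssaraopt date range and bounding box.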
    start_date = inps.template['ssaraopt.startDate']
    start_date = datetime.datetime.strptime(start_date, '%Y%m%d')
    end_date = inps.template['ssaraopt.endDate']
    end_date = datetime.datetime.strptime(end_date, '%Y%m%d')

    if 'stripmapStack.boundingBox' in inps.template:
        bbox = inps.template['stripmapStack.boundingBox']
    else:
        bbox = inps.template['topsStack.boundingBox']

    bbox = bbox.split(' ')
    bbox = '{},{},{},{}'.format(bbox[2], bbox[0], bbox[3], bbox[1])
    user = subprocess.check_output(
        "grep gepuser $RSMASINSAR_HOME/3rdparty/SSARA/password_config.py |\
     sed 's/\"//g''' | cut -d '=' -f 2",
        shell=True).decode('UTF-8').split('\n')[0]
    passwd = subprocess.check_output(
        "grep geppass $RSMASINSAR_HOME/3rdparty/SSARA/password_config.py |\
     sed 's/\"//g''' | cut -d '=' -f 2",
        shell=True).decode('UTF-8').split('\n')[0]

    command_get_list = 'curl -s "https://catalog.terradue.com/csk/search?format=atom&count=1000&bbox={bbox}" |\
     xmllint --format - | grep enclosure | sed "s/.*<link rel="enclosure".*href="\(.*\)"\/>/\1/g"'.format(
        bbox=bbox)

    print(command_get_list)
    data_list = subprocess.check_output(command_get_list, shell=True).decode(
        'UTF-8')  #os.system(command_get_list)

    data_list = data_list.split('/>\n')
    data_list = [x.split('"')[-2] for x in data_list[0:-1]]

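    # Build one authenticated curl command per granule inside the date window, then download six files in parallel.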
    cmd_all = []
    for data in data_list:
        date = datetime.datetime.strptime(
            data.split('.h5')[0].split('_')[-1][0:8], '%Y%m%d')
        if date >= start_date and date <= end_date:
            cmd = 'curl -u {username}:{password} -o $(basename ${enclosure}) {enclosure}'.format(
                username=user, password=passwd, enclosure=data)
            cmd_all.append(cmd)

    pool = mp.Pool(6)
    pool.map(os.system, cmd_all)
    pool.close()

    logger.log(loglevel.INFO, "Download Finish")
    logger.log(loglevel.INFO, "------------------------------------")

    return None
Example #11
def main(iargs=None):
    """Downloads data with ssara and asfserial scripts."""

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'download_rsmas'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if inps.prefix == 'tops':
        if not inps.template[inps.prefix + 'Stack.slcDir'] is None:
            download_dir = inps.template[inps.prefix + 'Stack.slcDir']
        else:
            download_dir = os.path.join(inps.work_dir, 'SLC')
    else:
        if not inps.template['raw_image_dir'] in [None, 'None']:
            download_dir = inps.template['raw_image_dir']
        else:
            download_dir = os.path.join(inps.work_dir, 'RAW_data')

    os.makedirs(inps.work_dir, exist_ok=True)
    os.makedirs(download_dir, exist_ok=True)

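    # Non-Sentinel projects (or SSARA_ASF=False) call ssara_federated_query.py directly instead of the ssara/check_download loop.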
    if 'SenDT' not in inps.project_name and 'SenAT' not in inps.project_name or os.getenv('SSARA_ASF') == 'False':
        
        try:
            inps.template['ssaraopt.intersectsWithPoint']
            inps.ssaraopt = ' '.join(add_point_to_ssaraopt(inps.template, inps.ssaraopt.split(' ')))
        except:
            inps.ssaraopt = ' '.join(add_polygon_to_ssaraopt(inps.template, inps.ssaraopt.split(' '), delta_lat=inps.delta_lat))
        command = 'ssara_federated_query.py ' + inps.ssaraopt + ' --print' + ' --download'

        os.chdir(download_dir)
        message_rsmas.log(download_dir, command)

        status = subprocess.Popen(command, shell=True).wait()

        if status != 0:
            raise Exception('ERROR in ssara_federated_query.py')

        os.chdir(inps.work_dir)
        return

    if os.getenv('SSARA_ASF') == 'False':
        return

    download('ssara', inps.custom_template_file, download_dir, outnum=1)
    #download('asfserial', inps.custom_template_file, download_dir, outnum = 1)

    for i_download in [2, 3]:
        download_success = run_check_download(download_dir=download_dir)

        if not download_success:
            print('check_download.py: There were bad files, download again')
            message_rsmas.log(inps.work_dir, 'check_download.py: there were bad files, download again')

            download('ssara', inps.custom_template_file, download_dir, outnum=i_download)
Example #12
    ssaraopt = [ x for x in ssaraopt if not x[0:7]=='--frame']

    return ssaraopt


if __name__ == "__main__":
    inps = command_line_parse(sys.argv[1:])

    inps.project_name = putils.get_project_name(custom_template_file=inps.template)
    inps.work_dir = putils.get_work_directory(None, inps.project_name)
    inps.slc_dir = inps.work_dir + "/SLC"

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.template.split(os.sep)[-1].split('.')[0]
        inps.wall_time = '24:00'
        job_obj = JOB_SUBMIT(inps)
        job_obj.submit_script(job_name, job_file_name, sys.argv[:])
        sys.exit(0)

    os.chdir(inps.work_dir)
    message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))
    if not os.path.isdir(inps.slc_dir):
        os.makedirs(inps.slc_dir)
    os.chdir(inps.slc_dir)

    succesful = run_ssara(inps.work_dir, inps.template, inps.delta_lat)
Example #13
def main(iargs=None):
    """ create orth and geo rectifying run jobs and submit them. """

    inps = putils.cmd_line_parse(iargs)

    import s1a_isce_utils as ut
    import mergeBursts as mb

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    inps.geom_masterDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.master = os.path.join(inps.work_dir, pathObj.masterdir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except:
        print('DEM does not exist!')
        sys.exit(1)

    if not os.path.exists(inps.geom_masterDir):
        os.mkdir(inps.geom_masterDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)

    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    demZero = create_demZero(inps.dem, inps.geom_masterDir)

    swathList = ut.getSwathList(inps.master)

    create_georectified_lat_lon(swathList, inps.master, inps.geom_masterDir,
                                demZero)

    merge_burst_lat_lon(inps)

    multilook_images(inps)

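    # Submit each generated run file as batch jobs and post-process the outputs (retry walltime exits, collect error files).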
    run_file_list = make_run_list(inps)

    for item in run_file_list:

        putils.remove_last_job_running_products(run_file=item)

        job_status = job_obj.submit_batch_jobs(batch_file=item)

        if job_status:

            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item,
                                           work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    #upload_to_s3(pic_dir)
    minsar.upload_data_products.main(
        [inps.custom_template_file, '--imageProducts'])

    return
Example #14
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    if inps.prefix == 'stripmap':
        inps.num_bursts = 1

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'execute_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

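    # Select which run files to execute: the start_run/end_run bounds, or a single step.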
    run_file_list = putils.read_run_list(inps.work_dir)

    if inps.end_run == 0:
        inps.end_run = len(run_file_list)

    if not inps.start_run == 0:
        inps.start_run = inps.start_run - 1

    if inps.step:
        inps.start_run = inps.step - 1
        inps.end_run = inps.step

    run_file_list = run_file_list[inps.start_run:inps.end_run]

    for item in run_file_list:

        putils.remove_last_job_running_products(run_file=item)

        job_status = job_obj.submit_batch_jobs(batch_file=item)

        if job_status:

            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item,
                                           work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

            date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                                  '%Y%m%d:%H%M%S')
            print(date_str + ' * Job {} completed'.format(item))

    date_str = datetime.datetime.strftime(datetime.datetime.now(),
                                          '%Y%m%d:%H%M%S')
    print(date_str + ' * all jobs from {} to {} have been completed'.format(
        os.path.basename(run_file_list[0]), os.path.basename(
            run_file_list[-1])))

    return None
Example #15
def main(iargs=None):
    """ create orth and geo rectifying run jobs and submit them. """

    inps = putils.cmd_line_parse(iargs)

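    # Put the matching ISCE stack tools (stripmapStack or topsStack) on the path before importing their helpers.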
    if 'stripmap' in inps.prefix:
        sys.path.append(os.path.join(os.getenv('ISCE_STACK'), 'stripmapStack'))
    else:
        sys.path.append(os.path.join(os.getenv('ISCE_STACK'), 'topsStack'))

    from s1a_isce_utils import loadProduct, getSwathList
    import mergeBursts

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    inps.geom_referenceDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.reference = os.path.join(inps.work_dir, pathObj.referencedir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except:
        print('DEM does not exist!')
        sys.exit(1)

    if not os.path.exists(inps.geom_referenceDir):
        os.mkdir(inps.geom_referenceDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)

    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    demZero = create_demZero(inps.dem, inps.geom_referenceDir)

    swathList = getSwathList(inps.reference)

    create_georectified_lat_lon(swathList, inps.reference,
                                inps.geom_referenceDir, demZero, loadProduct)

    merge_burst_lat_lon(inps, mergeBursts)

    multilook_images(inps, mergeBursts)

    run_file_list = make_run_list(inps)

    for item in run_file_list:

        putils.remove_last_job_running_products(run_file=item)

        job_obj.write_batch_jobs(batch_file=item)
        job_status = job_obj.submit_batch_jobs(batch_file=item)

        if job_status:
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item,
                                           work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    return
Example #16
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='smallbaseline_wrapper')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))
    
    #########################################
    # stripmap prep to isce
    #########################################
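    # For stripmap stacks, copy the reference (master) shelve metadata and run prep_isce.py before the MintPy workflow.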
    if inps.template['acquisition_mode'] == 'stripmap':
        inps.dsetDir = inps.work_dir + '/Igrams'
        inps.slcDir = inps.work_dir + '/merged/SLC'
        inps.geometryDir = inps.work_dir + '/geom_master'
        inps.baselineDir = inps.work_dir + '/baselines'
        masterDate = inps.template['stripmapStack.master']
        if masterDate == 'None':
            command1 = 'cp -r ' + inps.slcDir + '/' + os.listdir(inps.slcDir)[0] + '/masterShelve ' + inps.work_dir + '/.'
        else:
            command1 = 'cp -r ' + inps.slcDir + '/' + masterDate + '/masterShelve ' + inps.work_dir + '/.'
        print(command1)
        subprocess.Popen(command1, shell=True).wait()
        inps.metaFile = inps.work_dir + '/masterShelve/data.dat'
        command2 = 'prep_isce.py -d ' + inps.dsetDir + ' -m ' + inps.metaFile + ' -b ' + inps.baselineDir + ' -g ' + inps.geometryDir
        print(command2)
        subprocess.Popen(command2, shell=True).wait()
    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'smallbaseline_wrapper'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    os.chdir(inps.work_dir)

    try:
        with open('out_mintpy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main([inps.custom_template_file])
    except:
        with open('out_mintpy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main([inps.custom_template_file])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Mintpy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--mintpy'])

    return None
Example #17
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    global logger
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    os.chdir(inps.work_dir)

    if not inps.template[inps.prefix + 'Stack.slcDir'] is None:
        inps.slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    global project_slc_dir
    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    if not os.path.exists(inps.slc_dir):
        os.mkdir(inps.slc_dir)

    os.chdir(inps.slc_dir)

    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    dataset_template = Template(inps.custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv",
                     shell=True).wait()
    seasonal_start_date = None
    seasonal_end_date = None

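    # Seasonal date limits (template options or command-line values) restrict which acquisitions are listed for download.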
    try:
        if dataset_template.options[
                'seasonalStartDate'] is not None and dataset_template.options[
                    'seasonalEndDate'] is not None:
            seasonal_start_date = dataset_template.options['seasonalStartDate']
            seasonal_end_date = dataset_template.options['seasonalEndDate']
    except:
        pass

    if inps.seasonalStartDate is not None and inps.seasonalEndDate is not None:
        seasonal_start_date = inps.seasonalStartDate
        seasonal_end_date = inps.seasonalEndDate

    if seasonal_start_date is not None and seasonal_end_date is not None:
        generate_seasonal_files_csv(dataset_template, seasonal_start_date,
                                    seasonal_end_date)
    else:
        generate_files_csv(project_slc_dir, dataset_template)

    parallel = False

    try:
        if dataset_template.options['parallel'] == 'yes':
            parallel = True
    except:
        pass
    """if inps.parallel == 'yes':
        parallel = True"""

    threads = os.cpu_count()

    try:
        if dataset_template.options['threads'] is not None:
            threads = int(dataset_template.options['threads'])
    except:
        pass
    """if inps.processes is not None:
        processes = inps.processes"""

    if parallel:
        run_parallel_download_asf_serial(project_slc_dir, threads)
    else:
        succesful = run_download_asf_serial(project_slc_dir, logger)
        logger.log(loglevel.INFO, "SUCCESS: %s", str(succesful))

    change_file_permissions()
    logger.log(loglevel.INFO, "------------------------------------")
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv",
                     shell=True).wait()

    return None
Example #18
def main(iargs=None):
    inps = putils.cmd_line_parse(iargs, script='create_runfiles')
    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = inps.work_dir
    job_obj = JOB_SUBMIT(inps)
    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template[inps.prefix + 'Stack.demDir'] = dem_file
    except:
        raise SystemExit('DEM does not exist')

    slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    os.makedirs(slc_dir, exist_ok=True)

    # slc_dir size in MB; below ~500 MB assume the SLCs are missing and unpack the raw data
    if int(get_size(slc_dir) / 1024**2) < 500:

        # Unpack Raw data:
        if not inps.template['raw_image_dir'] in [None, 'None']:
            raw_image_dir = inps.template['raw_image_dir']
        else:
            raw_image_dir = os.path.join(inps.work_dir, 'RAW_data')

        if os.path.exists(raw_image_dir):
            unpackObj = Sensors(
                raw_image_dir,
                slc_dir,
                remove_file='False',
                multiple_raw_frame=inps.template['multiple_raw_frame'])
            unpack_run_file = unpackObj.start()
            unpackObj.close()

            job_obj.write_batch_jobs(batch_file=unpack_run_file)
            job_status = job_obj.submit_batch_jobs(batch_file=unpack_run_file)

            if not job_status:
                raise Exception('ERROR: Unpacking failed')
        else:
            raise Exception('ERROR: No data (SLC or Raw) available')

    # make run file:
    run_dir = os.path.join(inps.work_dir, 'run_files')
    config_dir = os.path.join(inps.work_dir, 'configs')
    for directory in [run_dir, config_dir]:
        if os.path.exists(directory):
            shutil.rmtree(directory)

    inps.Stack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)

    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.writelines(item + '\n')

    if inps.prefix == 'tops':
        # check for orbits
        orbit_dir = os.getenv('SENTINEL_ORBITS')
        local_orbit = os.path.join(inps.work_dir, 'orbits')
        precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
        if len(precise_orbits_in_local) > 0:
            for orbit_file in precise_orbits_in_local:
                os.system('cp {} {}'.format(orbit_file, orbit_dir))

    # Writing job files
    if inps.write_jobs:
        for item in run_file_list:
            job_obj.write_batch_jobs(batch_file=item)

        if inps.template['processingMethod'] == 'smallbaseline':
            job_name = 'smallbaseline_wrapper'
            job_file_name = job_name
            command = [
                'smallbaselineApp.py', inps.custom_template_file, '--dir',
                'mintpy'
            ]
            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')
        else:
            job_name = 'minopy_wrapper'
            job_file_name = job_name
            command = [
                'minopyApp.py', inps.custom_template_file, '--dir', 'minopy'
            ]
            job_obj.submit_script(job_name,
                                  job_file_name,
                                  command,
                                  writeOnly='True')

        job_name = 'insarmaps'
        job_file_name = job_name
        command = ['ingest_insarmaps.py', inps.custom_template_file]
        job_obj.submit_script(job_name,
                              job_file_name,
                              command,
                              writeOnly='True')

    return None
Example #19
def main(iargs=None):
    """ generates interferograms and coherence images in GeoTiff format """

    inps = putils.cmd_line_parse(iargs)

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'ifgramStack_to_ifgram_and_coherence'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    out_dir = inps.work_dir + '/' + pathObj.tiffdir
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    try:
        file = glob.glob(inps.work_dir + '/mintpy/inputs/ifgramStack.h5')[0]
    except:
        raise Exception('ERROR in ' + os.path.basename(__file__) +
                        ': file ifgramStack.h5 not found')

    # modify network so that only one connection left
    arg_string = file + ' --max-conn-num 1'
    print('modify_network.py', arg_string)
    mintpy.modify_network.main(arg_string.split())

    if not os.path.isdir(inps.work_dir + '/mintpy/geo'):
        os.makedirs(inps.work_dir + '/mintpy/geo')

    # geocode ifgramStack
    geo_file = os.path.dirname(
        os.path.dirname(file)) + '/geo/geo_' + os.path.basename(file)
    lookup_file = os.path.dirname(
        os.path.dirname(file)) + '/inputs/geometryRadar.h5'
    template_file = os.path.dirname(
        os.path.dirname(file)) + '/smallbaselineApp_template.txt'
    arg_string = file + ' -t ' + template_file + ' -l ' + lookup_file + ' -o ' + geo_file
    print('geocode.py', arg_string)
    mintpy.geocode.main(arg_string.split())

    # loop over all interferograms
    obj = ifgramStack(geo_file)
    obj.open()
    date12_list = obj.get_date12_list()
    # dummy_data, atr = readfile.read(geo_file)

    for i in range(len(date12_list)):
        date_str = date12_list[i]
        print('Working on ... ' + date_str)
        data_coh = readfile.read(file, datasetName='coherence-' + date_str)[0]
        data_unw = readfile.read(file,
                                 datasetName='unwrapPhase-' + date_str)[0]

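        # Write one GeoTIFF per pair: coherence and the unwrapped interferogram.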
        fname_coh = out_dir + '/coherence_' + date_str + '.tif'
        fname_unw = out_dir + '/interferogram_' + date_str + '.tif'

        create_geotiff(obj,
                       data=data_coh,
                       outfile=fname_coh,
                       type='coherence',
                       work_dir=inps.work_dir)
        create_geotiff(obj,
                       data=data_unw,
                       outfile=fname_unw,
                       type='interferogram',
                       work_dir=inps.work_dir)
    return
Example #20
def main(iargs=None):
    """Downloads data with ssara and asfserial scripts."""

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'download_rsmas'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if not inps.template['topsStack.slcDir'] is None:
        slc_dir = inps.template['topsStack.slcDir']
    else:
        slc_dir = os.path.join(inps.work_dir, 'SLC')

    if not os.path.isdir(inps.work_dir):
        os.makedirs(inps.work_dir)

    if not os.path.isdir(slc_dir):
        os.makedirs(slc_dir)

    # if satellite is not Sentinel (not tried yet)
    if 'SenDT' not in inps.project_name and 'SenAT' not in inps.project_name:

        ssara_call = 'ssara_federated_query.py ' + inps.ssaraopt + ' --print' + ' --download'
        ssara_process = subprocess.Popen(ssara_call, shell=True)
        completion_status = ssara_process.wait()

        return

    download('ssara', inps.custom_template_file, slc_dir, outnum=1)
    #download('asfserial', inps.custom_template_file, slc_dir, outnum = 1)

    for i_download in [2, 3]:
        download_success = run_check_download(slc_dir=slc_dir)

        if not download_success:
            print('check_download.py: There were bad files, download again')
            message_rsmas.log(
                inps.work_dir,
                'check_download.py: there were bad files, download again')

            download('ssara',
                     inps.custom_template_file,
                     slc_dir,
                     outnum=i_download)