def generate_files_csv(slc_dir, custom_template_file):
    """ Generates a csv file of the files to download serially.
    Uses the `awk` command to generate a csv file containing the data files to be downloaded
    serially. The output csv file is then piped through the `sed` command to remove the first five
    empty values, which would otherwise cause errors in download_ASF_serial.py.
    """

    dataset_template = Template(custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))
    ssaraopt = dataset_template.generate_ssaraopt_string()
    ssaraopt = ssaraopt.split(' ')

    # add intersectWith to ssaraopt string  # FA 8/19: the delta_lat default value should come from a command-line parse
    ssaraopt = add_polygon_to_ssaraopt(dataset_template.get_options(),
                                       ssaraopt.copy(),
                                       delta_lat=0.0)

    filecsv_options = ['ssara_federated_query.py'] + ssaraopt + [
        '--print', '|', 'awk', "'BEGIN{FS=\",\"; ORS=\",\"}{ print $14}'", '>',
        os.path.join(slc_dir, 'files.csv')
    ]

    csv_command = ' '.join(filecsv_options)
    message_rsmas.log(slc_dir, csv_command)
    subprocess.Popen(csv_command, shell=True).wait()
    # FA 8/2019: replaced new_files.csv by files.csv as infile argument
    sed_command = "sed 's/^.\{5\}//' " + os.path.join(slc_dir, 'files.csv') + \
                  ">" + os.path.join(slc_dir, 'new_files.csv')
    message_rsmas.log(slc_dir, sed_command)
    subprocess.Popen(sed_command, shell=True).wait()
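
# Editor's sketch (not part of the original function): a rough pure-Python equivalent of the
# awk | sed pipeline above, assuming the ssara --print output was saved to a text file whose
# comma-separated lines carry the download URL in the 14th field. The helper name and the
# listing-file argument are hypothetical.
import os

def write_files_csv_sketch(listing_file, slc_dir):
    with open(listing_file) as f:
        urls = [line.split(',')[13] for line in f if line.count(',') >= 13]
    joined = ','.join(urls) + ','          # awk with ORS="," leaves a trailing separator
    with open(os.path.join(slc_dir, 'new_files.csv'), 'w') as out:
        out.write(joined[5:])              # sed 's/^.\{5\}//' strips five leading characters
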
Example #2
    def run_stack_workflow(self):        # This part is for isce stack run_files
               
        if self.prefix == 'tops':
            message_rsmas.log(self.work_dir, 'stackSentinel.py' + ' ' + ' '.join(self.command_options))
            out_file_name = 'out_stackSentinel'
            cmd = 'export PATH=$ISCE_STACK/topsStack:$PATH; stackSentinel.py' + ' ' + ' '.join(self.command_options)

        else:
            message_rsmas.log(self.work_dir, 'stackStripMap.py' + ' ' + ' '.join(self.command_options))
            out_file_name = 'out_stackStripMap'
            cmd = 'export PATH=$ISCE_STACK/stripmapStack:$PATH; stackStripMap.py' + ' ' + ' '.join(self.command_options)
        
        system_path = os.getenv('PATH')

        try:
            with open(out_file_name + '.o', 'w') as f:
                with contextlib.redirect_stdout(f):
                    os.system(cmd)
        except Exception:
            with open(out_file_name + '.e', 'w') as g:
                with contextlib.redirect_stderr(g):
                    os.system(cmd)

        os.environ['PATH'] = system_path
        
        return
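
# Editor's sketch (not from the original code): the manual save / prepend / restore of PATH in
# run_stack_workflow could also be written as a context manager, so the original PATH is
# restored even if the stack command fails. The helper name is illustrative.
import contextlib
import os

@contextlib.contextmanager
def prepended_path(directory):
    saved = os.environ.get('PATH', '')
    os.environ['PATH'] = directory + os.pathsep + saved
    try:
        yield
    finally:
        os.environ['PATH'] = saved

# Usage sketch:
#   with prepended_path(os.path.expandvars('$ISCE_STACK/topsStack')):
#       os.system('stackSentinel.py ' + ' '.join(command_options))
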
Example #3
def main(iargs=None):
    """ email mintpy or insarmaps results """

    inps = putils.cmd_line_parse(iargs, script='email_results')

    email_address = os.getenv('NOTIFICATIONEMAIL')

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    if inps.email_insarmaps_flag:
        email_insarmaps_results(email_address)

        if int(inps.template['cleanopt']) == 4:
            cleanlist = pathObj.isce_clean_list
            putils.remove_directories(cleanlist[4])

        return

    if inps.email_mintpy_flag:
        email_mintpy_results(email_address)
        return

    return None
Example #4
    def run_stack_workflow(self):  # This part is for isce stack run_files

        if self.prefix == 'tops':
            message_rsmas.log(
                self.work_dir,
                'stackSentinel.py' + ' ' + ' '.join(self.command_options))
            cmd = 'export PATH=$ISCE_STACK/topsStack:$PATH; stackSentinel.py' + ' ' + ' '.join(
                self.command_options)

        else:
            message_rsmas.log(
                self.work_dir,
                'stackStripMap.py' + ' ' + ' '.join(self.command_options))
            cmd = 'export PATH=$ISCE_STACK/stripmapStack:$PATH; stackStripMap.py' + ' ' + ' '.join(
                self.command_options)

        system_path = os.getenv('PATH')

        print(cmd)
        status = subprocess.Popen(cmd, shell=True).wait()
        if status != 0:
            raise Exception('ERROR in create_runfiles.py')

        os.environ['PATH'] = system_path

        woke_PBS_job_files = glob.glob(self.work_dir + '/run_files/*.job')
        for f in woke_PBS_job_files:
            os.remove(f)

        return
Example #5
def main(iargs=None):

    start_time = time.time()

    inps = process_rsmas_cmd_line_parse(iargs)

    inps = check_directories_and_inputs(inps)

    command_line = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    message_rsmas.log(inps.work_dir, '##### NEW RUN #####')
    message_rsmas.log(inps.work_dir, command_line)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_file_name = 'process_rsmas'
        job = job_obj.submit_script(inps.project_name, job_file_name,
                                    sys.argv[:])
        # run_operations.py needs this print statement for now.
        # This is not for debugging purposes.
        # DO NOT REMOVE.
        print(job)

    else:
        objInsar = RsmasInsar(inps)
        objInsar.run(steps=inps.runSteps)

    # Timing
    m, s = divmod(time.time() - start_time, 60)
    print('\nTotal time: {:02.0f} mins {:02.1f} secs'.format(m, s))
    return
def submit_batch_jobs(batch_file,
                      out_dir='./run_files',
                      work_dir='.',
                      memory=None,
                      walltime=None,
                      queue=None):
    """
    submit jobs based on scheduler
    :param batch_file: batch job name
    :param out_dir: output directory
    :param work_dir: logging directory
    :param walltime: max time to process the job
    :param memory: max memory required
    :return: True if running on a cluster
    """

    message_rsmas.log(
        work_dir, 'job_submission.py {a} --outdir {b}'.format(a=batch_file,
                                                              b=out_dir))

    supported_schedulers = ['LSF', 'PBS', 'SLURM']

    if os.getenv('JOBSCHEDULER') in supported_schedulers:
        print('\nWorking on a {} machine ...\n'.format(
            os.getenv('JOBSCHEDULER')))

        maxmemory, wall_time, num_threads = get_memory_walltime(
            batch_file, job_type='batch', wall_time=walltime, memory=memory)

        if queue is None:
            queue = os.getenv('QUEUENAME')

        if os.getenv('JOBSCHEDULER') in ['SLURM', 'sge']:

            submit_job_with_launcher(batch_file=batch_file,
                                     out_dir=out_dir,
                                     memory=maxmemory,
                                     walltime=wall_time,
                                     number_of_threads=num_threads,
                                     queue=queue)
        else:

            jobs = submit_jobs_individually(batch_file=batch_file,
                                            out_dir=out_dir,
                                            memory=maxmemory,
                                            walltime=wall_time,
                                            queue=queue)

        return True

    else:
        print('\nWorking on a single machine ...\n')

        with open(batch_file, 'r') as f:
            command_lines = f.readlines()
            for command_line in command_lines:
                os.system(command_line)

        return False
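
# Editor's note: a minimal usage sketch of submit_batch_jobs; the run-file name, memory and
# walltime values below are hypothetical. Without a supported scheduler the function runs
# every line of the batch file locally and returns False.
on_cluster = submit_batch_jobs(batch_file='run_files/run_01_unpack_topo_reference',
                               out_dir='./run_files',
                               work_dir='.',
                               memory='4000',
                               walltime='02:00',
                               queue=None)
if not on_cluster:
    print('Batch file was executed serially on the local machine')
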
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    os.chdir(inps.work_dir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'create_runfiles'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template['topsStack.demDir'] = dem_file
    except Exception:
        raise SystemExit('DEM does not exist')

    # check for orbits
    orbit_dir = os.getenv('SENTINEL_ORBITS')

    # make run file
    inps.topsStack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)

    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.writelines(item + '\n')

    local_orbit = os.path.join(inps.work_dir, 'orbits')
    precise_orbits_in_local = glob.glob(local_orbit + '/*/*POEORB*')
    if len(precise_orbits_in_local) > 0:
        for orbit_file in precise_orbits_in_local:
            os.system('cp {} {}'.format(orbit_file, orbit_dir))

    return None
def call_ssara_dem(inps, cwd):
    print('DEM generation using SSARA')
    sys.stdout.flush()
    out_file = 'ssara_dem.log'

    # need to refactor so that Josh's dataset_template will be used throughout
    ssaraopt_string = inps.ssaraopt
    ssaraopt_list = ssaraopt_string.split(' ')
    ssaraopt_list = add_polygon_to_ssaraopt(dataset_template=inps.template,
                                            ssaraopt=ssaraopt_list.copy(),
                                            delta_lat=0)
    ssaraopt_string = ' '.join(ssaraopt_list)

    out_file = 'out_ssara_dem'
    command = 'ssara_federated_query.py {ssaraopt} --dem '.format(
        ssaraopt=ssaraopt_string)
    message_rsmas.log(os.getcwd(), command)
    command = '(' + command + ' | tee ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee ' + out_file + '.e'
    print('command currently executing: ' + command)
    sys.stdout.flush()
    if os.getenv('DOWNLOADHOST') == 'local':
        print('Command: ' + command)
        sys.stdout.flush()
        try:
            proc = subprocess.Popen(command,
                                    stderr=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    shell=True,
                                    universal_newlines=True)
            error, output = proc.communicate(
            )  # FA 8/19: error, output works better here than output, error; stderr and stdout may be switched.
            if proc.returncode != 0:
                raise Exception(
                    'ERROR starting dem.py subprocess'
                )  # FA 8/19: I don't think this happens; errors are in output
        except subprocess.CalledProcessError as exc:
            print("Command failed. Exit code, StdErr:", exc.returncode,
                  exc.output)
            sys.exit('Error produced by ssara_federated_query.py')
        else:
            if 'Downloading DEM' not in output:
                os.chdir('..')
                shutil.rmtree('DEM')
                sys.exit('Error in dem.py: Tiles are missing. Ocean???')
    else:
        dem_dir = os.getcwd()
        ssh_command_list = ['s.bgood', 'cd {0}'.format(cwd), command]
        host = os.getenv('DOWNLOADHOST')
        status = ssh_with_commands(host, ssh_command_list)
        print('status from ssh_with_commands:' + str(status))
        sys.stdout.flush()

    print('Done downloading dem.grd')
    sys.stdout.flush()
    grd_to_envi_and_vrt()
    grd_to_xml(cwd)
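
# Editor's note: the "(cmd | tee X.o) 3>&1 1>&2 2>&3 | tee X.e" wrapper used above swaps stdout
# and stderr through the temporary descriptor 3, so stdout is tee'd into X.o and stderr into
# X.e while both streams still reach the terminal. A minimal illustration (helper name and
# example command are hypothetical):
def tee_stdout_and_stderr(command, out_file):
    return '(' + command + ' | tee ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee ' + out_file + '.e'

# Example: subprocess.Popen(tee_stdout_and_stderr('echo hello', 'out_example'), shell=True).wait()
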
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    os.chdir(inps.work_dir)

    job_file_name = 'create_runfiles'
    job_name = job_file_name
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(
        inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:

        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir,
                         new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    try:
        dem_file = glob.glob('DEM/*.wgs84')[0]
        inps.template['topsStack.demDir'] = dem_file
    except Exception:
        raise SystemExit('DEM does not exist')

    inps.topsStack_template = pathObj.correct_for_isce_naming_convention(inps)
    runObj = CreateRun(inps)
    runObj.run_stack_workflow()

    run_file_list = putils.make_run_list(inps.work_dir)

    with open(inps.work_dir + '/run_files_list', 'w') as run_file:
        for item in run_file_list:
            run_file.writelines(item + '\n')

    if inps.template['topsStack.workflow'] in ['interferogram', 'slc']:
        runObj.run_post_stack()

    return None
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        work_dir = inps.work_dir

        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']

        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir,
                         inps.wall_time)

    os.chdir(inps.work_dir)

    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    os.chdir(inps.slc_dir)

    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    generate_files_csv(project_slc_dir, inps.custom_template_file)
    successful = run_download_asf_serial(project_slc_dir, logger)
    change_file_permissions()
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
Example #11
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='smallbaseline_wrapper')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'smallbaseline_wrapper'
    job_name = job_file_name

    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(
        inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:

        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir,
                         new_wall_time)
        sys.exit(0)

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    os.chdir(inps.work_dir)

    time.sleep(wait_seconds)

    try:
        with open('out_mintpy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main([inps.customTemplateFile])
    except Exception:
        with open('out_mintpy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main([inps.customTemplateFile])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Mintpy results
    if inps.email:
        email_results.main([inps.customTemplateFile])

    return None
def get_ssara_kml(download_dir, ssaraopt):
    """download the ssara kml file and generate a file listing of granules to be downloaded"""

    ssaraopt_kml = ['--kml --maxResults=20000' if x.startswith('--parallel') else x for x in ssaraopt]

    ssara_call = ['ssara_federated_query.py'] + ssaraopt_kml
    print('Get KML using:\n' + ' '.join(ssara_call))
    message_rsmas.log(download_dir, ' '.join(ssara_call))
    ssara_process = subprocess.run(' '.join(ssara_call), shell=True)

    return None
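
# Editor's note: the list comprehension above swaps any option starting with '--parallel' for
# '--kml --maxResults=20000' and leaves the other options untouched. Illustration with made-up
# option values:
ssaraopt_demo = ['--platform=SENTINEL-1A', '--relativeOrbit=128', '--parallel=6']
print(['--kml --maxResults=20000' if x.startswith('--parallel') else x for x in ssaraopt_demo])
# -> ['--platform=SENTINEL-1A', '--relativeOrbit=128', '--kml --maxResults=20000']
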
Example #13
def delete_files(inps, broken_list):
    """delete bad files"""
    inputdir = "".join(inps.inputdir)
    os.chdir(inputdir)
    for file in broken_list:
        real_path = os.path.realpath(file)
        if os.path.exists(real_path):
            os.remove(real_path)
            message_rsmas.log(
                os.getcwd(),
                os.path.basename(__file__) + ': deleting ' + real_path)
    return
Example #14
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    logfile_name = inps.work_dir + '/ssara_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')
    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]

        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']

        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir,
                         inps.wall_time)
        sys.exit(0)

    if not os.path.isdir(project_slc_dir):
        os.makedirs(project_slc_dir)
    os.chdir(inps.slc_dir)

    logger.log(loglevel.INFO, "DATASET: %s",
               str(inps.custom_template_file.split('/')[-1].split(".")[0]))
    logger.log(loglevel.INFO, "DATE: %s",
               datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"))
    successful = run_ssara(project_slc_dir, inps.custom_template_file,
                           inps.delta_lat, logger)
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
Example #15
    def run_upload_data_products(self):
        """ upload data to jetstream server for data download
        """
        if self.template['upload_flag'] in ['True', True]:
            # upload_data_products.main([self.custom_template_file, '--mintpyProducts'])   # this is simpler, but how to put process into background?
            command = 'upload_data_products.py --mintpyProducts ' + self.custom_template_file + ' > out_upload_data_products.o 2> out_upload_data_products.e'
            message_rsmas.log(os.getcwd(), command)
            status = subprocess.Popen(command,
                                      stderr=subprocess.PIPE,
                                      stdout=subprocess.PIPE,
                                      shell=True)

        return
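
# Editor's sketch (not part of the original class): the upload above is launched in the
# background and its Popen handle is never checked, so a failed upload would go unnoticed.
# A helper like this could be called later with the stored handle; the name is illustrative.
import subprocess

def check_background_upload(process, timeout=None):
    try:
        returncode = process.wait(timeout=timeout)
    except subprocess.TimeoutExpired:
        return None                              # still running
    if returncode != 0:
        print('upload_data_products.py exited with code', returncode)
    return returncode
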
Example #16
    def run_stack_workflow(self):        # This part is for isceStack run_files

        message_rsmas.log(self.work_dir, 'stackSentinel.py' + ' ' + ' '.join(self.command_options))

        try:
            with open('out_stackSentinel.o', 'w') as f:
                with contextlib.redirect_stdout(f):
                    stackSentinel.main(self.command_options)
        except Exception:
            with open('out_stackSentinel.e', 'w') as g:
                with contextlib.redirect_stderr(g):
                    stackSentinel.main(self.command_options)

        return
Example #17
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='minopy_wrapper')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'minopy_wrapper'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)

    os.chdir(inps.work_dir)

    try:
        with open('out_minopy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main(
                    [inps.custom_template_file, '--dir', pathObj.mintpydir])
    except Exception:
        with open('out_minopy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main(
                    [inps.custom_template_file, '--dir', pathObj.mintpydir])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Minopy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--minopy'])

    return None
Example #18
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/ssara_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')
    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if not os.path.isdir(project_slc_dir):
        os.makedirs(project_slc_dir)
    os.chdir(inps.slc_dir)

    logger.log(loglevel.INFO, "DATASET: %s",
               str(inps.custom_template_file.split('/')[-1].split(".")[0]))
    logger.log(loglevel.INFO, "DATE: %s",
               datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"))
    successful = run_ssara(project_slc_dir, inps.custom_template_file,
                           inps.delta_lat, logger)
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
def main(iargs=None):

    start_time = time.time()

    inps = process_rsmas_cmd_line_parse(iargs)

    template_file = pathObj.auto_template

    # print default template
    if inps.print_template:
        raise SystemExit(open(template_file, 'r').read())

    inps = check_directories_and_inputs(inps)

    command_line = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    message_rsmas.log(inps.work_dir, '##### NEW RUN #####')
    message_rsmas.log(inps.work_dir, command_line)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'process_rsmas'
    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(
        inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job = js.submit_script(inps.project_name, job_file_name, sys.argv[:],
                               inps.work_dir, new_wall_time)
        # run_operations.py needs this print statement for now.
        # This is not for debugging purposes.
        # DO NOT REMOVE.
        print(job)

    else:
        time.sleep(wait_seconds)

        objInsar = RsmasInsar(inps)
        objInsar.run(steps=inps.runSteps)

    # Timing
    m, s = divmod(time.time() - start_time, 60)
    print('\nTotal time: {:02.0f} mins {:02.1f} secs'.format(m, s))
    return
Example #20
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/ssara_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    #import pdb; pdb.set_trace()
    if inps.template[inps.prefix + 'Stack.slcDir'] is not None:
        inps.download_dir = inps.template[inps.prefix + 'Stack.slcDir']

    if 'COSMO' in inps.template['ssaraopt.platform']:
        inps.download_dir = os.path.join(inps.work_dir, 'RAW_data')
    else:
        inps.download_dir = os.path.join(inps.work_dir, 'SLC')

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if not os.path.isdir(inps.download_dir):
        os.makedirs(inps.download_dir)
    os.chdir(inps.download_dir)

    successful = run_ssara(inps.download_dir, inps.custom_template_file,
                           inps.delta_lat, logger)

    return None
Example #21
def get_ssara_kml_and_listing(slc_dir, ssaraopt):
    """download the ssara kml file and generate a file listing of granules to be downloaded"""

    ssaraopt_kml = ['--kml' if x.startswith('--parallel') else x for x in ssaraopt]
    ssaraopt_print = ['--print' if x.startswith('--parallel') else x for x in ssaraopt]
    ssaraopt_print.append('>')
    ssaraopt_print.append(os.path.join(slc_dir, 'ssara_listing.txt'))

    ssara_call = ['ssara_federated_query.py'] + ssaraopt_kml
    print('Get KML using:\n' + ' '.join(ssara_call))
    message_rsmas.log(slc_dir, ' '.join(ssara_call))
    ssara_process = subprocess.run(' '.join(ssara_call), shell=True)
    ssara_call = ['ssara_federated_query.py'] + ssaraopt_print
    print('Get listing using:\n' + ' '.join(ssara_call))
    message_rsmas.log(slc_dir, ' '.join(ssara_call))
    ssara_process = subprocess.run(' '.join(ssara_call), shell=True)

    return None
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='smallbaseline_wrapper')

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_name = 'smallbaseline_wrapper'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    os.chdir(inps.work_dir)

    try:
        with open('out_mintpy.o', 'w') as f:
            with contextlib.redirect_stdout(f):
                smallbaselineApp.main([inps.custom_template_file])
    except Exception:
        with open('out_mintpy.e', 'w') as g:
            with contextlib.redirect_stderr(g):
                smallbaselineApp.main([inps.custom_template_file])

    inps.mintpy_dir = os.path.join(inps.work_dir, pathObj.mintpydir)
    putils.set_permission_dask_files(directory=inps.mintpy_dir)

    # Email Mintpy results
    if inps.email:
        email_results.main([inps.custom_template_file, '--mintpy'])

    return None
Example #23
    def run_interferogram(self):
        """ Process images from unpacking to making interferograms using ISCE
        1. create run_files
        2. execute run_files
        """
        try:
            minsar.create_runfiles.main([self.custom_template_file])
        except Exception:
            print('Skip creating run files ...')

        command = 'tropo_pyaps3.py --date-list SAFE_files.txt --dir $WEATHER_DIR >> /dev/null'
        message_rsmas.log(os.getcwd(), command)
        status = subprocess.Popen(command,
                                  stderr=subprocess.PIPE,
                                  stdout=subprocess.PIPE,
                                  shell=True)

        minsar.execute_runfiles.main([self.custom_template_file])
        return
def main(iargs=None):
    """ create template files for chunk processing """

    inps = putils.cmd_line_parse(iargs, script='generate_chunk_template_files')

    os.chdir(inps.work_dir)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    run_generate_chunk_template_files(inps)

    return
def compare_ssara_listings(work_dir, ssaraopt_frame, ssaraopt_polygon):
    """download the ssara kml file and generate a file listing of granules to be downloaded"""

    ssaraopt_frame_kml = [
        '--kml' if x.startswith('--parallel') else x for x in ssaraopt_frame
    ]
    ssaraopt_frame_print = [
        '--print' if x.startswith('--parallel') else x for x in ssaraopt_frame
    ]
    ssaraopt_frame_print.append('>')
    ssaraopt_frame_print.append('ssara_listing_frame.txt')

    ssara_call = ['ssara_federated_query.py'] + ssaraopt_frame_kml
    message_rsmas.log(work_dir, ' '.join(ssara_call))
    ssara_process = subprocess.run(' '.join(ssara_call), shell=True)
    rename_latest_kml(suffix='frame')
    ssara_call = ['ssara_federated_query.py'] + ssaraopt_frame_print
    message_rsmas.log(work_dir, ' '.join(ssara_call))
    ssara_process = subprocess.run(' '.join(ssara_call), shell=True)

    ssaraopt_polygon_kml = [
        '--kml' if x.startswith('--parallel') else x for x in ssaraopt_polygon
    ]
    ssaraopt_polygon_print = [
        '--print' if x.startswith('--parallel') else x
        for x in ssaraopt_polygon
    ]
    ssaraopt_polygon_print.append('>')
    ssaraopt_polygon_print.append('ssara_listing_polygon.txt')

    ssara_call = ['ssara_federated_query.py'] + ssaraopt_polygon_kml
    message_rsmas.log(work_dir, ' '.join(ssara_call))
    ssara_process = subprocess.run(' '.join(ssara_call), shell=True)
    rename_latest_kml(suffix='polygon')
    ssara_call = ['ssara_federated_query.py'] + ssaraopt_polygon_print
    message_rsmas.log(work_dir, ' '.join(ssara_call))
    ssara_process = subprocess.run(' '.join(ssara_call), shell=True)

    with open('ssara_listing_frame.txt', 'r') as file0:
        with open('ssara_listing_polygon.txt', 'r') as file1:
            diff = difflib.unified_diff(
                file0.readlines(),
                file1.readlines(),
                fromfile='file0',
                tofile='file1',
            )
            print('\ndiff ssara_listing_frame.txt ssara_listing_polygon.txt:')
            for line in diff:
                sys.stdout.write(line)
            print(' ')
    return None
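
# Editor's note: difflib.unified_diff yields unified-diff lines ('---', '+++', '@@', '+', '-')
# that already end in newlines, which is why they are written with sys.stdout.write above.
# Tiny illustration with made-up listings:
import difflib
import sys

for line in difflib.unified_diff(['granule_A\n', 'granule_B\n'],
                                 ['granule_A\n', 'granule_C\n'],
                                 fromfile='frame', tofile='polygon'):
    sys.stdout.write(line)
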
Example #26
def main(iargs=None):
    """Calculate horz and vert."""

    inps = command_line_parse(iargs)

    inps.project_name = get_project_name(inps.customTemplateFile)
    inps.work_dir = get_work_directory(None, inps.project_name)
    if not os.path.isdir(inps.work_dir):
        os.mkdir(inps.work_dir)
    os.chdir(inps.work_dir)

    command = os.path.basename(__file__) + ' ' + iargs[0]
    message_rsmas.log(inps.work_dir, command)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_rsmas'
        work_dir = os.getcwd()
        job_name = inps.customTemplateFile.split(os.sep)[-1].split('.')[0]
        wall_time = '24:00'

        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir,
                         wall_time)
        sys.exit(0)

    inps = putils.create_or_update_template(inps)

    if not os.path.isdir(inps.work_dir):
        os.makedirs(inps.work_dir)

    # import pdb; pdb.set_trace()
    if bool(inps.template['horzvert']):
        generate_verthorz(inps.customTemplateFile)

    #if not os.path.isdir(slc_dir):
    #    os.makedirs(slc_dir)

    os.chdir(inps.work_dir)
Example #27
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='execute_runfiles')

    os.chdir(inps.work_dir)

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'execute_runfiles'
    job_name = job_file_name

    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(
        inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:

        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir,
                         new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    command_line = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    message_rsmas.log(inps.work_dir, command_line)

    run_file_list = putils.read_run_list(inps.work_dir)

    if inps.endrun == 0:
        inps.endrun = len(run_file_list)

    if inps.startrun != 0:
        inps.startrun = inps.startrun - 1

    run_file_list = run_file_list[inps.startrun:inps.endrun]

    if os.getenv('JOBSCHEDULER') in ['LSF', 'PBS']:

        for item in run_file_list:
            step_name = '_'
            step_name = step_name.join(item.split('_')[3::])
            try:
                memorymax = config[step_name]['memory']
            except Exception:
                memorymax = config['DEFAULT']['memory']

            try:
                if config[step_name]['adjust'] == 'True':
                    walltimelimit = putils.walltime_adjust(
                        inps, config[step_name]['walltime'])
                else:
                    walltimelimit = config[step_name]['walltime']
            except Exception:
                walltimelimit = config['DEFAULT']['walltime']

            queuename = os.getenv('QUEUENAME')

            putils.remove_last_job_running_products(run_file=item)

            jobs = js.submit_batch_jobs(batch_file=item,
                                        out_dir=os.path.join(
                                            inps.work_dir, 'run_files'),
                                        work_dir=inps.work_dir,
                                        memory=memorymax,
                                        walltime=walltimelimit,
                                        queue=queuename)

            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item,
                                           work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    else:
        for item in run_file_list:
            with open(item, 'r') as f:
                command_lines = f.readlines()
                for command_line in command_lines:
                    os.system(command_line)

    return None
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='ingest_insarmaps')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'ingest_insarmaps'
    job_name = job_file_name

    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)

    time.sleep(wait_seconds)

    os.chdir(inps.work_dir)

    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    hdfeos_file = glob.glob(inps.work_dir + '/mintpy/S1*.he5')
    hdfeos_file += glob.glob(inps.work_dir + '/mintpy/SUBSET_*/S1*.he5')
    hdfeos_file = hdfeos_file[0]

    json_folder = inps.work_dir + '/mintpy/JSON'
    mbtiles_file = json_folder + '/' + os.path.splitext(os.path.basename(hdfeos_file))[0] + '.mbtiles'

    if os.path.isdir(json_folder):
        shutil.rmtree(json_folder)

    command1 = 'hdfeos5_2json_mbtiles.py ' + hdfeos_file + ' ' + json_folder + ' |& tee out_insarmaps.log'
    command2 = 'json_mbtiles2insarmaps.py -u ' + password.insaruser + ' -p ' + password.insarpass + ' --host ' + \
               'insarmaps.miami.edu -P rsmastest -U rsmas\@gmail.com --json_folder ' + \
               json_folder + ' --mbtiles_file ' + mbtiles_file + ' |& tee -a out_insarmaps.log'

    with open(inps.work_dir + '/mintpy/run_insarmaps', 'w') as f:
        f.write(command1 + '\n')
        f.write(command2 + '\n')

    out_file = 'out_insarmaps'
    message_rsmas.log(inps.work_dir, command1)
    command1 = '('+command1+' | tee '+out_file+'.o) 3>&1 1>&2 2>&3 | tee '+out_file+'.e'
    status = subprocess.Popen(command1, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in hdfeos5_2json_mbtiles.py')

    # TODO: Change subprocess call to get back error code and send error code to logger
    message_rsmas.log(inps.work_dir, command2)
    command2 = '('+command2+' | tee -a '+out_file+'.o) 3>&1 1>&2 2>&3 | tee -a '+out_file+'.e'
    status = subprocess.Popen(command2, shell=True).wait()
    if status != 0:
        raise Exception('ERROR in json_mbtiles2insarmaps.py')

    # Email insarmaps results:
    if inps.email:
        email_results.main([inps.custom_template_file, '--insarmap'])

    return None
Example #29
def main(iargs=None):
    """Downloads data with ssara and asfserial scripts."""

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    job_file_name = 'download_rsmas'
    job_name = job_file_name

    if inps.wall_time == 'None':
        inps.wall_time = config[job_file_name]['walltime']

    wait_seconds, new_wall_time = putils.add_pause_to_walltime(inps.wall_time, inps.wait_time)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, new_wall_time)
        sys.exit(0)

    time.sleep(wait_seconds)

    if iargs is not None:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    if inps.template['topsStack.slcDir'] is not None:
        slc_dir = inps.template['topsStack.slcDir']
    else:
        slc_dir = os.path.join(inps.work_dir, 'SLC')

    if not os.path.isdir(inps.work_dir):
        os.makedirs(inps.work_dir)

    if not os.path.isdir(slc_dir):
        os.makedirs(slc_dir)

    # if satellite is not Sentinel (not tried yet)
    if 'SenDT' not in inps.project_name and 'SenAT' not in inps.project_name:

        ssara_call = ['ssara_federated_query.py'] + inps.ssaraopt + ['--print', '--download']
        ssara_process = subprocess.Popen(' '.join(ssara_call), shell=True)
        completion_status = ssara_process.wait()

        return

    download('ssara', inps.custom_template_file, slc_dir, outnum=1)
    download('asfserial', inps.custom_template_file, slc_dir, outnum=1)

    for i_download in [2, 3]:
        download_success = run_check_download(slc_dir=slc_dir)

        if not download_success:
            print('check_download.py: There were bad files, download again')
            message_rsmas.log(inps.work_dir, 'check_download.py: there were bad files, download again')

            download('ssara', inps.custom_template_file, slc_dir, outnum=i_download)
            download('asfserial', inps.custom_template_file, slc_dir, outnum=i_download)
Example #30
def main(iargs=None):
    """ generates interferograms and coherence images in GeoTiff format """

    inps = putils.cmd_line_parse(iargs)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################

    if inps.submit_flag:
        job_obj = JOB_SUBMIT(inps)
        job_name = 'ifgramStack_to_ifgram_and_coherence'
        job_file_name = job_name
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    out_dir = inps.work_dir + '/' + pathObj.tiffdir
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    try:
        file = glob.glob(inps.work_dir + '/mintpy/inputs/ifgramStack.h5')[0]
    except Exception:
        raise Exception('ERROR in ' + os.path.basename(__file__) +
                        ': file ifgramStack.h5 not found')

    # modify network so that only one connection left
    arg_string = file + ' --max-conn-num 1'
    print('modify_network.py', arg_string)
    mintpy.modify_network.main(arg_string.split())

    if not os.path.isdir(inps.work_dir + '/mintpy/geo'):
        os.makedirs(inps.work_dir + '/mintpy/geo')

    # geocode ifgramStack
    geo_file = os.path.dirname(
        os.path.dirname(file)) + '/geo/geo_' + os.path.basename(file)
    lookup_file = os.path.dirname(
        os.path.dirname(file)) + '/inputs/geometryRadar.h5'
    template_file = os.path.dirname(
        os.path.dirname(file)) + '/smallbaselineApp_template.txt'
    arg_string = file + ' -t ' + template_file + ' -l ' + lookup_file + ' -o ' + geo_file
    print('geocode.py', arg_string)
    mintpy.geocode.main(arg_string.split())

    # loop over all interferograms
    obj = ifgramStack(geo_file)
    obj.open()
    date12_list = obj.get_date12_list()
    # dummy_data, atr = readfile.read(geo_file)

    for i in range(len(date12_list)):
        date_str = date12_list[i]
        print('Working on ... ' + date_str)
        data_coh = readfile.read(file, datasetName='coherence-' + date_str)[0]
        data_unw = readfile.read(file,
                                 datasetName='unwrapPhase-' + date_str)[0]

        fname_coh = out_dir + '/coherence_' + date_str + '.tif'
        fname_unw = out_dir + '/interferogram_' + date_str + '.tif'

        create_geotiff(obj,
                       data=data_coh,
                       outfile=fname_coh,
                       type='coherence',
                       work_dir=inps.work_dir)
        create_geotiff(obj,
                       data=data_unw,
                       outfile=fname_unw,
                       type='interferogram',
                       work_dir=inps.work_dir)
    return