def generate_files_csv(slc_dir, custom_template_file):
    """ Generates a csv file of the files to download serially.
    Uses the `awk` command to generate a csv file containing the data files to be downloaded
    serially. The output csv file is then passed through the `sed` command to remove the first five
    empty values, which would otherwise cause errors in download_ASF_serial.py.
    """

    dataset_template = Template(custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))
    ssaraopt = dataset_template.generate_ssaraopt_string()
    ssaraopt = ssaraopt.split(' ')

    # add intersectWith to ssaraopt string  # FA 8/19: the delta_lat default value should come from a command_line parse
    ssaraopt = add_polygon_to_ssaraopt(dataset_template.get_options(),
                                       ssaraopt.copy(),
                                       delta_lat=0.0)

    filecsv_options = ['ssara_federated_query.py'] + ssaraopt + [
        '--print', '|', 'awk', "'BEGIN{FS=\",\"; ORS=\",\"}{ print $14}'", '>',
        os.path.join(slc_dir, 'files.csv')
    ]

    csv_command = ' '.join(filecsv_options)
    message_rsmas.log(slc_dir, csv_command)
    subprocess.Popen(csv_command, shell=True).wait()
    # FA 8/2019: replaced new_files.csv by files.csv as infile argument
    sed_command = "sed 's/^.\{5\}//' " + os.path.join(slc_dir, 'files.csv') + \
                  ">" + os.path.join(slc_dir, 'new_files.csv')
    message_rsmas.log(slc_dir, sed_command)
    subprocess.Popen(sed_command, shell=True).wait()
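
# A minimal pure-Python sketch of what the awk/sed pipeline in generate_files_csv
# produces, assuming `query_output` (a hypothetical variable, for illustration
# only) holds the comma-separated text printed by ssara_federated_query.py --print,
# with the download URL in field 14.
def write_files_csv_sketch(slc_dir, query_output):
    # awk 'BEGIN{FS=","; ORS=","}{print $14}': take the 14th comma-separated
    # field of every line and join the results with commas.
    urls = [line.split(',')[13] for line in query_output.splitlines()
            if line.count(',') >= 13]
    joined = ','.join(urls) + ','
    with open(os.path.join(slc_dir, 'files.csv'), 'w') as f:
        f.write(joined)
    # sed 's/^.\{5\}//': drop the first five characters (the leading empty
    # values) before the list is handed to download_ASF_serial.py.
    with open(os.path.join(slc_dir, 'new_files.csv'), 'w') as f:
        f.write(joined[5:])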
Example #2
"""
   Author: Falk Amelung, Sara Mirzaee
"""
###############################################################################

import os
import sys
import glob
import subprocess
import h5py
from mintpy.utils import readfile
import minsar.utils.process_utilities as putils
from minsar.objects.auto_defaults import PathFind
from minsar.objects import message_rsmas

pathObj = PathFind()

###########################################################################################


def main(iargs=None):
    """ email mintpy or insarmaps results """

    inps = putils.cmd_line_parse(iargs, script='email_results')

    email_address = os.getenv('NOTIFICATIONEMAIL')

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
Example #3
# Backwards compatibility for Python 2
from __future__ import print_function

import os
import sys
import shutil
import time
import argparse
import minsar
import minsar.workflow
from minsar.objects import message_rsmas
import minsar.utils.process_utilities as putils
import minsar.job_submission as js
from minsar.objects.auto_defaults import PathFind

pathObj = PathFind()
step_list, step_help = pathObj.process_rsmas_help()
###############################################################################

EXAMPLE = """example:
      process_rsmas.py  <custom_template_file>              # run with default and custom templates
      process_rsmas.py  <custom_template_file>  --submit    # submit as job
      process_rsmas.py  -h / --help                       # help
      process_rsmas.py  -H                                # print default template options
      # Run with --start/stop/step options
      process_rsmas.py GalapagosSenDT128.template --step  download        # run the step 'download' only
      process_rsmas.py GalapagosSenDT128.template --start download        # start from the step 'download'
      process_rsmas.py GalapagosSenDT128.template --stop  ifgrams         # end after the step 'ifgrams' (interferograms)
    """

Example #4
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    global logger
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    os.chdir(inps.work_dir)

    if inps.template[inps.prefix + 'Stack.slcDir'] is not None:
        inps.slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    global project_slc_dir
    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    if not os.path.exists(inps.slc_dir):
        os.mkdir(inps.slc_dir)

    os.chdir(inps.slc_dir)

    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    dataset_template = Template(inps.custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv",
                     shell=True).wait()
    seasonal_start_date = None
    seasonal_end_date = None

    try:
        if dataset_template.options['seasonalStartDate'] is not None and \
                dataset_template.options['seasonalEndDate'] is not None:
            seasonal_start_date = dataset_template.options['seasonalStartDate']
            seasonal_end_date = dataset_template.options['seasonalEndDate']
    except KeyError:
        pass

    if inps.seasonalStartDate is not None and inps.seasonalEndDate is not None:
        seasonal_start_date = inps.seasonalStartDate
        seasonal_end_date = inps.seasonalEndDate

    if seasonal_start_date is not None and seasonal_end_date is not None:
        generate_seasonal_files_csv(dataset_template, seasonal_start_date,
                                    seasonal_end_date)
    else:
        generate_files_csv(project_slc_dir, dataset_template)

    parallel = False

    try:
        if dataset_template.options['parallel'] == 'yes':
            parallel = True
    except KeyError:
        pass
    # if inps.parallel == 'yes':
    #     parallel = True

    threads = os.cpu_count()

    try:
        if dataset_template.options['threads'] is not None:
            threads = int(dataset_template.options['threads'])
    except KeyError:
        pass
    # if inps.processes is not None:
    #     processes = inps.processes

    if parallel:
        run_parallel_download_asf_serial(project_slc_dir, threads)
    else:
        successful = run_download_asf_serial(project_slc_dir, logger)
        logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))

    change_file_permissions()
    logger.log(loglevel.INFO, "------------------------------------")
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv",
                     shell=True).wait()

    return None
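
# A minimal sketch (illustrative only) of reading the same 'parallel' and
# 'threads' template options without the try/except blocks above, using
# dict.get() with defaults.
def read_download_options_sketch(dataset_template):
    options = dataset_template.options
    parallel = options.get('parallel', 'no') == 'yes'
    threads = int(options.get('threads') or os.cpu_count())
    return parallel, threads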
Example #5
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        work_dir = inps.work_dir

        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']

        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir,
                         inps.wall_time)

    os.chdir(inps.work_dir)

    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    os.chdir(inps.slc_dir)

    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    dataset_template = Template(inps.custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))
    subprocess.Popen("rm new_files.csv", shell=True).wait()
    standardTuple = (inps, dataset_template)
    if inps.seasonalStartDate is not None and inps.seasonalEndDate is not None:
        ogStartYearInt = int(dataset_template.options['ssaraopt.startDate'][:4])
        if int(inps.seasonalStartDate) > int(inps.seasonalEndDate):
            y = 1
        else:
            y = 0
        YearRange = int(dataset_template.options['ssaraopt.endDate'][:4]) - ogStartYearInt + 1
        if YearRange > 1 and y == 1:
            YearRange = YearRange - 1
        seasonalStartDateAddOn = '-' + inps.seasonalStartDate[:2] + '-' + inps.seasonalStartDate[2:]
        seasonalEndDateAddOn = '-' + inps.seasonalEndDate[:2] + '-' + inps.seasonalEndDate[2:]
        ogEndDate = dataset_template.options['ssaraopt.endDate']
        for x in range(YearRange):
            seasonalTuple = standardTuple + (x, ogStartYearInt, y, YearRange,
                                             seasonalStartDateAddOn,
                                             seasonalEndDateAddOn, ogEndDate)
            generate_files_csv(project_slc_dir, inps.custom_template_file,
                               seasonalTuple)
            y += 1
    else:
        generate_files_csv(project_slc_dir, inps.custom_template_file,
                           standardTuple)
    successful = run_download_asf_serial(project_slc_dir, logger)
    change_file_permissions()
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
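
# A minimal sketch (illustrative only, not the project's generate_files_csv) of
# the seasonal-window expansion performed by the loop above: one (startDate,
# endDate) pair per acquisition year, handling the wrap-around case where the
# window crosses the year boundary (seasonalStartDate > seasonalEndDate).
def expand_seasonal_windows_sketch(start_year, end_year, seasonal_start, seasonal_end):
    wraps = int(seasonal_start) > int(seasonal_end)   # window crosses New Year
    start_addon = '-{}-{}'.format(seasonal_start[:2], seasonal_start[2:])
    end_addon = '-{}-{}'.format(seasonal_end[:2], seasonal_end[2:])
    windows = []
    for year in range(start_year, end_year + (0 if wraps else 1)):
        windows.append((str(year) + start_addon,
                        str(year + (1 if wraps else 0)) + end_addon))
    return windows

# e.g. expand_seasonal_windows_sketch(2016, 2019, '1101', '0331')
#  -> [('2016-11-01', '2017-03-31'), ('2017-11-01', '2018-03-31'),
#      ('2018-11-01', '2019-03-31')]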