Example #1
def update_template_file(TEMP_FILE, custom_templateObj):
    """
    updates final template file in project directory based on custom template file
    :param TEMP_FILE: file to be updated
    :param custom_templateObj: custom template having extra or new options
    :return: None; the file is rewritten in place when options change
    """

    tempObj = Template(TEMP_FILE)

    update_status = False

    for key, value in custom_templateObj.options.items():
        if key not in tempObj.options or tempObj.options[key] != value:
            tempObj.options[key] = value
            update_status = True

    if update_status:
        print('Updating template file')
        fileText = '#####################################\n'
        for key, value in tempObj.options.items():
            fileText = fileText + "{:<38}".format(key) + "{:<15}".format(
                "= {}".format(value.strip("'"))) + '\n'

        with open(TEMP_FILE, 'w') as file:
            file.write(fileText)
    else:
        print('template file exists: {}, no updates'.format(TEMP_FILE))

    return
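A minimal usage sketch of the merge loop above, using a hypothetical stand-in for the real Template class (the stub just holds an options dict instead of parsing a file):

# Hypothetical stand-in for Template, for illustration only.
class FakeTemplate:
    def __init__(self, options):
        self.options = options

base = FakeTemplate({'ssaraopt.platform': "'SENTINEL-1A'"})
custom = FakeTemplate({'ssaraopt.platform': "'SENTINEL-1A'",
                       'ssaraopt.relativeOrbit': "'128'"})

# Same merge rule as update_template_file: copy any new or changed option.
for key, value in custom.options.items():
    if key not in base.options or base.options[key] != value:
        base.options[key] = value

print(base.options)  # now includes ssaraopt.relativeOrbit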
Example #2
def create_default_template(temp_inps):
    """
    :param temp_inps: input parsed arguments
    :return: temp_inps, updated with the template file information.
    """

    inps = temp_inps

    inps.customTemplateFile = os.path.abspath(inps.customTemplateFile)

    inps.template_file = os.path.join(inps.work_dir, os.path.basename(inps.customTemplateFile))
    
    # read custom template from file
    custom_tempObj = Template(inps.customTemplateFile)

    # check for required options
    required_template_keys = pathObj.required_template_options

    for template_key in required_template_keys:
        if template_key not in custom_tempObj.options:
            raise Exception('ERROR: {0} is required'.format(template_key))

    # find default values from template_defaults.cfg to assign to default_tempObj
    default_tempObj = Template(pathObj.auto_template)
    config_template = get_config_defaults(config_file='template_defaults.cfg')
    for each_section in config_template.sections():
        for (each_key, each_val) in config_template.items(each_section):
            default_tempObj.options.update({each_key: os.path.expandvars(each_val.strip("'"))})

    inps.template = default_tempObj.options

    pathObj.set_isce_defaults(inps)

    # update default_tempObj with custom_tempObj
    for key, value in custom_tempObj.options.items():
        if value not in [None, 'auto']:
            inps.template.update({key: os.path.expandvars(value.strip("'"))})

    if os.path.exists(inps.template_file):
        if not os.path.samefile(inps.customTemplateFile, inps.template_file):
            print('generate template file: {}'.format(inps.template_file))
            shutil.copyfile(inps.customTemplateFile, inps.template_file)
        else:
            print('template file exists: {}'.format(inps.template_file))
    else:
        print('generate template file: {}'.format(inps.template_file))
        shutil.copyfile(inps.customTemplateFile, inps.template_file)

    # merge the custom template into the project template file, appending any new keys
    new_file = update_template_file(inps.template_file, custom_tempObj)
    with open(inps.template_file, 'w') as file:
        file.write(new_file)

    inps.cropbox = pathObj.grab_cropbox(inps)

    # build ssaraopt string from ssara options
    custom_tempObj.options.update(pathObj.correct_for_ssara_date_format(custom_tempObj.options))
    inps.ssaraopt = custom_tempObj.generate_ssaraopt_string()

    return inps
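The defaults-merge step above relies on get_config_defaults, which is not shown here. A minimal sketch of the same idea, assuming the .cfg file is plain INI that configparser can read (the sample section and keys are made up):

import configparser
import os

# A sketch, assuming template_defaults.cfg is INI-style; the real
# get_config_defaults may locate and parse the file differently.
config_template = configparser.ConfigParser()
config_template.read_string("""
[ssaraopt]
ssaraopt.platform = 'SENTINEL-1A'
ssaraopt.startDate = '$SAMPLE_START_DATE'
""")

options = {}
for each_section in config_template.sections():
    for each_key, each_val in config_template.items(each_section):
        # strip surrounding quotes and expand $VARS, as in the loop above
        options[each_key] = os.path.expandvars(each_val.strip("'"))

print(options)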
Example #3
def generate_files_csv(slc_dir, custom_template_file):
    """ Generates a csv file of the files to download serially.
    Uses the `awk` command to generate a csv file containing the data files to be downloaded
    serially. The output csv file is then sent through the `sed` command to remove the first five
    empty values, which would otherwise cause errors in download_ASF_serial.py.
    """

    dataset_template = Template(custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))
    ssaraopt = dataset_template.generate_ssaraopt_string()
    ssaraopt = ssaraopt.split(' ')

    # add intersectWith to ssaraopt string  # FA 8/19: the delta_lat default value should come from a command-line parse
    ssaraopt = add_polygon_to_ssaraopt(dataset_template.get_options(),
                                       ssaraopt.copy(),
                                       delta_lat=0.0)

    filecsv_options = ['ssara_federated_query.py'] + ssaraopt + [
        '--print', '|', 'awk', "'BEGIN{FS=\",\"; ORS=\",\"}{ print $14}'", '>',
        os.path.join(slc_dir, 'files.csv')
    ]

    csv_command = ' '.join(filecsv_options)
    message_rsmas.log(slc_dir, csv_command)
    subprocess.Popen(csv_command, shell=True).wait()
    # FA 8/2019: replaced new_files.csv by files.csv as infile argument
    sed_command = r"sed 's/^.\{5\}//' " + os.path.join(slc_dir, 'files.csv') + \
                  " > " + os.path.join(slc_dir, 'new_files.csv')
    message_rsmas.log(slc_dir, sed_command)
    subprocess.Popen(sed_command, shell=True).wait()
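For reference, the awk|sed pipeline above extracts the 14th comma-separated field (the download URL) from each line of ssara's --print output. A pure-Python sketch of the same extraction, assuming that output layout:

import csv
import io

def extract_download_urls(ssara_print_output):
    # awk's $14 is the 1-based 14th field, i.e. index 13
    urls = []
    for row in csv.reader(io.StringIO(ssara_print_output)):
        if len(row) >= 14:
            urls.append(row[13])
    return urls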
Example #4
def run_ssara(download_dir, template, delta_lat, logger, run_number=1):
    """ Runs ssara_federated_query-cj.py and checks for download issues.
        Runs ssara_federated_query-cj.py and checks continuously for whether the data download has hung without
        comleting or exited with an error code. If either of the above occur, the function is run again, for a
        maxiumum of 10 times.
        Parameters: run_number: int, the current iteration the wrapper is on (maxiumum 10 before quitting)
        Returns: status_cod: int, the status of the donwload (0 for failed, 1 for success)
    """

    # Compute SSARA options to use

    dataset_template = Template(template)
    dataset_template.options.update(pathObj.correct_for_ssara_date_format(dataset_template.options))

    ssaraopt = dataset_template.generate_ssaraopt_string()
    ssaraopt = ssaraopt.split(' ')

    # add intersectWith to ssaraopt string
    ssaraopt = add_polygon_to_ssaraopt(dataset_template.get_options(), ssaraopt.copy(), delta_lat)

    # get kml file and create listing
    get_ssara_kml(download_dir, ssaraopt=ssaraopt)

    # Runs ssara_federated_query.bash with proper options
    ssara_call = ['ssara_federated_query.bash'] + ssaraopt + ['--print', '--download']

    # FA 9/20: could not figure out how to get the string into a bash shell variable, hence writing it to a file
    #print( ' '.join(ssara_call) )

    with open('../ssara_command.txt', 'w') as f:
        f.write(' '.join(ssara_call) + '\n')

    return 
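A hypothetical consumer of the ../ssara_command.txt file written above, reading the saved command line back and executing it from Python (assumes the file holds one shell command on a single line):

import shlex
import subprocess

with open('../ssara_command.txt') as f:
    command = f.read().strip()

subprocess.run(shlex.split(command), check=True)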
Example #5
def create_default_template(temp_inps):
    """
    :param temp_inps: input parsed arguments
    :return: temp_inps, updated with the template file information.
    """
    inps = temp_inps

    inps.custom_template_file = os.path.abspath(inps.custom_template_file)

    inps.template_file = os.path.join(inps.work_dir, os.path.basename(inps.custom_template_file))

    # read custom template from file
    custom_tempObj = Template(inps.custom_template_file)

    if 'acquisition_mode' not in custom_tempObj.options:
        print('WARNING: "acquisition_mode" is not given --> default: tops   (available options: tops, stripmap)')
        inps.prefix = 'tops'
    else:
        inps.prefix = custom_tempObj.options['acquisition_mode']

    # check for required options
    required_template_keys = pathObj.required_template_options(inps.prefix)

    for template_key in required_template_keys:
        if template_key not in custom_tempObj.options:
            raise Exception('ERROR: {0} is required'.format(template_key))

    # find default values from minsar_template_defaults.cfg to assign to default_tempObj
    default_tempObj = Template(pathObj.auto_template)
    config_template = get_config_defaults(config_file='minsar_template_defaults.cfg')

    for each_section in config_template.sections():
        for (each_key, each_val) in config_template.items(each_section):
            status = (inps.prefix == 'tops' and each_key.startswith('stripmap')) or \
                     (inps.prefix == 'stripmap' and each_key.startswith('tops'))
            if status:
                default_tempObj.options.pop(each_key, None)  # default avoids KeyError if the key is absent
            else:
                default_tempObj.options.update({each_key: os.path.expandvars(each_val.strip("'"))})

    inps.template = default_tempObj.options
    pathObj.set_isce_defaults(inps)
    # update default_tempObj with custom_tempObj
    for key, value in custom_tempObj.options.items():
        if value not in [None, 'auto']:
            inps.template.update({key: os.path.expandvars(value.strip("'"))})

    # update template file if necessary
    if not os.path.exists(inps.template_file):
        shutil.copyfile(inps.custom_template_file, inps.template_file)
    else:
        update_template_file(inps.template_file, custom_tempObj)

    inps.cropbox = pathObj.grab_cropbox(inps)

    # build ssaraopt string from ssara options
    custom_tempObj.options.update(pathObj.correct_for_ssara_date_format(custom_tempObj.options))
    inps.ssaraopt = custom_tempObj.generate_ssaraopt_string()
     
    return inps
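A sketch of the prefix filter used in the config loop above: options belonging to the other acquisition mode are dropped, everything else is kept (the sample keys are made up for illustration):

prefix = 'tops'
defaults = {
    'topsStack.numConnections': '3',
    'stripmapStack.timeThreshold': '100',
    'ssaraopt.platform': 'SENTINEL-1A',
}
other_prefix = 'stripmap' if prefix == 'tops' else 'tops'
filtered = {key: val for key, val in defaults.items() if not key.startswith(other_prefix)}
print(filtered)  # the stripmapStack key is gone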
Example #6
def get_newest_data_date(template_file):
    """
    Obtains the most recent image date for a dataset
    :param template_file: the template file corresponding to the dataset being obtained
    :return: the newest image date in "YYYY-MM-DD T H:M:S.00000" format
    """

    delta_lat = 0.0  # FA 8/2019: this should use the same default as download_ssara_rsmas.py, which is
    # set in utils/process_utilities.py: flag_parser.add_argument('--delta_lat', dest='delta_lat', default='0.0', type=float)

    dataset_template = Template(template_file)
    dataset_template.options.update(
        pathObj.correct_for_ssara_date_format(dataset_template.options))

    ssaraopt = dataset_template.generate_ssaraopt_string()
    ssaraopt = ssaraopt.split(' ')

    # add intersectWith to ssaraopt string
    ssaraopt = add_polygon_to_ssaraopt(dataset_template.get_options(),
                                       ssaraopt.copy(), delta_lat)

    ssaraopt_cmd = ['ssara_federated_query.py'] + ssaraopt + ['--print']
    ssaraopt_cmd = ' '.join(ssaraopt_cmd)

    print(ssaraopt_cmd)
    # Yields a list of images in the following format:
    # ASF,Sentinel-1A,15775,2017-03-20T11:49:56.000000,2017-03-20T11:50:25.000000,128,3592,3592,IW,NA,DESCENDING,R,VV+VH,https://datapool.asf.alaska.edu/SLC/SA/S1A_IW_SLC__1SDV_20170320T114956_20170320T115025_015775_019FA4_097A.zip
    ssara_output = subprocess.check_output(ssaraopt_cmd, shell=True)

    newest_data = ssara_output.decode('utf-8').split("\n")[-2]  # last line is empty; [-2] is the newest acquisition

    return datetime.strptime(newest_data.split(",")[3], DATE_FORMAT)
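A worked example of the final parsing step, with a sample --print line; DATE_FORMAT is assumed here to be the ISO-like layout ssara emits:

from datetime import datetime

DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'  # assumed value of the module-level constant

line = ('ASF,Sentinel-1A,15775,2017-03-20T11:49:56.000000,'
        '2017-03-20T11:50:25.000000,128,3592,3592,IW,NA,DESCENDING,R,VV+VH,'
        'https://datapool.asf.alaska.edu/SLC/SA/S1A_IW_SLC__1SDV_...zip')

# field 4 (index 3) is the acquisition start time
print(datetime.strptime(line.split(',')[3], DATE_FORMAT))  # 2017-03-20 11:49:56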
Example #7
def run_ssara(work_dir, template, delta_lat):
    """ Runs ssara_federated_query.py and checks for differences
    """

    # Compute SSARA options to use

    dataset_template = Template(template)

    ssaraopt = dataset_template.generate_ssaraopt_string()
    ssaraopt = ssaraopt.split(' ')

    # add intersectWith to ssaraopt string
    ssaraopt_polygon = add_polygon_to_ssaraopt(dataset_template, ssaraopt.copy(), delta_lat)

    # get kml file and create listing
    compare_ssara_listings(work_dir, ssaraopt, ssaraopt_polygon)

    return 0
Example #8
def auto_template_not_existing_options(args):

    job_options = ['QUEUENAME', 'CPUS_PER_NODE', 'THREADS_PER_CORE', 'MAX_JOBS_PER_WORKFLOW', 'MAX_JOBS_PER_QUEUE',
                   'WALLTIME_FACTOR', 'MEM_PER_NODE', 'job_submission_scheme']

    if hasattr(args, 'custom_template_file'):
        from minsar.objects.dataset_template import Template
        template = Template(args.custom_template_file).options

        for option in job_options:
            if option not in template:
                template[option] = 'auto'
    else:
        template = {}
        for option in job_options:
            template[option] = 'auto'

    return template
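A quick usage sketch: with no custom_template_file attribute on the args object, every job option falls back to 'auto':

from types import SimpleNamespace

args = SimpleNamespace()  # no custom_template_file attribute
template = auto_template_not_existing_options(args)
print(template['QUEUENAME'])  # 'auto'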
Example #9
def update_template_file(TEMP_FILE, custom_templateObj):
    """
    updates final template file in project directory based on custom template file
    :param TEMP_FILE: file to be updated
    :param custom_templateObj: custom template having extra or new options
    :return: updated file text
    """

    with open(TEMP_FILE, 'r') as file:
        fileText = file.read()

    tempObj = Template(TEMP_FILE)

    for key, value in custom_templateObj.options.items():
        if key not in tempObj.options:
            fileText = fileText + "{:<38}".format(key) + "{:<15}".format("= {}".format(value.strip("'"))) + '\n'

    return fileText
Example #10
def run_ssara(slc_dir, template, delta_lat, logger, run_number=1):
    """ Runs ssara_federated_query-cj.py and checks for download issues.
        Runs ssara_federated_query-cj.py and checks continuously for whether the data download has hung without
        comleting or exited with an error code. If either of the above occur, the function is run again, for a
        maxiumum of 10 times.
        Parameters: run_number: int, the current iteration the wrapper is on (maxiumum 10 before quitting)
        Returns: status_cod: int, the status of the donwload (0 for failed, 1 for success)
    """

    logger.log(loglevel.INFO, "RUN NUMBER: %s", str(run_number))
    if run_number > 10:
        return 0

    logger.log(loglevel.INFO, "PASSED RUN NUMBER > 10")

    # Compute SSARA options to use

    dataset_template = Template(template)
    dataset_template.options.update(pathObj.correct_for_ssara_date_format(dataset_template.options))

    ssaraopt = dataset_template.generate_ssaraopt_string()
    ssaraopt = ssaraopt.split(' ')
    logger.log(loglevel.INFO, "GENERATED SSARAOPT STRING")

    # add intersectWith to ssaraopt string
    ssaraopt = add_polygon_to_ssaraopt(dataset_template.get_options(), ssaraopt.copy(), delta_lat)

    # get kml file and create listing
    get_ssara_kml_and_listing(slc_dir, ssaraopt=ssaraopt)

    # Runs ssara_federated_query-cj.py with proper options
    ssara_call = ['ssara_federated_query-cj.py'] + ssaraopt + ['--print', '--download']
    print('Download data using:\n' + ' '.join(ssara_call))
    message_rsmas.log(slc_dir, ' '.join(ssara_call))
    ssara_process = subprocess.Popen(' '.join(ssara_call), shell=True)

    logger.log(loglevel.INFO, "STARTED PROCESS")

    completion_status = ssara_process.poll()  # the completion status of the process
    hang_status = False  # whether or not the download has hung
    wait_time = 2  # wait time in minutes between hang-status checks
    prev_size = -1  # initial download directory size
    i = 0  # index for waiting periods (for calculation of total time only)

    logger.log(loglevel.INFO, "INITIAL COMPLETION STATUS: %s", str(completion_status))

    # while the process has not completed
    while completion_status is None:

        i = i + 1

        # Compute the current download directory size
        curr_size = int(subprocess.check_output(['du', '-s', os.getcwd()]).split()[0].decode('utf-8'))

        # Compare the current and previous directory sizes to determine hang status
        if prev_size == curr_size:
            hang_status = True
            logger.log(loglevel.WARNING, "SSARA Hung")
            ssara_process.terminate()  # terminate the process because the download hung
            break  # break the completion loop

        prev_size = curr_size  # store current size for comparison after waiting

        time.sleep(60 * wait_time)  # wait 'wait_time' minutes before continuing (checking for completion)
        completion_status = ssara_process.poll()
        logger.log(loglevel.INFO,
                   "{} minutes: {:.1f}GB, completion_status {}".format(i * wait_time, curr_size / 1024 / 1024,
                                                                       completion_status))

    exit_code = completion_status  # get the exit code of the command
    ssara_process.terminate()
    logger.log(loglevel.INFO, "EXIT CODE: %s", str(exit_code))

    bad_codes = [137, -9]

    # If the exit code is one that signifies an error, rerun the entire command
    if exit_code in bad_codes or hang_status:
        if exit_code in bad_codes:
            logger.log(loglevel.WARNING, "Exited with bad exit code, running again")
        if hang_status:
            logger.log(loglevel.WARNING, "Hanging, running again")

        return run_ssara(slc_dir, template, delta_lat, logger, run_number=run_number + 1)  # propagate the retry status

    return 0
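The polling loop above could be factored into a reusable helper. A sketch under the same assumptions (du reports size in KB; a hang means the directory size did not grow between two checks):

import subprocess
import time

# Returns the process exit code, or None if the download stalled.
def poll_until_done_or_hung(process, watch_dir, wait_minutes=2):
    prev_size = -1
    while process.poll() is None:
        curr_size = int(subprocess.check_output(['du', '-s', watch_dir]).split()[0].decode('utf-8'))
        if curr_size == prev_size:
            process.terminate()
            return None  # hang detected
        prev_size = curr_size
        time.sleep(60 * wait_minutes)
    return process.poll()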
Example #11
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(
        inps.work_dir,
        os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    global logger
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    os.chdir(inps.work_dir)

    if inps.template[inps.prefix + 'Stack.slcDir'] is not None:
        inps.slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    global project_slc_dir
    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    if not os.path.exists(inps.slc_dir):
        os.mkdir(inps.slc_dir)

    os.chdir(inps.slc_dir)

    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    dataset_template = Template(inps.custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv",
                     shell=True).wait()
    seasonal_start_date = None
    seasonal_end_date = None

    try:
        if dataset_template.options['seasonalStartDate'] is not None and \
                dataset_template.options['seasonalEndDate'] is not None:
            seasonal_start_date = dataset_template.options['seasonalStartDate']
            seasonal_end_date = dataset_template.options['seasonalEndDate']
    except KeyError:
        pass

    if inps.seasonalStartDate is not None and inps.seasonalEndDate is not None:
        seasonal_start_date = inps.seasonalStartDate
        seasonal_end_date = inps.seasonalEndDate

    if seasonal_start_date is not None and seasonal_end_date is not None:
        generate_seasonal_files_csv(dataset_template, seasonal_start_date,
                                    seasonal_end_date)
    else:
        generate_files_csv(project_slc_dir, dataset_template)

    parallel = False

    try:
        if dataset_template.options['parallel'] == 'yes':
            parallel = True
    except KeyError:
        pass
    # if inps.parallel == 'yes':
    #     parallel = True

    threads = os.cpu_count()

    try:
        if dataset_template.options['threads'] is not None:
            threads = int(dataset_template.options['threads'])
    except KeyError:
        pass
    # if inps.processes is not None:
    #     processes = inps.processes

    if parallel:
        run_parallel_download_asf_serial(project_slc_dir, threads)
    else:
        successful = run_download_asf_serial(project_slc_dir, logger)
        logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))

    change_file_permissions()
    logger.log(loglevel.INFO, "------------------------------------")
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv",
                     shell=True).wait()

    return None
Example #12
def run_generate_chunk_template_files(inps):
    """ create e*chunk*.template files """

    project_name = putils.get_project_name(inps.custom_template_file)

    location_name, sat_direction, sat_track = putils.split_project_name(project_name)

    location_name = location_name.split('Big')[0]

    chunk_templates_dir = inps.work_dir + '/chunk_templates'
    chunk_templates_dir_string = '$SCRATCHDIR/' + project_name + '/chunk_templates'
    os.makedirs(chunk_templates_dir, exist_ok=True)

    command_list = []
    sleep_time = 0

    command_options = ''
    if inps.start_step is None and inps.do_step is None:
        inps.start_step = 'jobfiles'

    if inps.do_step is not None:
        command_options = command_options + ' --dostep ' + inps.do_step
    else:
        if inps.start_step is not None:
            command_options = command_options + ' --start ' + inps.start_step
        if inps.end_step is not None:
            command_options = command_options + ' --end ' + inps.end_step

    prefix = 'tops'
    bbox_list = inps.template[prefix + 'Stack.boundingBox'].split(' ')

    # strip stray quotes: ["'-8.75", '-7.8', '115.0', "115.7'"] --> ['-8.75', '-7.8', '115.0', '115.7']
    # (needed for run_operations.py; should be modified so that this is not needed)
    bbox_list = [val.replace("'", '') for val in bbox_list]

    tmp_min_lat = float(bbox_list[0])
    tmp_max_lat = float(bbox_list[1])

    min_lat = math.ceil(tmp_min_lat)
    max_lat = math.floor(tmp_max_lat)

    lat = min_lat

    chunk_number = 0
    chunk1_option = ''

    while lat < max_lat:
        tmp_min_lat = lat
        tmp_max_lat = lat + inps.lat_step

        chunk_name = location_name + 'Chunk' + str(int(lat)) + sat_direction + sat_track
        chunk_template_file = chunk_templates_dir + '/' + chunk_name + '.template'
        chunk_template_file_base = chunk_name + '.template'
        shutil.copy(inps.custom_template_file, chunk_template_file)

        chunk_bbox_list = bbox_list.copy()  # copy so the shared bbox_list is not mutated across chunks
        chunk_bbox_list[0] = str(float(tmp_min_lat - inps.lat_margin))
        chunk_bbox_list[1] = str(float(tmp_max_lat + inps.lat_margin))
        print(chunk_name, tmp_min_lat, tmp_max_lat, chunk_bbox_list)

        custom_tempObj = Template(inps.custom_template_file)
        custom_tempObj.options['topsStack.boundingBox'] = ' '.join(
            chunk_bbox_list)

        slcDir = '$SCRATCHDIR/' + project_name + '/SLC'
        demDir = '$SCRATCHDIR/' + project_name + '/DEM'
        custom_tempObj.options['topsStack.slcDir'] = slcDir
        custom_tempObj.options['topsStack.demDir'] = demDir

        #if inps.download_flag in [ True , 'True']:
        #   del(custom_tempObj.options['topsStack.slcDir'])

        if 'download' in command_options:
            del custom_tempObj.options['topsStack.slcDir']

        putils.write_template_file(chunk_template_file, custom_tempObj)
        putils.beautify_template_file(chunk_template_file)

        chunk_number = chunk_number + 1
        if chunk_number > 1 and inps.bash_script == 'minsarApp.bash':
            chunk1_option = ' --no_download_ECMWF '

        command = inps.bash_script + ' ' + chunk_templates_dir_string + '/' + chunk_template_file_base + \
                  command_options + chunk1_option + ' --sleep ' + str(sleep_time) + ' &'

        command_list.append(command)

        lat = lat + inps.lat_step
        sleep_time = sleep_time + inps.wait_time
        chunk1_option = ''

    commands_file = inps.work_dir + '/minsar_commands.txt'
    with open(commands_file, 'w') as f:
        print()
        for item in command_list:
            print(item)
            f.write(item + '\n')
        print()
        f.write('\n')

    return
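A worked example of the chunk arithmetic above: ceil/floor shrink the box to whole degrees, so a bounding box narrower than one full degree produces no chunks at all (sample numbers taken from the comment above):

import math

bbox_min_lat, bbox_max_lat = -8.75, -7.8   # sample bounding box
lat_step, lat_margin = 1, 0.1              # sample chunking parameters

lat = math.ceil(bbox_min_lat)       # -8
max_lat = math.floor(bbox_max_lat)  # -8
while lat < max_lat:
    print(lat - lat_margin, lat + lat_step + lat_margin)
    lat += lat_step
# nothing printed: ceil(-8.75) == floor(-7.8) == -8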
Example #13
def run_generate_chunk_template_files(inps):
    """ create e*chunk*.template files """

    project_name = putils.get_project_name(inps.custom_template_file)

    location_name, sat_direction, sat_track = putils.split_project_name(project_name)

    chunk_templates_dir = inps.work_dir + '/chunk_templates'
    os.makedirs(chunk_templates_dir, exist_ok=True)

    commands_file = inps.work_dir + '/minsar_commands.txt'
    f = open(commands_file, "w")

    if inps.download_flag in [True, 'True']:  # flag may arrive as a bool or a string, as in the check inside the loop
        minsarApp_option = '--start download'
    else:
        minsarApp_option = '--start dem'

    prefix = 'tops'
    bbox_list = inps.template[prefix + 'Stack.boundingBox'].split(' ')

    # strip stray quotes: ["'-8.75", '-7.8', '115.0', "115.7'"] --> ['-8.75', '-7.8', '115.0', '115.7']
    # (needed for run_operations.py; should be modified so that this is not needed)
    bbox_list = [val.replace("'", '') for val in bbox_list]

    tmp_min_lat = float(bbox_list[0])
    tmp_max_lat = float(bbox_list[1])

    min_lat = math.ceil(tmp_min_lat)
    max_lat = math.floor(tmp_max_lat)

    lat = min_lat
    while lat < max_lat:
        tmp_min_lat = lat
        tmp_max_lat = lat + inps.lat_step

        chunk_name = location_name + 'Chunk' + str(int(lat)) + sat_direction + sat_track
        chunk_template_file = chunk_templates_dir + '/' + chunk_name + '.template'
        shutil.copy(inps.custom_template_file, chunk_template_file)

        chunk_bbox_list = bbox_list.copy()  # copy so the shared bbox_list is not mutated across chunks
        chunk_bbox_list[0] = str(float(tmp_min_lat - inps.lat_margin))
        chunk_bbox_list[1] = str(float(tmp_max_lat + inps.lat_margin))
        print(chunk_name, tmp_min_lat, tmp_max_lat, chunk_bbox_list)

        custom_tempObj = Template(inps.custom_template_file)
        custom_tempObj.options['topsStack.boundingBox'] = ' '.join(
            chunk_bbox_list)

        if inps.download_flag in [True, 'True']:
            del custom_tempObj.options['topsStack.slcDir']

        putils.write_template_file(chunk_template_file, custom_tempObj)
        putils.beautify_template_file(chunk_template_file)

        minsar_command = 'minsarApp.bash ' + chunk_template_file + ' ' + minsarApp_option

        f.write(minsar_command + '\n')

        lat = lat + inps.lat_step

    f.close()

    return
Example #14
def main(iargs=None):

    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = inps.work_dir + '/asfserial_rsmas.log'
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        work_dir = inps.work_dir

        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']

        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir,
                         inps.wall_time)

    os.chdir(inps.work_dir)

    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    os.chdir(inps.slc_dir)

    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    dataset_template = Template(inps.custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))
    subprocess.Popen("rm new_files.csv", shell=True).wait()
    standardTuple = (inps, dataset_template)
    if inps.seasonalStartDate is not None and inps.seasonalEndDate is not None:
        ogStartYearInt = int(dataset_template.options['ssaraopt.startDate'][:4])
        if int(inps.seasonalStartDate) > int(inps.seasonalEndDate):
            y = 1
        else:
            y = 0
        YearRange = int(dataset_template.options['ssaraopt.endDate'][:4]) - ogStartYearInt + 1
        if YearRange > 1 and y == 1:
            YearRange = YearRange - 1
        seasonalStartDateAddOn = '-' + inps.seasonalStartDate[:2] + '-' + inps.seasonalStartDate[2:]
        seasonalEndDateAddOn = '-' + inps.seasonalEndDate[:2] + '-' + inps.seasonalEndDate[2:]
        ogEndDate = dataset_template.options['ssaraopt.endDate']
        for x in range(YearRange):
            seasonalTuple = standardTuple + (x, ogStartYearInt, y, YearRange,
                                             seasonalStartDateAddOn,
                                             seasonalEndDateAddOn, ogEndDate)
            generate_files_csv(project_slc_dir, inps.custom_template_file,
                               seasonalTuple)
            y += 1
    else:
        generate_files_csv(project_slc_dir, inps.custom_template_file,
                           standardTuple)
    successful = run_download_asf_serial(project_slc_dir, logger)
    change_file_permissions()
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
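A worked example of the seasonal add-on strings built above: an 'MMDD' input such as '0501' becomes '-05-01', ready to append to a year:

seasonalStartDate = '0501'
seasonalStartDateAddOn = '-' + seasonalStartDate[:2] + '-' + seasonalStartDate[2:]
print('2017' + seasonalStartDateAddOn)  # 2017-05-01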