示例#1
0
def get_datasets_to_process(template_files, dataset=None):
    """
    Extracts project names from the list of template files.
    :param template_files: the list of template files to parse the dataset name from
    :param dataset: a single dataset to return instead of parsing the templates
    :return: a list of dataset names
    """
    # An explicitly requested dataset takes precedence over the template files.
    if dataset:
        return [dataset]
    # Otherwise derive one dataset name per template file.
    return [putils.get_project_name(template) for template in template_files]
def main(iargs=None):
    """
    Upload data products (mintpy products and/or image products) to the
    remote data server using ssh/scp/rsync.

    :param iargs: optional list of command line arguments; defaults to sys.argv
    :raises Exception: if any remote command exits with a non-zero status
    """
    inps = putils.cmd_line_parse(iargs, script='upload_data_products')

    # Image products and mintpy products are mutually exclusive uploads.
    if inps.image_products_flag:
        inps.mintpy_products_flag = False

    os.chdir(inps.work_dir)

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1:]

    message_rsmas.log(inps.work_dir, os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    # get DATA_SERVER and return if it does not exist
    DATA_SERVER = '[email protected]'

    #try:
    #    DATA_SERVER = os.getenv('DATA_SERVER')
    #except:
    #    return

    project_name = putils.get_project_name(inps.custom_template_file)

    if inps.mintpy_products_flag:

        REMOTE_DIR = '/data/HDF5EOS/'
        destination = DATA_SERVER + ':' + REMOTE_DIR

        scp_list = [
                '/mintpy/pic',
                '/mintpy/*.he5',
                '/mintpy/inputs',
                '/remora_*'
                ]

        if inps.mintpy_products_all_flag:
            scp_list = ['/mintpy']

        command = 'ssh ' + DATA_SERVER + ' mkdir -p ' + REMOTE_DIR + project_name + '/mintpy'
        print(command)
        status = subprocess.Popen(command, shell=True).wait()
        # Compare with != : "is not 0" tests object identity, which is
        # unreliable for integers and a SyntaxWarning on Python 3.8+.
        if status != 0:
            raise Exception('ERROR in upload_data_products.py')

        for pattern in scp_list:
            # Only upload patterns that match at least one local file.
            if len(glob.glob(inps.work_dir + '/' + pattern)) >= 1:
                command = 'scp -r ' + inps.work_dir + pattern + ' ' + destination + project_name + '/'.join(pattern.split('/')[0:-1])
                print(command)
                status = subprocess.Popen(command, shell=True).wait()
                if status != 0:
                    raise Exception('ERROR in upload_data_products.py')

                print('\nAdjusting permissions:')
                command = 'ssh ' + DATA_SERVER + ' chmod -R u=rwX,go=rX ' + REMOTE_DIR + project_name
                print(command)
                status = subprocess.Popen(command, shell=True).wait()
                if status != 0:
                    raise Exception('ERROR in upload_data_products.py')

    if inps.image_products_flag:
        REMOTE_DIR = '/data/image_products/'
        destination = DATA_SERVER + ':' + REMOTE_DIR

        rsync_list = [
                '/image_products/*',
                ]

        command = 'ssh ' + DATA_SERVER + ' mkdir -p ' + REMOTE_DIR + project_name
        print(command)
        status = subprocess.Popen(command, shell=True).wait()
        if status != 0:
            raise Exception('ERROR in upload_data_products.py')

        for pattern in rsync_list:
            command = 'rsync -avuz -e ssh --chmod=Du=rwx,Dg=rx,Do=rx,Fu=rw,Fg=r,Fo=r ' + inps.work_dir + pattern + ' ' + destination + project_name + '/'.join(pattern.split('/')[0:-1])
            print(command)
            status = subprocess.Popen(command, shell=True).wait()
            if status != 0:
                raise Exception('ERROR in upload_data_products.py')

        return None

    # NOTE: unreachable code that followed this return (a polygon/ssaraopt
    # fragment referencing undefined min_lon/max_lon) has been removed.
    return None


if __name__ == "__main__":
    # Parse the command line and derive the project layout from the template.
    inps = command_line_parse(sys.argv[1:])

    inps.project_name = putils.get_project_name(custom_template_file=inps.template)
    inps.work_dir = putils.get_work_directory(None, inps.project_name)
    inps.slc_dir = inps.work_dir + "/SLC"

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        # Job name is the template file's basename without its extension.
        job_name = inps.template.split(os.sep)[-1].split('.')[0]
        js.submit_script(job_name, 'download_ssara_rsmas', sys.argv[:],
                         inps.work_dir, '24:00')
        sys.exit(0)
示例#4
0
def run_operations(args):
    """
    Runs the entire data processing routine from start to finish. Steps as follows:
        1. Generates the template files for all of the datasets in the provided CSV or Google Sheet file
        2. Gets the dataset names for each template file
        for each dataset:
            3. Gets the newest available image date from `ssara_federated_query.py`
            4. Gets the last image date downloaded from the `stored_date.date` file
            5. Runs `process_rsmas.py` if there is new data available
        6. Waits for all of the output files from submitted `process_rsmas` calls to exist
    :param args: command line arguments to use
    """
    inps = command_line_parse(args)

    # Remove and reinitiate $OPERATIONS directory
    if inps.restart:
        shutil.rmtree(OPERATIONS_DIRECTORY)

    initiate_operations()

    template_files = []

    # inps.sheet_ids is an array of sheets.
    # Each use of the --sheet_id command line parameter adds another array to the inps.sheet_id variable.
    for sheet in inps.sheet_ids:
        template_files += generate_templates_with_options(
            inps.template_csv, inps.dataset, sheet[0])

    datasets = get_datasets_to_process(template_files, inps.dataset)

    logger_run_operations.log(loglevel.INFO,
                              "Datasets to Process: {}".format(datasets))

    for dset in datasets:

        template_file = "{}/{}.template".format(TEMPLATE_DIRECTORY, dset)

        logger_run_operations.log(loglevel.INFO,
                                  "{}: {}".format(dset, template_file))

        newest_date = get_newest_data_date(template_file)
        last_date = get_last_downloaded_date(dset)

        print(newest_date)
        print(last_date)

        # Only (re)process a dataset when imagery newer than the last
        # download exists; the wrapper is launched fire-and-forget.
        if newest_date > last_date:
            print("Submitting minsar_wrapper.bash for {}".format(
                putils.get_project_name(template_file)))
            subprocess.Popen(["minsar_wrapper.bash", template_file],
                             stdout=subprocess.DEVNULL,
                             stderr=subprocess.STDOUT)
        else:
            print("SKIPPING")
    print(
        "-------------- run_operations.py has completed. Exiting now. -------------- \n\n\n\n\n\n\n"
    )

    sys.exit(0)
def run_generate_chunk_template_files(inps):
    """
    Create <location>Chunk<lat><direction><track>.template files, one per
    integer-latitude chunk of the template's bounding box, and write the
    corresponding processing commands to <work_dir>/minsar_commands.txt.

    :param inps: parsed command line namespace; reads custom_template_file,
                 work_dir, template, lat_step, lat_margin, wait_time,
                 bash_script, start_step, end_step, do_step
    """
    project_name = putils.get_project_name(inps.custom_template_file)

    location_name, sat_direction, sat_track = putils.split_project_name(
        project_name)

    # 'Big' projects collapse to their base location name for chunk naming.
    location_name = location_name.split('Big')[0]

    chunk_templates_dir = inps.work_dir + '/chunk_templates'
    chunk_templates_dir_string = '$SCRATCHDIR/' + project_name + '/chunk_templates'
    os.makedirs(chunk_templates_dir, exist_ok=True)

    command_list = []
    sleep_time = 0

    # Build the --start/--end/--dostep option string; default to 'jobfiles'.
    command_options = ''
    if inps.start_step is None and inps.do_step is None:
        inps.start_step = 'jobfiles'

    if inps.do_step is not None:
        command_options = command_options + ' --dostep ' + inps.do_step
    else:
        if inps.start_step is not None:
            command_options = command_options + ' --start ' + inps.start_step
        if inps.end_step is not None:
            command_options = command_options + ' --end ' + inps.end_step

    prefix = 'tops'
    bbox_list = inps.template[prefix + 'Stack.boundingBox'].split(' ')

    # Strip stray quotes from every coordinate, e.g.
    # ["'-8.75", '-7.8', '115.0', "115.7'"] --> ['-8.75', '-7.8', '115.0', '115.7']
    bbox_list = [coord.replace("\'", '') for coord in bbox_list]

    # Chunk over whole-degree latitudes inside the bounding box.
    min_lat = math.ceil(float(bbox_list[0]))
    max_lat = math.floor(float(bbox_list[1]))

    lat = min_lat

    chunk_number = 0
    chunk1_option = ''

    while lat < max_lat:
        tmp_min_lat = lat
        tmp_max_lat = lat + inps.lat_step

        chunk_name = [
            location_name + 'Chunk' + str(int(lat)) + sat_direction + sat_track
        ]
        chunk_template_file = chunk_templates_dir + '/' + chunk_name[
            0] + '.template'
        chunk_template_file_base = chunk_name[0] + '.template'
        shutil.copy(inps.custom_template_file, chunk_template_file)

        # Copy the list so per-chunk latitude edits do not mutate bbox_list
        # (the original aliased it, which the old comment flagged as a smell).
        chunk_bbox_list = list(bbox_list)
        chunk_bbox_list[0] = str(float(tmp_min_lat - inps.lat_margin))
        chunk_bbox_list[1] = str(float(tmp_max_lat + inps.lat_margin))
        print(chunk_name, tmp_min_lat, tmp_max_lat, chunk_bbox_list)

        custom_tempObj = Template(inps.custom_template_file)
        custom_tempObj.options['topsStack.boundingBox'] = ' '.join(
            chunk_bbox_list)

        slcDir = '$SCRATCHDIR/' + project_name + '/SLC'
        demDir = '$SCRATCHDIR/' + project_name + '/DEM'
        custom_tempObj.options['topsStack.slcDir'] = slcDir
        custom_tempObj.options['topsStack.demDir'] = demDir

        # When a download step will run, let the stack processor choose its
        # own SLC directory instead of pinning it in the template.
        if 'download' in command_options:
            del custom_tempObj.options['topsStack.slcDir']

        putils.write_template_file(chunk_template_file, custom_tempObj)
        putils.beautify_template_file(chunk_template_file)

        chunk_number = chunk_number + 1
        # Only the first chunk downloads ECMWF data under minsarApp.bash.
        if chunk_number > 1 and inps.bash_script == 'minsarApp.bash':
            chunk1_option = ' --no_download_ECMWF '

        command = inps.bash_script + ' ' + chunk_templates_dir_string + '/' + chunk_template_file_base + command_options + chunk1_option + ' --sleep ' + str(
            sleep_time) + ' &'

        command_list.append(command)

        lat = lat + inps.lat_step
        sleep_time = sleep_time + inps.wait_time
        chunk1_option = ''

    # Write (and echo) the generated commands; 'with' guarantees the file is
    # closed, which the original open()/no-close version did not.
    commands_file = inps.work_dir + '/minsar_commands.txt'
    with open(commands_file, "w") as f:
        print()
        for item in command_list:
            print(item)
            f.write(item + '\n')

        print()
        f.write('\n')

    return
示例#6
0
def main(iargs=None):
    """
    Upload data products (mintpy products and/or image products) to the
    remote data server via ssh/rsync.

    :param iargs: optional list of command line arguments; defaults to sys.argv
    :raises Exception: if any remote command exits with a non-zero status
    """
    inps = putils.cmd_line_parse(iargs, script='upload_data_products')

    os.chdir(inps.work_dir)

    if iargs is not None:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(
            inps.work_dir,
            os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    # get DATA_SERVER and return if it does not exist

    DATA_SERVER = '[email protected]'

    #try:
    #    DATA_SERVER = os.getenv('DATA_SERVER')
    #except:
    #    return

    project_name = putils.get_project_name(inps.custom_template_file)

    if inps.flag_mintpy_products:

        REMOTE_DIR = '/data/HDF5EOS/'
        destination = DATA_SERVER + ':' + REMOTE_DIR

        rsync_list = ['/mintpy/inputs', '/mintpy/pic', '/mintpy/*.he5']

        command = 'ssh ' + DATA_SERVER + ' mkdir -p ' + REMOTE_DIR + project_name
        print(command)
        status = subprocess.Popen(command, shell=True).wait()
        # Compare with != : "is not 0" tests object identity, which is
        # unreliable for integers and a SyntaxWarning on Python 3.8+.
        if status != 0:
            raise Exception('ERROR in upload_data_products.py')

        for pattern in rsync_list:
            command = 'rsync -avuz -e ssh --chmod=Du=rwx,Dg=rx,Do=rx,Fu=rw,Fg=r,Fo=r ' + inps.work_dir + pattern + ' ' + destination + project_name + '/'.join(
                pattern.split('/')[0:-1])
            print(command)
            status = subprocess.Popen(command, shell=True).wait()
            if status != 0:
                raise Exception('ERROR in upload_data_products.py')

        # temporary rsync of full mintpy folder
        command = 'rsync -avuz -e ssh --chmod=Du=rwx,Dg=rx,Do=rx,Fu=rw,Fg=r,Fo=r ' + inps.work_dir + '/mintpy ' + destination + project_name + '/full_mintpy'
        print(command)
        status = subprocess.Popen(command, shell=True).wait()
        if status != 0:
            raise Exception('ERROR in upload_data_products.py')
        return None

    if inps.flag_image_products:
        REMOTE_DIR = '/data/image_products/'
        destination = DATA_SERVER + ':' + REMOTE_DIR

        rsync_list = [
            '/image_products/*',
        ]

        command = 'ssh ' + DATA_SERVER + ' mkdir -p ' + REMOTE_DIR + project_name
        print(command)
        status = subprocess.Popen(command, shell=True).wait()
        if status != 0:
            raise Exception('ERROR in upload_data_products.py')

        for pattern in rsync_list:
            command = 'rsync -avuz -e ssh --chmod=Du=rwx,Dg=rx,Do=rx,Fu=rw,Fg=r,Fo=r ' + inps.work_dir + pattern + ' ' + destination + project_name + '/'.join(
                pattern.split('/')[0:-1])
            print(command)
            status = subprocess.Popen(command, shell=True).wait()
            if status != 0:
                raise Exception('ERROR in upload_data_products.py')

        return None

    return None
def run_generate_chunk_template_files(inps):
    """
    Create <location>Chunk<lat><direction><track>.template files, one per
    integer-latitude chunk of the template's bounding box, and write a
    minsarApp.bash command per chunk to <work_dir>/minsar_commands.txt.

    :param inps: parsed command line namespace; reads custom_template_file,
                 work_dir, template, lat_step, lat_margin, download_flag
    """
    project_name = putils.get_project_name(inps.custom_template_file)

    location_name, sat_direction, sat_track = putils.split_project_name(
        project_name)

    chunk_templates_dir = inps.work_dir + '/chunk_templates'
    os.makedirs(chunk_templates_dir, exist_ok=True)

    # Use the same truth test as the slcDir deletion below: download_flag may
    # arrive as the string 'True' (the original '== True' missed that case).
    if inps.download_flag in [True, 'True']:
        minsarApp_option = '--start download'
    else:
        minsarApp_option = '--start dem'

    prefix = 'tops'
    bbox_list = inps.template[prefix + 'Stack.boundingBox'].split(' ')

    # Strip stray quotes from every coordinate, e.g.
    # ["'-8.75", '-7.8', '115.0', "115.7'"] --> ['-8.75', '-7.8', '115.0', '115.7']
    bbox_list = [coord.replace("\'", '') for coord in bbox_list]

    # Chunk over whole-degree latitudes inside the bounding box.
    min_lat = math.ceil(float(bbox_list[0]))
    max_lat = math.floor(float(bbox_list[1]))

    # 'with' guarantees the commands file is closed (the original leaked it).
    commands_file = inps.work_dir + '/minsar_commands.txt'
    with open(commands_file, "w") as f:
        lat = min_lat
        while lat < max_lat:
            tmp_min_lat = lat
            tmp_max_lat = lat + inps.lat_step

            chunk_name = [
                location_name + 'Chunk' + str(int(lat)) + sat_direction + sat_track
            ]
            chunk_template_file = chunk_templates_dir + '/' + chunk_name[
                0] + '.template'
            shutil.copy(inps.custom_template_file, chunk_template_file)

            # Copy so per-chunk latitude edits do not mutate bbox_list.
            chunk_bbox_list = list(bbox_list)
            chunk_bbox_list[0] = str(float(tmp_min_lat - inps.lat_margin))
            chunk_bbox_list[1] = str(float(tmp_max_lat + inps.lat_margin))
            print(chunk_name, tmp_min_lat, tmp_max_lat, chunk_bbox_list)

            custom_tempObj = Template(inps.custom_template_file)
            custom_tempObj.options['topsStack.boundingBox'] = ' '.join(
                chunk_bbox_list)

            # When downloading, let the stack processor pick its own SLC dir.
            if inps.download_flag in [True, 'True']:
                del custom_tempObj.options['topsStack.slcDir']

            putils.write_template_file(chunk_template_file, custom_tempObj)
            putils.beautify_template_file(chunk_template_file)

            minsar_command = 'minsarApp.bash ' + chunk_template_file + ' ' + minsarApp_option

            f.write(minsar_command + '\n')

            lat = lat + inps.lat_step

    return