Exemplo n.º 1
0
def process_runfiles(inps):
    """Execute the stackSentinel run files for the project.

    Runs ``execute_stacksentinel_run_files.py`` on the custom template,
    removes stale PYSAR output, optionally cleans intermediate
    directories, and exits if the user asked processing to stop here.

    Parameters
    ----------
    inps : namespace
        Parsed options; uses ``flag_process``, ``custom_template_file``,
        ``custom_template`` and ``stopprocess``.

    Returns
    -------
    namespace
        The (unmodified) ``inps`` object.

    Raises
    ------
    Exception
        If the external script exits with a non-zero status.
    """

    if inps.flag_process:
        command = 'execute_stacksentinel_run_files.py ' + inps.custom_template_file
        messageRsmas.log(command)
        # TODO: check the performance, change in subprocess
        status = subprocess.Popen(command, shell=True).wait()
        # BUGFIX: was `status is not 0` -- identity comparison on ints is
        # unreliable and raises a SyntaxWarning on Python >= 3.8
        if status != 0:
            logger.log(loglevel.ERROR,
                       'ERROR in execute_stacksentinel_run_files.py')
            raise Exception('ERROR in execute_stacksentinel_run_files.py')

        # stale PYSAR output from a previous run would confuse later steps
        if os.path.isdir('PYSAR'):
            shutil.rmtree('PYSAR')

        if int(inps.custom_template['cleanopt']) >= 1:
            cleanlist = clean_list()
            _remove_directories(cleanlist[1])

    if inps.stopprocess:
        logger.log(loglevel.INFO, 'Exit as planned after processing')
        sys.exit(0)

    return inps
Exemplo n.º 2
0
def process_time_series(inps):
    """Run squeesar-based time series analysis.

    Creates the squeesar run files if they do not exist yet, executes
    them via ``execute_squeesar_run_files.py``, optionally cleans
    intermediate directories, and exits when requested.

    Parameters
    ----------
    inps : namespace
        Parsed options; uses ``flag_pysar``, ``workingDir``,
        ``custom_template_file``, ``custom_template`` and ``stoppysar``.

    Raises
    ------
    Exception
        If the external script exits with a non-zero status.
    """

    if inps.flag_pysar:

        # make the run files first if a previous step has not done so
        if not os.path.isdir(inps.workingDir + '/run_files_SQ'):
            create_squeesar_runfiles(inps)

        command = 'execute_squeesar_run_files.py ' + inps.custom_template_file
        messageRsmas.log(command)
        # TODO: check the performance, change in subprocess
        status = subprocess.Popen(command, shell=True).wait()
        # BUGFIX: was `status is not 0`; use value equality for exit codes
        if status != 0:
            logger.log(loglevel.ERROR,
                       'ERROR in execute_squeesar_run_files.py')
            raise Exception('ERROR in execute_squeesar_run_files.py')

        if int(inps.custom_template['cleanopt']) >= 1:
            cleanlist = clean_list()
            _remove_directories(cleanlist[1])

    if inps.stoppysar:
        logger.log(loglevel.DEBUG,
                   'Exit as planned after time series analysis')
        sys.exit(0)

    return
Exemplo n.º 3
0
def create_squeesar_runfiles(inps):
    """Create squeesar run files via ``create_squeesar_run_files.py``.

    Changes into ``inps.work_dir`` first; raises Exception when the
    script fails and exits the program when ``inps.stopmakerun`` is set.
    """
    os.chdir(inps.work_dir)
    command = 'create_squeesar_run_files.py ' + inps.custom_template_file
    messageRsmas.log(command)
    # TODO: check the performance, change in subprocess
    status = subprocess.Popen(command, shell=True).wait()
    # BUGFIX: was `status is not 0`; use value equality for exit codes
    if status != 0:
        logger.log(loglevel.ERROR, 'ERROR in create_squeesar_run_files.py')
        raise Exception('ERROR in create_squeesar_run_files.py')
    if inps.stopmakerun:
        logger.log(loglevel.INFO,
                   'Exit as planned after making squeesar run files ')
        sys.exit(0)
    return
Exemplo n.º 4
0
def main(iargs=None):
    """Downloads data with ssara and asfserial scripts.

    Optionally submits itself as a cluster job, prepares the project
    work/SLC directories, then either runs a direct ssara query (for
    non-Sentinel projects) or the ssara + asfserial download helpers.
    """

    command = 'download_rsmas.py ' + iargs[0]
    messageRsmas.log(command)

    inps = command_line_parse(iargs)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_rsmas'
        work_dir = os.getcwd()
        job_name = inps.template_file.split(os.sep)[-1].split('.')[0]
        wall_time = '24:00'

        cb.submit_script(job_name, job_file_name, sys.argv[:], work_dir,
                         wall_time)

    project_name = putils.get_project_name(
        custom_template_file=inps.template_file)
    work_dir = putils.get_work_directory(None, project_name)
    slc_dir = os.path.join(work_dir, 'SLC')
    if not os.path.isdir(work_dir):
        os.makedirs(work_dir)
    if not os.path.isdir(slc_dir):
        os.makedirs(slc_dir)

    os.chdir(work_dir)

    # if satellite is not Sentinel (not tried yet)
    if 'SenDT' not in project_name and 'SenAT' not in project_name:

        dataset_template = Template(inps.template_file)

        ssaraopt = dataset_template.generate_ssaraopt_string()
        ssara_call = ['ssara_federated_query.py'
                      ] + ssaraopt + ['--print', '--download']
        # BUGFIX: the original ran Popen(list, shell=True).wait() and then
        # called .poll() on the returned int.  With shell=True and a list
        # argument only the first element is executed, and wait() already
        # returns the exit status, so .poll() raised AttributeError.
        # Run the argv list directly and take the status from wait().
        ssara_process = subprocess.Popen(ssara_call)
        completion_status = ssara_process.wait()

        return

    download('ssara', inps.template_file, slc_dir, outnum=1)
    #download('ssara', inps.template_file, slc_dir, outnum = 2)
    download('asfserial', inps.template_file, slc_dir, outnum=1)
Exemplo n.º 5
0
def create_runfiles(inps):
    """ Calls the script to create stackSentinel runfiles and configs.

    Runs ``create_stacksentinel_run_files.py`` from ``inps.work_dir``
    when ``inps.flag_makerun`` is set; exits afterwards if the user
    asked to stop after run-file creation.

    Returns
    -------
    namespace
        The (unmodified) ``inps`` object.

    Raises
    ------
    Exception
        If the external script exits with a non-zero status.
    """

    if inps.flag_makerun:
        os.chdir(inps.work_dir)
        command = 'create_stacksentinel_run_files.py ' + inps.custom_template_file
        messageRsmas.log(command)
        # TODO: check the performance, change in subprocess
        status = subprocess.Popen(command, shell=True).wait()
        # BUGFIX: was `status is not 0`; use value equality for exit codes
        if status != 0:
            logger.log(loglevel.ERROR,
                       'ERROR in create_stacksentinel_run_files.py')
            raise Exception('ERROR in create_stacksentinel_run_files.py')
        if inps.stopmakerun:
            logger.log(loglevel.INFO,
                       'Exit as planned after making sentinel run files ')
            sys.exit(0)

    return inps
Exemplo n.º 6
0
def call_isce_dem(custom_template):
    """Stitch a DEM with ``dem_rsmas_kawan.py`` from the template bounding box.

    The bounding box string looks like ``'-1 0.15 -91.3 -91.0'``
    (south north west east).  Each bound is padded by 0.5 degree and
    rounded to whole degrees before stitching.  After a successful run
    the generated ``demLat_*.wgs84.xml`` is rewritten so that the DEM
    path is absolute.

    Exits the program on stitch failure or missing tiles.
    """
    bbox = custom_template['sentinelStack.boundingBox'].strip("'")
    vals = bbox.split()
    south = round(float(vals[0]) - 0.5)  # assumes quotes '-1 0.15 -91.3 -91.0'
    north = round(float(vals[1]) + 0.5)
    west = round(float(vals[2]) - 0.5)
    east = round(float(vals[3]) + 0.5)

    dembbox = str(int(south)) + ' ' + str(int(north)) + ' ' + str(
        int(west)) + ' ' + str(int(east))

    # cmd = 'dem.py -a stitch -b '+demBbox+' -c -u https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/'
    cmd = 'dem_rsmas_kawan.py -a stitch -b ' + dembbox + ' -c -u https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/'
    messageRsmas.log(cmd)

    cwd = os.getcwd()

    try:
        output = subprocess.check_output(cmd,
                                         stderr=subprocess.STDOUT,
                                         shell=True,
                                         universal_newlines=True)
    except subprocess.CalledProcessError as exc:
        print("Command failed. Exit code, StdErr:", exc.returncode, exc.output)
        sys.exit('Error produced by dem.py')
    else:
        # print("Success.        StdOut \n{}\n".format(output))
        if 'Could not create a stitched DEM. Some tiles are missing' in output:
            os.chdir('..')
            shutil.rmtree('DEM')
            sys.exit('Error in dem.py: Tiles are missing. Ocean???')

    # FIX: use context managers so the xml/tmp handles are always closed
    # (the original left both files open)
    xmlfile = glob.glob('demLat_*.wgs84.xml')[0]
    with open(xmlfile, 'r') as fin, open("tmp.txt", "wt") as fout:
        for line in fin:
            fout.write(line.replace('demLat', cwd + '/demLat'))
    os.rename('tmp.txt', xmlfile)
Exemplo n.º 7
0
def main(argv):
    """Prepare a fresh DEM directory and dispatch to the requested DEM tool.

    Reads the custom template, recreates the DEM working directory, and
    runs the ssara and/or isce DEM helpers depending on the parsed
    flags.  Exits with an error for unsupported demMethod values.
    """

    messageRsmas.log(' '.join(argv))
    inps = command_line_parse()

    custom_template = readfile.read_template(inps.custom_template_file)

    # always start from a fresh DEM directory
    if os.path.isdir('DEM'):
        shutil.rmtree('DEM')
    os.mkdir('DEM')
    os.chdir('DEM')

    if 'sentinelStack.demMethod' not in custom_template.keys():
        custom_template['sentinelStack.demMethod'] = 'bbox'

    # BUGFIX: the original attached `else: sys.exit(...)` only to the
    # isce check, so a valid ssara-only run aborted with an
    # "unsupported demMethod" error.  Validate the method up front.
    if custom_template['sentinelStack.demMethod'] != 'bbox':
        sys.exit('Error unspported demMethod option: ' +
                 custom_template['sentinelStack.demMethod'])

    if inps.ssara:
        call_ssara_dem()
    if inps.new:
        print('nice job kawan! You aren\' dumb!')
    if inps.isce:
        print('you started isce')
        call_isce_dem(custom_template)
        print('you finished isce')

    print('\n###############################################')
    print('End of dem_rsmas.py')
    print('################################################\n')
Exemplo n.º 8
0
def run_ingest_insarmaps(inps):
    """ Calls the script of ingest insarmaps and emails the results.

    Runs ``ingest_insarmaps.py`` when ``inps.flag_insarmaps`` is set,
    emails the results, and performs the most aggressive cleanup when
    ``cleanopt == 4``.

    Raises
    ------
    Exception
        If the external script exits with a non-zero status.
    """

    if inps.flag_insarmaps:

        command = 'ingest_insarmaps.py ' + inps.custom_template_file
        messageRsmas.log(command)
        status = subprocess.Popen(command, shell=True).wait()
        # BUGFIX: was `status is not 0`; use value equality for exit codes
        if status != 0:
            logger.log(loglevel.ERROR, 'ERROR in ingest_insarmaps.py')
            raise Exception('ERROR in ingest_insarmaps.py')

        putils.email_insarmaps_results(inps.custom_template)

    # clean: cleanopt 4 removes everything listed in cleanlist[4]
    if int(inps.custom_template['cleanopt']) == 4:
        cleanlist = clean_list()
        _remove_directories(cleanlist[4])

    if inps.stopinsarmaps:
        logger.log(loglevel.DEBUG, 'Exit as planned after insarmaps')
Exemplo n.º 9
0
def create_or_copy_dem(inps, work_dir, template, custom_template_file):
    """ Downloads a DEM file or copies an existing one.

    If ``sentinelStack.demDir`` is set (and not 'auto') in the template,
    the DEM directory is copied from there; otherwise ``dem_rsmas.py``
    is called to build one.

    Raises
    ------
    Exception
        If DEM generation exits with a non-zero status.
    """

    #if inps.flag_dem:
    dem_dir = os.path.join(work_dir, 'DEM')
    # a leftover empty DEM directory is treated as absent
    if os.path.isdir(dem_dir) and len(os.listdir(dem_dir)) == 0:
        os.rmdir(dem_dir)

    if not os.path.isdir(dem_dir):
        if 'sentinelStack.demDir' in list(template.keys(
        )) and template['sentinelStack.demDir'] != str('auto'):
            # reuse the DEM the user pointed at in the template
            shutil.copytree(template['sentinelStack.demDir'], dem_dir)
        else:
            # TODO: Change subprocess call to get back error code and send error code to logger
            command = 'dem_rsmas.py ' + custom_template_file
            print(command)
            messageRsmas.log(command)
            status = subprocess.Popen(command, shell=True).wait()
            # BUGFIX: was `status is not 0`; use value equality for exit codes
            if status != 0:
                logger.log(loglevel.ERROR, 'ERROR while making DEM')
                raise Exception('ERROR while making DEM')
Exemplo n.º 10
0
def call_pysar(custom_template, custom_template_file, flag_load_and_stop):
    """ Calls pysarAPP to load and process data.

    First runs ``pysarApp.py --end load_data`` to ingest the stack;
    unless ``flag_load_and_stop`` is set, it then optionally cleans
    intermediate directories (based on ``cleanopt``) and runs the full
    ``pysarApp.py`` workflow.

    Raises
    ------
    Exception
        If either pysarApp invocation exits with a non-zero status.
    """

    # TODO: Change subprocess call to get back error code and send error code to logger
    logger.log(loglevel.DEBUG,
               '\n*************** running pysar ****************')
    command = 'pysarApp.py ' + custom_template_file + ' --end load_data |& tee out_pysar.log'
    out_file = 'out_pysar_load'
    logger.log(loglevel.INFO, command)
    messageRsmas.log(command)
    # swap stdout/stderr so each stream lands in its own .o/.e file
    command = '(' + command + ' | tee ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee ' + out_file + '.e'
    status = subprocess.Popen(command, shell=True).wait()
    # BUGFIX: was `status is not 0`; use value equality for exit codes
    if status != 0:
        logger.log(loglevel.ERROR, 'ERROR in pysarApp.py --end load_data')
        raise Exception('ERROR in pysarApp.py --end load_data')

    if flag_load_and_stop:
        logger.log(loglevel.DEBUG, 'Exit as planned after loading into pysar')
        return

    # clean after loading data for cleanopt >= 2
    # FIX: close the log file handle (the original leaked it)
    with open('out_pysar.log') as log_file:
        load_complete = 'All data needed found' in log_file.read()
    if load_complete:
        if int(custom_template['cleanopt']) >= 2:
            print('Cleaning files:   cleanopt= ' +
                  str(custom_template['cleanopt']))
            cleanlist = clean_list()
            _remove_directories(cleanlist[2])
        if int(custom_template['cleanopt']) >= 3:
            # cleanlist is defined above because cleanopt >= 3 implies >= 2
            _remove_directories(cleanlist[3])

    command = 'pysarApp.py ' + custom_template_file
    out_file = 'out_pysar'
    logger.log(loglevel.INFO, command)
    messageRsmas.log(command)
    command = '(' + command + ' | tee ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee ' + out_file + '.e'
    messageRsmas.log(command)
    status = subprocess.Popen(command, shell=True).wait()
    # BUGFIX: was `status is not 0`; use value equality for exit codes
    if status != 0:
        logger.log(loglevel.ERROR, 'ERROR in pysarApp.py')
        raise Exception('ERROR in pysarApp.py')

    return None
Exemplo n.º 11
0
def process(inps):
    """Top-level driver for one project: download, DEM, run-file
    creation and execution, time-series processing, and insarmaps
    ingestion, in that fixed order."""

    # ---- initiation -------------------------------------------------
    t0 = time.time()

    inps.project_name = get_project_name(inps.custom_template_file)
    inps.work_dir = get_work_directory(None, inps.project_name)
    inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    # optionally wipe any previous results for this project
    if inps.remove_project_dir:
        _remove_directories(directories_to_delete=[inps.work_dir])

    if not os.path.isdir(inps.work_dir):
        os.makedirs(inps.work_dir)
    os.chdir(inps.work_dir)

    # read / refresh the template, then settle on a processing method
    inps = prs.create_or_update_template(inps)
    print(inps)
    if not inps.processingMethod or inps.workflow == 'interferogram':
        inps.processingMethod = 'sbas'

    if not os.path.isdir(inps.slc_dir):
        os.makedirs(inps.slc_dir)

    # record the exact invocation in both logs
    cli = os.path.basename(sys.argv[0]) + ' ' + ' '.join(sys.argv[1:])
    logger_process_rsmas.log(loglevel.INFO, '##### NEW RUN #####')
    logger_process_rsmas.log(loglevel.INFO, 'process_rsmas.py ' + cli)
    messageRsmas.log('##### NEW RUN #####')
    messageRsmas.log(cli)

    # ---- startssara: data download ----------------------------------
    # (download_ssara_rsmas.py / download_asfserial_rsmas.py $TE/template)
    prs.call_ssara(inps.flag_ssara, inps.custom_template_file, inps.slc_dir)

    # ---- DEM: create or copy (dem_rsmas.py $TE/template) ------------
    prs.create_or_copy_dem(inps, inps.work_dir, inps.template,
                           inps.custom_template_file)

    # ---- create sentinel run files ----------------------------------
    # (create_stacksentinel_run_files.py $TE/template)
    prs.create_runfiles(inps)

    # ---- startprocess: execute run files ----------------------------
    # e.g. execute_stacksentinel_run_files.py $TE/template 1 4
    prs.process_runfiles(inps)

    # ---- startpysar: time series via squeesar or PySAR --------------
    if inps.processingMethod == 'squeesar':
        # create_squeesar_run_files.py / execute_squeesar_run_files.py
        prs.process_time_series(inps)
    else:
        # pysarApp.py $TE/template
        prs.run_pysar(inps, t0)

    # ---- ingest into insarmaps and email results --------------------
    # (ingest_insarmaps.py $TE/template)
    prs.run_ingest_insarmaps(inps)

    logger_process_rsmas.log(loglevel.INFO, 'End of process_rsmas')
Exemplo n.º 12
0
def main(argv):
    """Build a stitched DEM for the area described by the custom template.

    Determines the bounding box either directly from the template
    ('bbox' method, quoted string 'S N W E') or by parsing the wget
    line printed by ``ssara_federated_query.py`` ('ssara' method), then
    runs ``dem.py -a stitch`` and rewrites the resulting xml so the DEM
    path is absolute.  Exits on unsupported methods or stitch failures.
    """

    messageRsmas.log(' '.join(argv))
    inps = dem_parser()
    custom_template = readfile.read_template(inps.custom_template_file)
    cwd = make_dem_dir()

    # can sentinelStack.demMethod be removed? I think parser is the replacement
    if 'sentinelStack.demMethod' not in custom_template.keys():
        custom_template['sentinelStack.demMethod'] = 'bbox'

    if custom_template['sentinelStack.demMethod'] == 'bbox':
        # assumes quotes '-1 0.15 -91.3 -91.0' (south north west east)
        bbox = custom_template['sentinelStack.boundingBox']
        south = bbox.split(' ')[0].split('\'')[1]
        north = bbox.split(' ')[1]
        west = bbox.split(' ')[2]
        east = bbox.split(' ')[3].split('\'')[0]
    elif custom_template['sentinelStack.demMethod'] == 'ssara':
        call_ssara_dem(custom_template, inps)
        cmd = 'ssara_federated_query.py ' + custom_template[
            'ssaraopt'] + ' --dem'
        # BUGFIX: check_output returns bytes by default on Python 3, and
        # bytes cannot be split with a str separator; request text output
        output = subprocess.check_output(cmd, shell=True,
                                         universal_newlines=True)
        output = output.split("\n")
        for line in output:
            if line.startswith("wget"):
                # bounds are the first four query parameters of the wget URL
                coordList = line.split("?")[1].split("&")[0:4]
                for item in coordList:
                    if "north" in item:
                        north = item.split("=")[1]
                    if "south" in item:
                        south = item.split("=")[1]
                    if "east" in item:
                        east = item.split("=")[1]
                    if "west" in item:
                        west = item.split("=")[1]
    else:
        # BUGFIX: was `sys.ext(...)` (AttributeError), not sys.exit
        sys.exit('Error unspported demMethod option: ' +
                 custom_template['sentinelStack.demMethod'])

    if inps.ssara:
        call_ssara_dem(custom_template, inps)
        print('####### CONTINUED')
    else:
        print('not ssara')

    # pad each bound by half a degree and round to whole degrees
    south = round(float(south) - 0.5)
    north = round(float(north) + 0.5)
    west = round(float(west) - 0.5)
    east = round(float(east) + 0.5)

    demBbox = str(int(south)) + ' ' + str(int(north)) + ' ' + str(
        int(west)) + ' ' + str(int(east))
    cmd = 'dem.py -a stitch -b ' + demBbox + ' -c -u https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/'
    messageRsmas.log(cmd)

    try:
        output = subprocess.check_output(cmd,
                                         stderr=subprocess.STDOUT,
                                         shell=True,
                                         universal_newlines=True)
    except subprocess.CalledProcessError as exc:
        print("Command failed. Exit code, StdErr:", exc.returncode, exc.output)
        sys.exit('Error produced by dem.py')
    else:
        #print("Success.        StdOut \n{}\n".format(output))
        if 'Could not create a stitched DEM. Some tiles are missing' in output:
            os.chdir('..')
            shutil.rmtree('DEM')
            sys.exit('Error in dem.py: Tiles are missing. Ocean???')

    # FIX: rewrite the xml with context managers (handles were leaked)
    xmlFile = glob.glob('demLat_*.wgs84.xml')[0]
    with open(xmlFile, 'r') as fin, open("tmp.txt", "wt") as fout:
        for line in fin:
            fout.write(line.replace('demLat', cwd + '/demLat'))
    os.rename('tmp.txt', xmlFile)

    # BUGFIX: these were Python 2 print statements (SyntaxError on Python 3)
    print('\n###############################################')
    print('End of dem_rsmas.py')
    print('################################################\n')
Exemplo n.º 13
0
    Submits a single script as a job.
    :param job_name: Name of job.
    :param job_file_name: Name of job file.
    :param argv: Command line arguments for running job.
    :param work_dir: Work directory in which to write job, output, and error files.
    :param walltime: Input parameter of walltime for the job.
    :param email_notif: If email notifications should be on or not. Defaults to true.
    """
    command_line = os.path.basename(argv[0]) + " "
    command_line += " ".join(flag for flag in argv[1:] if flag != "--submit")
    write_single_job_file(job_name,
                          job_file_name,
                          command_line,
                          work_dir,
                          email_notif,
                          walltime=walltime,
                          queue=os.getenv("QUEUENAME"))
    submit_single_job("{0}.job".format(job_file_name), work_dir)


if __name__ == "__main__":
    # Script entry point: log the exact command line, write one batch
    # job file per run file listed in PARAMS.file, then submit them all.
    PARAMS = parse_arguments(sys.argv[1::])
    messageRsmas.log(
        os.path.basename(sys.argv[0]) + " " + " ".join(sys.argv[1::]))
    # JOBS: the job files produced from PARAMS.file with the requested
    # memory/walltime/queue settings
    JOBS = write_batch_job_files(PARAMS.file,
                                 PARAMS.outdir,
                                 memory=PARAMS.memory,
                                 walltime=PARAMS.wall,
                                 queue=PARAMS.queue)
    submit_batch_jobs(JOBS, PARAMS.file, PARAMS.outdir)
Exemplo n.º 14
0
        'plmethod', 'range_window', 'azimuth_window', 'cropbox', 'excludeDate',
        'azimuthLooks', 'rangeLooks', 'unwMethod', 'textCmd'
    ]

    for value, pref in zip(inpsvalue, prefixletters):
        keyvalue = eval('inps.' + value)
        if keyvalue is not None:
            command = command + ' -' + str(pref) + ' ' + str(keyvalue)

    print(command)

    out_file = 'out_squeesar_create_runfiles'
    command = '(' + command + ' | tee ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee ' + out_file + '.e'

    logger.log(loglevel.INFO, command)
    messageRsmas.log(command)

    status = subprocess.Popen(command, shell=True).wait()
    if status is not 0:
        logger.log(loglevel.ERROR,
                   'ERROR making run_files using {}'.format(script))
        raise Exception('ERROR making run_files using {}'.format(script))

    run_file_list = glob.glob(inps.work_dir + '/run_files_SQ/run_*')
    with open(inps.work_dir + '/run_files_list_sq', 'w') as run_file:
        for item in run_file_list:
            run_file.writelines(item + '\n')

    logger.log(loglevel.INFO,
               "-----------------Done making Run files-------------------")
Exemplo n.º 15
0
    if os.path.isdir(json_folder):
        logger.log(loglevel.INFO, 'Removing directory: {}'.format(json_folder))
        shutil.rmtree(json_folder)

    command1 = 'hdfeos5_2json_mbtiles.py ' + hdfeos_file + ' ' + json_folder + ' |& tee out_insarmaps.log'
    command2 = 'json_mbtiles2insarmaps.py -u ' + password.insaruser + ' -p ' + password.insarpass + ' --host ' + \
               'insarmaps.miami.edu -P rsmastest -U rsmas\@gmail.com --json_folder ' + \
               json_folder + ' --mbtiles_file ' + mbtiles_file + ' |& tee -a out_insarmaps.log'

    with open(inps.work_dir + '/PYSAR/run_insarmaps', 'w') as f:
        f.write(command1 + '\n')
        f.write(command2 + '\n')

    out_file = 'out_insarmaps'
    logger.log(loglevel.INFO, command1)
    messageRsmas.log(command1)
    command1 = '(' + command1 + ' | tee ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee ' + out_file + '.e'
    status = subprocess.Popen(command1, shell=True).wait()
    if status is not 0:
        logger.log(loglevel.ERROR, 'ERROR in hdfeos5_2json_mbtiles.py')
        raise Exception('ERROR in hdfeos5_2json_mbtiles.py')

    # TODO: Change subprocess call to get back error code and send error code to logger
    logger.log(loglevel.INFO, command2)
    messageRsmas.log(command2)
    command2 = '(' + command2 + ' | tee -a ' + out_file + '.o) 3>&1 1>&2 2>&3 | tee -a ' + out_file + '.e'
    status = subprocess.Popen(command2, shell=True).wait()
    if status is not 0:
        logger.log(loglevel.ERROR, 'ERROR in json_mbtiles2insarmaps.py')
        raise Exception('ERROR in json_mbtiles2insarmaps.py')
Exemplo n.º 16
0
def main(argv):
    """Split a run file into chunks and submit each chunk as an LSF job.

    Workflow: parse options with getopt, clean z_* leftovers of a
    previous submission, split the run file into lineNum-line chunks
    with the `split` utility, write a #BSUB shell script per chunk
    (optionally with a generated parallel Popen driver), submit each
    with ``bsub``, then (unless --nowait) poll until all output files
    exist and move the z_* artifacts into a jobs directory.
    """

    ##### Inputs
    # defaults; each may be overridden by a command-line option below
    coreNum = 1
    memory = 3700
    projectID = 'insarlab'
    parallel = 'no'
    queue = 'general'
    walltime = '5:00'
    waitJob = True

    if len(sys.argv) > 2:
        try:            opts, args = getopt.getopt(argv,'h:f:n:w:r:q:p:e:l:',\
                                           ['help','file=','number=','walltime=',\
                                            'memory=','queue=','project=','email=',\
                                            'parallel','line=','nowait'])
        except getopt.GetoptError:
            Usage()
            sys.exit(1)
        if opts == []:
            Usage()
            sys.exit(1)

        for opt, arg in opts:
            if opt in ['-h', '--help']:
                Usage()
                sys.exit()
            elif opt in ['-f', '--file']:
                run_file = arg
            elif opt in ['-n', '--number']:
                coreNum = int(arg)
            elif opt in ['-w', '--walltime']:
                walltime = arg
            elif opt in ['-r', '--memory']:
                memory = int(arg)
            elif opt in ['-q', '--queue']:
                queue = arg
            elif opt in ['-p', '--project']:
                projectID = arg
            elif opt in ['-e', '--email']:
                email = arg
            elif opt in ['-l', '--line']:
                lineNum = int(arg)
            elif opt in ['--parallel']:
                parallel = 'yes'
            elif opt in ['--nowait']:
                waitJob = False

    # single positional argument: treat it as the run file
    elif len(sys.argv) == 2 and os.path.isfile(argv[0]):
        run_file = argv[0]
    else:
        Usage()
        sys.exit(1)

    run_file = os.path.realpath(run_file)
    run_name = os.path.basename(run_file)
    workDir = os.path.dirname(run_file)
    os.chdir(workDir)
    log(os.path.basename(sys.argv[0]) + ' ' + run_name)

    #if int(coreNum) >= 16 and queue == 'general':
    #    msg('More than 16 processors is setted, change job queue to parallel.')
    #    queue = 'parallel'

    # lineNum defaults to coreNum when -l/--line was not given
    # (NameError probe on purpose -- lineNum is only bound by the option)
    try:
        lineNum
    except:
        lineNum = coreNum

    # memory request scales with the number of command lines per job
    memory *= lineNum

    #if int(memory) >= 25000:
    #    msg('Setted memory exceed the maximum, change it to the limit [25 GB].')
    #    memory = '25000'

    ##### Clean leftovers of a previous submission of the same run file
    prefix_in = 'z_input_' + run_name + '.'
    prefix_out = 'z_output_' + run_name + '.'
    cleanCmd = 'rm ' + prefix_in + '* ' + prefix_out + '*'
    msg(cleanCmd)
    os.system(cleanCmd)

    ##### Input file info: count the command lines to be distributed
    msg('Input file: ' + run_file)
    with open(run_file) as f_run:
        cmdNum = sum(1 for _ in f_run)
    f_run.close()
    msg('Number of lines: ' + str(cmdNum))

    ##### Split run_file according to the job/processor number
    # jobNum is effectively ceil(cmdNum / lineNum)
    jobNum = int((cmdNum - 0.5) / lineNum) + 1
    digitNum = len(str(jobNum))
    splitCmd = 'split -a ' + str(digitNum) + ' -l ' + str(
        lineNum) + ' -d ' + run_file + ' ' + prefix_in
    msg(splitCmd)
    os.system(splitCmd)
    z_inList = glob.glob(prefix_in + '*')

    ##### Job Info
    z_inNum = len(z_inList)
    msg('Number of jobs to submit: ' + str(z_inNum))
    msg('Queue: ' + queue)
    msg('Walltime: ' + walltime)
    msg('Memory:' + str(memory))
    msg('Processors:' + str(coreNum))
    # email is only bound if -e/--email was supplied (NameError probe)
    try:
        email
        msg('Email: ' + email)
    except:
        pass

    ##### Write and Submit job file
    for z_in in z_inList:
        ##### Write job setting (#BSUB directives)
        count = z_in.split(prefix_in)[1]
        job = z_in + '.sh'
        f = open(job, 'w')
        f.write('#! /bin/bash')
        f.write('\n#BSUB -J ' + run_name + '.' + count)
        f.write('\n#BSUB -P ' + projectID)
        f.write('\n#BSUB -o ' + prefix_out + count + '.%J.o')
        f.write('\n#BSUB -e ' + prefix_out + count + '.%J.e')
        f.write('\n#BSUB -W ' + walltime)
        f.write('\n#BSUB -q ' + queue)
        f.write('\n#BSUB -n ' + str(coreNum))

        try:
            f.write('\n#BSUB -R "rusage[mem=' + str(memory) + ']"')
        except:
            pass

        if queue == 'parallel':
            f.write('\n#BSUB -R "span[ptile=16]"')

        # email notification directives only when an email was given
        # (relies on the NameError when `email` is unbound)
        try:
            f.write('\n#BSUB -u ' + email)
            f.write('\n#BSUB -N')
        except:
            pass

        ##### Write job excutable commands
        f.write('\ncd ' + workDir + '\n')
        fz_in = open(z_in, 'r')

        ## parallel computing: generate a small python driver that
        ## Popens every command of the chunk concurrently
        if parallel == 'yes' and lineNum > 1:
            parallelFile = z_in + '.popen.py'
            f_parallel = open(parallelFile, 'w')
            f_parallel.write('#! /usr/bin/env python\n')
            f_parallel.write('\nfrom subprocess import Popen\n')
            for line in fz_in:
                f_parallel.write('\nPopen(' + str(line.split()) + ')')
            f_parallel.close()
            os.system('chmod 755 ' + parallelFile)
            f.write('./' + parallelFile)

        ## sequential computing: copy the chunk's commands verbatim
        else:
            for line in fz_in:
                f.write(line)

        fz_in.close()
        f.close()

        submitCmd = 'bsub < ' + job
        msg('\n' + submitCmd)
        os.system(submitCmd)
        cleanCmd = 'rm ' + z_in
        os.system(cleanCmd)

    ##### Loop waiting until all jobs are done to exit script
    if not waitJob:
        msg('Job submission completed, exit without waiting.')
        return

    # poll for z_output_*.o files: one appears per finished job
    msg('\nSleeping until ' + str(z_inNum) + ' jobs are done for ' + run_name)
    im = 0
    z_outNum = len(glob.glob(prefix_out + '*.o'))
    while z_outNum < z_inNum:
        time.sleep(1)
        if im % 60 == 0:  #every minute
            msg('Current # of '+prefix_out+'*.o files in '+os.getcwd()+\
                ': <'+str(z_outNum)+'> out of <'+str(z_inNum)+'> after <'+str(im/60)+'> minutes')
        z_outNum = len(glob.glob(prefix_out + '*.o'))
        im = im + 1
    msg('All ' + str(z_inNum) + ' jobs are done for ' + run_name)
    m, s = divmod(im, 60)
    h, m = divmod(m, 60)
    msg('Total used time: %02d hours %02d mins %02d secs' % (h, m, s))

    ##### move z_* files into dedicated directory
    jobDir = run_file + '_jobs'
    msg('Moving z_* files into ' + jobDir)
    rmCmd = 'rm -rf ' + jobDir
    os.system(rmCmd)
    mkCmd = 'mkdir ' + jobDir
    os.system(mkCmd)
    mvCmd = 'mv ' + prefix_in + '* ' + prefix_out + '* ' + jobDir
    os.system(mvCmd)
    msg('Finished at ' + str(datetime.datetime.now()))
Exemplo n.º 17
0
def run_ssara(inps, run_number=1):
    """ Runs ssara_federated_query-cj.py and checks for download issues.

        Runs ssara_federated_query-cj.py and checks continuously for whether the data download has hung without
        completing or exited with an error code. If either of the above occur, the function is run again, for a
        maximum of 10 times.

        Parameters: run_number: int, the current iteration the wrapper is on (maximum 10 before quitting)
        Returns: status_code: int, the status of the download (0 for failed, 1 for success)

    """

    logger.log(loglevel.INFO, "RUN NUMBER: %s", str(run_number))
    if run_number > 10:
        # too many retries: give up and report failure
        return 0

    logger.log(loglevel.INFO, "PASSED RUN NUMBER > 10")

    # Compute SSARA options to use
    dataset_template = Template(inps.template)

    ssaraopt = dataset_template.generate_ssaraopt_string()

    ssaraopt = ssaraopt.split(' ')

    logger.log(loglevel.INFO, "GENERATED SSARAOPT STRING")

    # Runs ssara_federated_query-cj.py with proper options
    ssara_call = ['ssara_federated_query-cj.py'
                  ] + ssaraopt + ['--print', '--download']
    print(' '.join(ssara_call))
    messageRsmas.log(' '.join(ssara_call))
    ssara_process = subprocess.Popen(ssara_call)

    logger.log(loglevel.INFO, "STARTED PROCESS")

    completion_status = ssara_process.poll()  # None while still running
    hang_status = False  # whether or not the download has hung
    wait_time = 2  # wait time in 'minutes' to determine hang status
    prev_size = -1  # initial download directory size
    i = 0  # index for waiting periods (for calculation of total time only)

    logger.log(loglevel.INFO, "INITIAL COMPLETION STATUS: %s",
               str(completion_status))

    # while the process has not completed
    while completion_status is None:

        i = i + 1

        # current size (KB) of the download directory via `du -s`
        curr_size = int(
            subprocess.check_output(['du', '-s',
                                     os.getcwd()]).split()[0].decode('utf-8'))

        # no growth since the previous check => the download has hung
        if prev_size == curr_size:
            hang_status = True
            logger.log(loglevel.WARNING, "SSARA Hung")
            ssara_process.terminate()  # terminate the process because download hung
            break

        prev_size = curr_size  # store current size for comparison after waiting

        # wait 'wait_time' minutes before checking for completion again
        time.sleep(60 * wait_time)
        completion_status = ssara_process.poll()
        logger.log(
            loglevel.INFO, "{} minutes: {:.1f}GB, completion_status {}".format(
                i * wait_time, curr_size / 1024 / 1024, completion_status))

    exit_code = completion_status  # exit code of the command (None if hung)
    ssara_process.terminate()
    logger.log(loglevel.INFO, "EXIT CODE: %s", str(exit_code))

    bad_codes = [137, -9]  # killed (e.g. OOM / SIGKILL) terminations

    # If the exit code is one that signifies an error, rerun the entire command
    if exit_code in bad_codes or hang_status:
        if exit_code in bad_codes:
            logger.log(loglevel.WARNING,
                       "Exited with bad exit code, running again")
        if hang_status:
            logger.log(loglevel.WARNING, "Hanging, running again")

        # BUGFIX: the retry's result was discarded and execution fell
        # through to an unconditional `return 0`, so even a successful
        # retry was reported as a failure.  Propagate the result instead.
        return run_ssara(inps, run_number=run_number + 1)

    # BUGFIX: the original always returned 0; per the documented
    # contract, report 1 for a clean (exit code 0) download, else 0.
    return 1 if exit_code == 0 else 0