def waitForJobCompletion(jobIDs, logger, name=None):
    '''Sleep until all of the submitted jobs containing the provided job prefix have completed'''

    # Suffix appended to every progress message (optionally names the flight).
    suffix = '.'
    if name:
        suffix = ' from flight ' + name + '.'
    logger.info("Began waiting on " + str(len(jobIDs)) + " job(s)" + suffix)

    seenRunning = []  # job IDs already announced as running
    user = icebridge_common.getUser()
    while True:

        time.sleep(SLEEP_TIME)

        # Poll the queue and count how many of our jobs are still present.
        activeCount = 0
        for (jobID, jobName, status) in getActiveJobs(user):
            if jobID not in jobIDs:
                continue
            activeCount += 1
            # Announce each job only the first time it is observed running.
            if (status == 'R') and (jobID not in seenRunning):
                seenRunning.append(jobID)
                logger.info('Started running job named ' + str(jobName) + ' with id ' + str(jobID))

        logger.info("Waiting on " + str(activeCount) + " jobs" + suffix)
        if activeCount == 0:
            break
Exemplo n.º 2
0
def waitForJobCompletion(jobIDs, logger, name=None):
    '''Sleep until all of the submitted jobs containing the provided job prefix have completed'''

    # Message suffix, optionally tagged with the flight name.
    postfix = '.' if not name else ' from flight ' + name + '.'
    logger.info("Began waiting on " + str(len(jobIDs)) + " job(s)" + postfix)

    reported = []  # IDs whose transition to running we already logged
    user = icebridge_common.getUser()
    pending = True
    while pending:

        time.sleep(SLEEP_TIME)

        # Keep only the queue entries that belong to our submitted job set.
        ours = [job for job in getActiveJobs(user) if job[0] in jobIDs]
        pending = len(ours) > 0

        for (jobID, jobName, status) in ours:
            # Log each job once, the first time it shows up as running.
            if (status == 'R') and (jobID not in reported):
                reported.append(jobID)
                logger.info('Started running job named ' + str(jobName) +
                            ' with id ' + str(jobID))

        logger.info("Waiting on " + str(len(ours)) + " jobs" + postfix)
Exemplo n.º 3
0
def packAndSendSummaryFolder(run, folder, logger):
    '''Archive the summary folder in case we want to look at it later.

    Tars up 'folder', pushes the tarball to the lfe server via shiftc
    (raising an Exception if the transfer fails), mirrors it to Lunokhod
    for users other than oalexan1, then deletes the local tarball.'''
    
    logger.info('Archiving summary folder for run ' + str(run))
    
    # Create a local tar file
    # - Some fiddling to make the packed folders convenient
    # - 'tar -h' dereferences symlinks so the archive holds real files.
    fileName = run.getSummaryTarName()
    cmd = 'tar -chf '+ fileName +' -C '+ folder +'/.. ' + os.path.basename(folder)
    logger.info(cmd)
    # noThrow: a partially noisy tar run should not abort the archive step.
    (out, err, status) = asp_system_utils.executeCommand(cmd, outputPath = None, 
                                                         suppressOutput = True, redo = True,
                                                         noThrow = True)

    # This tends to print a very verbose message, so log at most 10 lines.
    ans = out + '\n' + err
    vals = ans.split('\n')
    if len(vals) < 10:
        logger.info(ans)
    else:
        vals = vals[0:10]
        logger.info("\n".join(vals))
        logger.info("Above output truncated.")

    # Delete any existing copy of the file on lfe (ssh noise discarded).
    lfePath  = os.path.join(REMOTE_SUMMARY_FOLDER, fileName)
    cmd      = "ssh lfe 'rm -f " + stripHost(lfePath) + "' 2>/dev/null"
    logger.info(cmd)
    os.system(cmd)

    # Send the file to lfe using shiftc; this transfer must succeed.
    cmd = 'shiftc --wait -d -r ' + fileName + ' ' + lfePath
    logger.info(cmd)
    status = os.system(cmd)
    if status != 0:
        raise Exception('Failed to pack/send summary folder for run ' + str(run))
    logger.info('Finished sending summary to lfe.')

    if icebridge_common.getUser() != 'oalexan1':
        # Wipe the copy on lunokhod
        l2Path   = os.path.join(L_SUMMARY_FOLDER, fileName)
        cmd      = "ssh " + LUNOKHOD + "  'rm -f "+ stripHost(l2Path) +"' 2>/dev/null"
        logger.info(cmd)
        os.system(cmd)
        
        # Make target directory on lunokhod
        cmd = "ssh  " + LUNOKHOD + " 'mkdir -p " + os.path.dirname(stripHost(l2Path)) + \
              "' 2>/dev/null"
        logger.info(cmd)
        os.system(cmd)
        
        # Send a copy of the file to Lunokhod for convenience (best-effort,
        # so failures here are ignored).
        cmd = 'scp ' + fileName + ' ' + l2Path + ' 2>/dev/null'
        logger.info(cmd)
        os.system(cmd)

    # Clean up the local tar file
    os.system('rm -f ' + fileName)
def main(argsIn):

    try:
        usage = '''usage: regenerate_summary_images.py <options> '''
        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument(
            "--data-folder",
            dest="dataFolder",
            help="Where all the inputs and outputs are stored.")

        parser.add_argument("--work-folder",
                            dest="workFolder",
                            help="Where working files are stored.")

        parser.add_argument("--site", dest="site", help="Site code.")

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", help="Date.")

        #parser.add_argument("--dem-tarball",  dest="demTarball", default=os.getcwd(),
        #                    help="Where all the inputs and outputs are stored.")

        #parser.add_argument("--ortho-tarball",  dest="orthoTarball", default=None,
        #                    help="Where to unpack the data.")

        #parser.add_argument("--summary-tarball",  dest="summaryTarball", default=None,
        #                    help="Where to unpack the data.")

        #parser.add_argument("--unpack-dir",  dest="unpackDir", default=None,
        #                    help="Where to unpack the data.")

        parser.add_argument(
            "--node-type",
            dest="nodeType",
            default='san',
            help="Node type to use (wes[mfe], san, ivy, has, bro)")

        #parser.add_argument("--skip-archive-summary", action="store_true",
        #                    dest="skipArchiveSummary", default=False,
        #                    help="Skip archiving the summary.")

        # Debug option
        parser.add_argument(
            '--minutes-in-devel-queue',
            dest='minutesInDevelQueue',
            type=int,
            default=0,
            help="If positive, submit to the devel queue for this many minutes."
        )

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    # Check if we are on the right machine
    (host, err, status) = asp_system_utils.executeCommand(['uname', '-n'],
                                                          suppressOutput=True)
    host = host.strip()
    if 'pfe' in host and options.nodeType not in PFE_NODES:
        raise Exception("From machine " + host + " can only launch on: " +
                        " ".join(PFE_NODES))
    if 'mfe' in host and options.nodeType != 'wes':
        raise Exception("From machine " + host + " can only launch on: wes")

    # Make sure our paths will work when called from PBS
    options.dataFolder = os.path.abspath(options.dataFolder)

    #os.system('mkdir -p ' + options.unpackDir)

    # TODO: Check folders!
    run = run_helper.RunHelper(options.site, options.yyyymmdd,
                               options.workFolder)

    runFolder = os.path.join(options.workFolder, str(run))
    os.system('mkdir -p ' + runFolder)

    logFolder = os.path.join(runFolder, 'logs')

    # Set up logging in the run directory
    os.system('mkdir -p ' + logFolder)
    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(
        logFolder, logLevel, icebridge_common.manager_log_prefix())
    logger.info("Logging in: " + logFolder)

    checkRequiredTools(
    )  # Make sure all the needed tools can be found before we start

    logger.info("Disabling core dumps.")  # these just take a lot of room
    os.system("ulimit -c 0")
    os.system("umask 022")  # enforce files be readable by others

    # See how many hours we used so far. I think this counter gets updated once a day.
    (out, err, status) = asp_system_utils.executeCommand("acct_ytd",
                                                         outputPath=None,
                                                         suppressOutput=True,
                                                         redo=True,
                                                         noThrow=True)
    logger.info("Hours used so far:\n" + out + '\n' + err)

    try:

        # Fetch and extract the tarball files from Lou

        localDemFolder = os.path.join(options.dataFolder, run.name() + '_dems')
        localOrthoFolder = os.path.join(options.dataFolder,
                                        run.name() + '_orthos')
        demSummaryFolder = os.path.join(options.dataFolder,
                                        run.name() + '_dem_summaries')
        orthoSummaryFolder = os.path.join(options.dataFolder,
                                          run.name() + '_ortho_summaries')

        missingDemFiles = []
        missingOrthoFiles = []
        for f in os.listdir(localDemFolder):
            if 'temp' in f:
                raise Exception('Bad file: ' + f)
            if ('IODEM3' in f) and (f[-4:] == '.tif'):
                inputPath = os.path.join(localDemFolder, f)
                outputPath = os.path.join(
                    demSummaryFolder, f.replace('DEM.tif', 'DEM_browse.tif'))
                if not os.path.exists(outputPath):
                    missingDemFiles.append((inputPath, outputPath))

        for f in os.listdir(localOrthoFolder):
            if 'temp' in f:
                raise Exception('Bad file: ' + f)
            if ('IODIM3' in f) and (f[-4:] == '.tif'):
                inputPath = os.path.join(localOrthoFolder, f)
                outputPath = os.path.join(orthoSummaryFolder,
                                          f.replace('ORTHO.tif', 'ORTHO.jpg'))
                if not os.path.exists(outputPath):
                    missingOrthoFiles.append((inputPath, outputPath))

        #print 'Fetching and unpacking tarballs...'
        #fetchTarball(options.demTarball,     localDemFolder)
        #fetchTarball(options.orthoTarball,   localOrthoFolder)
        #fetchTarball(options.summaryTarball, localSummaryFolder)

        # If the summary tarball unpacked to []/summary/summary,
        #  work with the lower level folder from now on.
        #localSummaryFolder = descendIfNeeded(localSummaryFolder)

        # Make a list of all input files that are missing their summary file, and
        #  the desired output path for that file.
        #missingDemFiles   = getMissingSummaryFiles(localDemFolder,   localSummaryFolder, isOrtho=False)
        #missingOrthoFiles = getMissingSummaryFiles(localOrthoFolder, localSummaryFolder, isOrtho=True )

        # Divide this list into chunks and for each chunk generate a file containing all of
        #  the gdal_translate commands that need to be executed.
        print 'Writing command files...'
        commandFileLength = getParallelParams(options.nodeType)[2]
        commandFilePrefix = os.path.join(runFolder, 'convert_commands_')
        print 'Clearing existing command files.'
        os.system('rm ' + commandFilePrefix + '*')
        commandFileList = writeCommandFiles(missingDemFiles, missingOrthoFiles,
                                            commandFilePrefix,
                                            commandFileLength)
        #raise Exception('DEBUG')

        # Get the location to store the logs
        pbsLogFolder = run.getPbsLogFolder()
        logger.info("Storing logs in: " + pbsLogFolder)
        os.system('mkdir -p ' + pbsLogFolder)

        # Call multi_process_command_runner.py through PBS for each chunk.
        start_time()
        (baseName, jobIDs) = submitBatchJobs(commandFileList, options,
                                             pbsLogFolder, run, logger)

        # Wait for everything to finish.
        pbs_functions.waitForJobCompletion(jobIDs, logger, baseName)
        stop_time("pbs_jobs", logger)

        # Check that we now have all of the summary files.
        # - Both of these should now be empty.
        #newMissingDemFiles   = getMissingSummaryFiles(localDemFolder,   demSummaryFolder, isOrtho=False)
        #newMissingOrthoFiles = getMissingSummaryFiles(localOrthoFolder, orthoSummaryFolder, isOrtho=True )
        numDemsMissing = 0
        numOrthosMissing = 0
        for pair in missingDemFiles:
            if not os.path.exists(pair[1]):
                numDemsMissing += 1
        for pair in missingOrthoFiles:
            if not os.path.exists(pair[1]):
                numOrthosMissing += 1

        resultText = (
            'After regeneration, missing %d DEM summaries and %d ORTHO summaries'
            % (numDemsMissing, numOrthosMissing))
        logger.info(resultText)

        runWasSuccess = ((numDemsMissing == 0) and (numOrthosMissing == 0))

        # If successful, create a new tarball and send it to Lou.

        #if runWasSuccess and (not options.skipArchiveSummary):
        #    start_time()
        #    archive_functions.packAndSendSummaryFolder(run, localSummaryFolder, logger)
        #    stop_time("archive summary", logger)

    except Exception as e:
        resultText = 'Caught exception: ' + str(
            e) + '\n' + traceback.format_exc()
        runWasSuccess = False

    # Send a summary email.
    emailAddress = getEmailAddress(icebridge_common.getUser())
    logger.info("Sending email to: " + emailAddress)
    if runWasSuccess:
        sendEmail(emailAddress, 'OIB summary regen passed', resultText)
    else:
        sendEmail(emailAddress, '"OIB summary regen failed', resultText)

    # TODO: Add automated delete command!
    #if options.wipeProcessed:
    #    processedFolder = run.getProcessFolder()
    #    logger.info("Will delete: " + processedFolder)
    #    os.system("rm -rf " + processedFolder)

    logger.info(
        '==== regenerate_summary_images script has finished for run: ' +
        str(run) + ' ====')
Exemplo n.º 5
0
        return 'lfe:/u/smcmich1/icebridge/output'
    elif user == 'oalexan1':
        return 'lfe:/u/oalexan1/projects/data/icebridge/output'
    else:
        raise Exception("Unknown user: "******"Unknown user: " + user)
    
# Hard-coded per-user storage locations: archive folders on the lfe (Lou)
# server, plus a convenience mirror host/folder on a Lunokhod machine.
if icebridge_common.getUser() == 'smcmich1':
    REMOTE_CAMERA_FOLDER    = 'lfe:/u/smcmich1/icebridge/camera'
    REMOTE_ALIGN_CAM_FOLDER = 'lfe:/u/smcmich1/icebridge/aligned_cameras'
    REMOTE_ORTHO_FOLDER     = 'lfe:/u/smcmich1/icebridge/ortho'
    REMOTE_SUMMARY_FOLDER   = 'lfe:/u/smcmich1/icebridge/summaries'
    REMOTE_LABEL_FOLDER     = 'lfe:/u/smcmich1/icebridge/labels'
    LUNOKHOD                = 'lunokhod2'
    L_SUMMARY_FOLDER        = LUNOKHOD + ':/home/smcmich1/data/icebridge_summaries'
elif icebridge_common.getUser() == 'oalexan1':
    # NOTE(review): this branch defines no REMOTE_ORTHO_FOLDER -- confirm it
    # is unused for this user, otherwise code reading it will hit NameError.
    REMOTE_CAMERA_FOLDER    = 'lfe:/u/oalexan1/projects/data/icebridge/camera'
    REMOTE_ALIGN_CAM_FOLDER = 'lfe:/u/oalexan1/projects/data/icebridge/aligned_cameras'
    REMOTE_SUMMARY_FOLDER   = 'lfe:/u/oalexan1/projects/data/icebridge/summaries'
    REMOTE_LABEL_FOLDER     = 'lfe:/u/oalexan1/projects/data/icebridge/labels'
    LUNOKHOD                = 'lunokhod1'
    L_SUMMARY_FOLDER        = LUNOKHOD + ':/home/oalexan1/projects/data/icebridge/summaries'
Exemplo n.º 6
0
def packAndSendSummaryFolder(run, folder, logger):
    '''Archive the summary folder in case we want to look at it later.

    Tars up 'folder' (working from run.parentFolder), pushes the tarball to
    the lfe server via robust_shiftc, mirrors it to Lunokhod for users other
    than oalexan1, then deletes the local tarball and restores the original
    working directory.'''

    logger.info('Archiving summary folder for run ' + str(run))

    # Work from the run's parent folder; restored at the end.
    cwd = os.getcwd()
    os.chdir(run.parentFolder)

    fileName = run.getSummaryTarName()

    # Create a local tar file.

    # Turn this approach off, new approach below.
    # - Some fiddling to make the packed folders convenient
    # - 'tar -h' dereferences symlinks so the archive holds real files.
    cmd = 'tar -chf ' + fileName + ' -C ' + folder + '/.. ' + os.path.basename(
        folder)
    logger.info(cmd)
    # noThrow: a noisy tar run should not abort the archive step.
    (out, err, status) = asp_system_utils.executeCommand(cmd,
                                                         outputPath=None,
                                                         suppressOutput=True,
                                                         redo=True,
                                                         noThrow=True)
    # This tends to print a very verbose message, so log at most 10 lines.
    ans = out + '\n' + err
    vals = ans.split('\n')
    if len(vals) < 10:
        logger.info(ans)
    else:
        vals = vals[0:10]
        logger.info("\n".join(vals))
        logger.info("Above output truncated.")

    # Use shiftc to create a local copy, and we want to include log files too
    #runFolder = str(run)
    #sumName = os.path.basename(run.getSummaryFolder())
    #cmd = 'shiftc --wait -d -r --dereference --include=\'^.*?('  \
    #      + icebridge_common.logFilePrefix() + '|' \
    #      + runFolder + '/' + sumName        + '|' \
    #      + icebridge_common.manager_log_prefix()  \
    #      + ')\' --create-tar ' + runFolder        \
    #      +  ' ' + fileName

    #logger.info(cmd)
    #os.system(cmd)

    # Delete any existing copy of the file on lfe (ssh noise discarded).
    lfePath = os.path.join(REMOTE_SUMMARY_FOLDER, fileName)
    cmd = "ssh lfe 'rm -f " + stripHost(lfePath) + "' 2>/dev/null"
    logger.info(cmd)
    os.system(cmd)

    # Send the file to lfe using shiftc, with retries handled by the helper.
    cmd = 'shiftc --wait -d -r ' + fileName + ' ' + lfePath
    robust_shiftc(cmd, logger)

    logger.info('Finished sending summary to lfe.')

    if icebridge_common.getUser() != 'oalexan1':
        # Wipe the copy on lunokhod
        l2Path = os.path.join(L_SUMMARY_FOLDER, fileName)
        cmd = "ssh " + LUNOKHOD + "  'rm -f " + stripHost(
            l2Path) + "' 2>/dev/null"
        logger.info(cmd)
        os.system(cmd)

        # Make target directory on lunokhod
        cmd = "ssh  " + LUNOKHOD + " 'mkdir -p " + os.path.dirname(stripHost(l2Path)) + \
              "' 2>/dev/null"
        logger.info(cmd)
        os.system(cmd)

        # Send a copy of the file to Lunokhod for convenience (best-effort,
        # so failures here are ignored).
        cmd = 'scp ' + fileName + ' ' + l2Path + ' 2>/dev/null'
        logger.info(cmd)
        os.system(cmd)

    # Clean up the local tar file
    cmd = 'rm -f ' + fileName
    logger.info(cmd)
    os.system(cmd)

    # Restore the caller's working directory.
    os.chdir(cwd)
def main(argsIn):

    try:
        usage = '''usage: regenerate_summary_images.py <options> '''
        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--data-folder",  dest="dataFolder",
                            help="Where all the inputs and outputs are stored.")

        parser.add_argument("--work-folder",  dest="workFolder",
                            help="Where working files are stored.")

        parser.add_argument("--site",  dest="site", help="Site code.")

        parser.add_argument("--yyyymmdd",  dest="yyyymmdd", help="Date.")

        #parser.add_argument("--dem-tarball",  dest="demTarball", default=os.getcwd(),
        #                    help="Where all the inputs and outputs are stored.")

        #parser.add_argument("--ortho-tarball",  dest="orthoTarball", default=None,
        #                    help="Where to unpack the data.")

        #parser.add_argument("--summary-tarball",  dest="summaryTarball", default=None,
        #                    help="Where to unpack the data.")

        #parser.add_argument("--unpack-dir",  dest="unpackDir", default=None,
        #                    help="Where to unpack the data.")

        parser.add_argument("--node-type",  dest="nodeType", default='san',
                            help="Node type to use (wes[mfe], san, ivy, has, bro)")

        #parser.add_argument("--skip-archive-summary", action="store_true",
        #                    dest="skipArchiveSummary", default=False,
        #                    help="Skip archiving the summary.")

        # Debug option
        parser.add_argument('--minutes-in-devel-queue', dest='minutesInDevelQueue', type=int,
                            default=0,
                            help="If positive, submit to the devel queue for this many minutes.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    # Check if we are on the right machine
    (host, err, status) = asp_system_utils.executeCommand(['uname', '-n'],
                                                         suppressOutput = True)
    host = host.strip()
    if 'pfe' in host and options.nodeType not in PFE_NODES:
        raise Exception("From machine " + host + " can only launch on: " + " ".join(PFE_NODES)) 
    if 'mfe' in host and options.nodeType != 'wes':
        raise Exception("From machine " + host + " can only launch on: wes")

    # Make sure our paths will work when called from PBS
    options.dataFolder= os.path.abspath(options.dataFolder)

    #os.system('mkdir -p ' + options.unpackDir)

    # TODO: Check folders!
    run = run_helper.RunHelper(options.site, options.yyyymmdd, options.workFolder)

    runFolder = os.path.join(options.workFolder, str(run))
    os.system('mkdir -p ' + runFolder)

    logFolder = os.path.join(runFolder, 'logs')

    # Set up logging in the run directory
    os.system('mkdir -p ' + logFolder)
    logLevel = logging.INFO
    logger   = icebridge_common.setUpLogger(logFolder, logLevel,
                                            icebridge_common.manager_log_prefix())
    logger.info("Logging in: " + logFolder)
    
    
    checkRequiredTools() # Make sure all the needed tools can be found before we start

    logger.info("Disabling core dumps.") # these just take a lot of room
    os.system("ulimit -c 0")
    os.system("umask 022") # enforce files be readable by others

    # See how many hours we used so far. I think this counter gets updated once a day.
    (out, err, status) = asp_system_utils.executeCommand("acct_ytd", outputPath = None, 
                                                         suppressOutput = True, redo = True,
                                                         noThrow = True)
    logger.info("Hours used so far:\n" + out + '\n' + err)


    try:
      
        # Fetch and extract the tarball files from Lou

        localDemFolder     = os.path.join(options.dataFolder, run.name()+'_dems')
        localOrthoFolder   = os.path.join(options.dataFolder, run.name()+'_orthos')
        demSummaryFolder   = os.path.join(options.dataFolder, run.name()+'_dem_summaries')
        orthoSummaryFolder = os.path.join(options.dataFolder, run.name()+'_ortho_summaries')

        missingDemFiles   = []
        missingOrthoFiles = []
        for f in os.listdir(localDemFolder):
            if 'temp' in f:
                raise Exception('Bad file: ' + f)
            if ('IODEM3' in f) and (f[-4:] == '.tif'):
                inputPath  = os.path.join(localDemFolder, f)
                outputPath = os.path.join(demSummaryFolder, f.replace('DEM.tif', 'DEM_browse.tif'))
                if not os.path.exists(outputPath):
                    missingDemFiles.append((inputPath, outputPath))

        for f in os.listdir(localOrthoFolder):
            if 'temp' in f:
                raise Exception('Bad file: ' + f)
            if ('IODIM3' in f) and (f[-4:] == '.tif'):
                inputPath  = os.path.join(localOrthoFolder, f)
                outputPath = os.path.join(orthoSummaryFolder, f.replace('ORTHO.tif', 'ORTHO.jpg'))
                if not os.path.exists(outputPath):
                    missingOrthoFiles.append((inputPath, outputPath))

        #print 'Fetching and unpacking tarballs...'
        #fetchTarball(options.demTarball,     localDemFolder)
        #fetchTarball(options.orthoTarball,   localOrthoFolder)
        #fetchTarball(options.summaryTarball, localSummaryFolder)

        # If the summary tarball unpacked to []/summary/summary,
        #  work with the lower level folder from now on.
        #localSummaryFolder = descendIfNeeded(localSummaryFolder)

        # Make a list of all input files that are missing their summary file, and
        #  the desired output path for that file.
        #missingDemFiles   = getMissingSummaryFiles(localDemFolder,   localSummaryFolder, isOrtho=False)
        #missingOrthoFiles = getMissingSummaryFiles(localOrthoFolder, localSummaryFolder, isOrtho=True )

        # Divide this list into chunks and for each chunk generate a file containing all of
        #  the gdal_translate commands that need to be executed.
        print 'Writing command files...'
        commandFileLength = getParallelParams(options.nodeType)[2]
        commandFilePrefix = os.path.join(runFolder, 'convert_commands_')
        print 'Clearing existing command files.'
        os.system('rm ' + commandFilePrefix + '*')
        commandFileList   = writeCommandFiles(missingDemFiles, missingOrthoFiles,
                                              commandFilePrefix, commandFileLength)
        #raise Exception('DEBUG')

        # Get the location to store the logs
        pbsLogFolder = run.getPbsLogFolder()
        logger.info("Storing logs in: " + pbsLogFolder)
        os.system('mkdir -p ' + pbsLogFolder)

        # Call multi_process_command_runner.py through PBS for each chunk.
        start_time()
        (baseName, jobIDs) = submitBatchJobs(commandFileList, options, pbsLogFolder, run, logger)

        # Wait for everything to finish.
        pbs_functions.waitForJobCompletion(jobIDs, logger, baseName)
        stop_time("pbs_jobs", logger)

        # Check that we now have all of the summary files.
        # - Both of these should now be empty.
        #newMissingDemFiles   = getMissingSummaryFiles(localDemFolder,   demSummaryFolder, isOrtho=False)
        #newMissingOrthoFiles = getMissingSummaryFiles(localOrthoFolder, orthoSummaryFolder, isOrtho=True )
        numDemsMissing   = 0
        numOrthosMissing = 0
        for pair in missingDemFiles:
            if not os.path.exists(pair[1]):
                numDemsMissing += 1
        for pair in missingOrthoFiles:
            if not os.path.exists(pair[1]):
                numOrthosMissing += 1


        resultText = ('After regeneration, missing %d DEM summaries and %d ORTHO summaries' 
                      % (numDemsMissing, numOrthosMissing))
        logger.info(resultText)

        runWasSuccess = ((numDemsMissing == 0) and (numOrthosMissing == 0))

        # If successful, create a new tarball and send it to Lou.

        #if runWasSuccess and (not options.skipArchiveSummary):
        #    start_time()
        #    archive_functions.packAndSendSummaryFolder(run, localSummaryFolder, logger)
        #    stop_time("archive summary", logger)


    except Exception as e:
        resultText = 'Caught exception: ' + str(e) + '\n' + traceback.format_exc()
        runWasSuccess = False

    # Send a summary email.
    emailAddress = getEmailAddress(icebridge_common.getUser())
    logger.info("Sending email to: " + emailAddress)
    if runWasSuccess:
        sendEmail(emailAddress, 'OIB summary regen passed', resultText)
    else:
        sendEmail(emailAddress, '"OIB summary regen failed', resultText)

    # TODO: Add automated delete command!
    #if options.wipeProcessed:
    #    processedFolder = run.getProcessFolder()
    #    logger.info("Will delete: " + processedFolder)
    #    os.system("rm -rf " + processedFolder)

    logger.info('==== regenerate_summary_images script has finished for run: ' + str(run) + ' ====')
Exemplo n.º 8
0
# Prepend the tool folders to PATH so helper executables invoked via the
# shell are found first.
# NOTE(review): basepath/pythonpath/libexecpath/icebridgepath are assumed to
# be defined earlier in this file -- confirm.
os.environ["PATH"] = basepath       + os.pathsep + os.environ["PATH"]
os.environ["PATH"] = pythonpath     + os.pathsep + os.environ["PATH"]
os.environ["PATH"] = libexecpath    + os.pathsep + os.environ["PATH"]
os.environ["PATH"] = icebridgepath  + os.pathsep + os.environ["PATH"]

# Default input data location on the lfe (Lou) server.
REMOTE_INPUT_FOLDER     = 'lfe:/u/oalexan1/projects/data/icebridge'

def stripHost(val):
    '''Return the path portion of a "host:/path" string.

    If 'val' has no "host:" prefix it is returned unchanged.  Whitespace
    right after the colon is stripped.'''
    # Raw string so \s is a regex escape, not a (deprecated) string escape.
    m = re.match(r"^.*?:\s*(.*?)$", val)
    if m:
        return m.group(1)
    else:
        return val
    
# Hard-coded per-user storage locations: archive folders on the lfe (Lou)
# server, plus (for smcmich1) a convenience mirror on a Lunokhod machine.
if icebridge_common.getUser() == 'smcmich1':
    REMOTE_CAMERA_FOLDER    = 'lfe:/u/smcmich1/icebridge/camera'
    REMOTE_ALIGN_CAM_FOLDER = 'lfe:/u/smcmich1/icebridge/aligned_cameras'
    REMOTE_ORTHO_FOLDER     = 'lfe:/u/smcmich1/icebridge/ortho'
    REMOTE_OUTPUT_FOLDER    = 'lfe:/u/smcmich1/icebridge/output'
    REMOTE_SUMMARY_FOLDER   = 'lfe:/u/smcmich1/icebridge/summaries'
    REMOTE_LABEL_FOLDER     = 'lfe:/u/smcmich1/icebridge/labels'
    LUNOKHOD                = 'lunokhod2'
    L_SUMMARY_FOLDER        = LUNOKHOD + ':/home/smcmich1/data/icebridge_summaries'
elif icebridge_common.getUser() == 'oalexan1':
    # NOTE(review): no LUNOKHOD / L_SUMMARY_FOLDER in this branch.
    # packAndSendSummaryFolder only touches Lunokhod when the user is not
    # 'oalexan1', so this appears deliberate -- confirm no other reader.
    REMOTE_CAMERA_FOLDER    = 'lfe:/u/oalexan1/projects/data/icebridge/camera'
    REMOTE_ALIGN_CAM_FOLDER = 'lfe:/u/oalexan1/projects/data/icebridge/aligned_cameras'
    REMOTE_ORTHO_FOLDER     = 'lfe:/u/oalexan1/projects/data/icebridge/ortho'
    REMOTE_OUTPUT_FOLDER    = 'lfe:/u/oalexan1/projects/data/icebridge/output'
    REMOTE_SUMMARY_FOLDER   = 'lfe:/u/oalexan1/projects/data/icebridge/summaries'
    REMOTE_LABEL_FOLDER     = 'lfe:/u/oalexan1/projects/data/icebridge/labels'
def main(argsIn):

    # Parse the input arguments
    if len(argsIn) < 2:
        print 'Usage: multi_flight_runner <input_flight_list> <finished_flight_list>'
        return 0

    inputFlightLog  = argsIn[0]
    outputFlightLog = argsIn[1]

    # Stop processing early if we build up too many flights!
    MAX_RUNS_RETAINED = 3

    runsRetained = []
    runsDeleted  = []
    with open(inputFlightLog, 'r') as inLog:

        for line in inLog:

            # Make run object and check if we already did this run
            parts = line.split('_')
            run   = RunHelper(parts[0], parts[1])

            if checkLogFile(outputFlightLog, run):
                print 'Skipping already completed run: ' + str(run)
                continue

            print 'Going to process run: ' + str(run)

            runFolder = os.path.join('/nobackup/smcmich1/icebridge/', str(run))

            # Set up the processing command
            # - This will generate all the labels for the current flight and then wipe everything
            #   if all of the label files were generated.

            TOOL_PATH = 'python ~/repo/StereoPipeline/src/asp/IceBridge/pleiades_manager.py'

            cmd = TOOL_PATH + '  --base-dir  /nobackup/smcmich1/icebridge/ --node-type san --camera-calibration-folder /nobackup/smcmich1/icebridge/calib_files/ --reference-dem-folder /nobackup/smcmich1/icebridge/reference_dems/ --bundle-length 2 --simple-cameras --skip-archive-cameras  --skip-archive-aligned-cameras  --skip-archive-orthos  --skip-archive-summary  --skip-archive-run --skip-ortho-gen --skip-check-outputs --skip-report  --skip-process --skip-blend --skip-convert  --skip-validate --generate-labels   --archive-labels --wipe-all'

            cmd += ' --site ' + run.site + ' --yyyymmdd ' + run.yyyymmdd

            print cmd
            os.system(cmd)

            # Whether or not we succeeded, log that we processed the flight.
            updateLogFile(outputFlightLog, run)

            if not os.path.exists(runFolder):
                print 'Run successful, deleted!'
                runsDeleted.append(run.name())
            else:
                print 'Run failed, retained!''
                runsRetained.append(run.name())

            if len(runsRetained) >= MAX_RUNS_RETAINED:
                print 'Too many flights failed, quitting now!' 
                break


    numRuns = len(runsRetained) + len(runsDeleted)
    print '---=== Finished processing ' + str(numRuns) + ' flights! ===---'


    # Send an email with a record of the runs we processed

    emailAddress = getEmailAddress(icebridge_common.getUser())
    print("Sending email to: " + emailAddress)
    subject = 'Finished running batch flight labelling script!'
    body    = '\nThe following runs were retained (some files missing):'
    for r in runsRetained:
        body += r + '\n'
    body    = '\nThe following runs were deleted (all files created):'
    for r in runsDeleted:
        body += r + '\n'

    sendEmail(emailAddress, subject, body)
Exemplo n.º 10
0
def packAndSendSummaryFolder(run, folder, logger):
    '''Archive the summary folder in case we want to look at it later.

    Tars up 'folder' (working from run.parentFolder), pushes the tarball to
    the lfe server via robust_shiftc, mirrors it to Lunokhod for users other
    than oalexan1, then deletes the local tarball and restores the original
    working directory.'''
    
    logger.info('Archiving summary folder for run ' + str(run))
    
    # Work from the run's parent folder; restored at the end.
    cwd = os.getcwd()
    os.chdir(run.parentFolder)

    fileName = run.getSummaryTarName()

    # Create a local tar file.

    # Turn this approach off, new approach below.
    # - Some fiddling to make the packed folders convenient
    # - 'tar -h' dereferences symlinks so the archive holds real files.
    cmd = 'tar -chf '+ fileName +' -C '+ folder +'/.. ' + os.path.basename(folder)
    logger.info(cmd)
    # noThrow: a noisy tar run should not abort the archive step.
    (out, err, status) = asp_system_utils.executeCommand(cmd, outputPath = None, 
                                                         suppressOutput = True, redo = True,
                                                         noThrow = True)
    # This tends to print a very verbose message, so log at most 10 lines.
    ans = out + '\n' + err
    vals = ans.split('\n')
    if len(vals) < 10:
        logger.info(ans)
    else:
        vals = vals[0:10]
        logger.info("\n".join(vals))
        logger.info("Above output truncated.")

    # Use shiftc to create a local copy, and we want to include log files too
    #runFolder = str(run)
    #sumName = os.path.basename(run.getSummaryFolder())
    #cmd = 'shiftc --wait -d -r --dereference --include=\'^.*?('  \
    #      + icebridge_common.logFilePrefix() + '|' \
    #      + runFolder + '/' + sumName        + '|' \
    #      + icebridge_common.manager_log_prefix()  \
    #      + ')\' --create-tar ' + runFolder        \
    #      +  ' ' + fileName

    #logger.info(cmd)
    #os.system(cmd)

    # Delete any existing copy of the file on lfe (ssh noise discarded).
    lfePath  = os.path.join(REMOTE_SUMMARY_FOLDER, fileName)
    cmd      = "ssh lfe 'rm -f " + stripHost(lfePath) + "' 2>/dev/null"
    logger.info(cmd)
    os.system(cmd)

    # Send the file to lfe using shiftc, with retries handled by the helper.
    cmd = 'shiftc --wait -d -r ' + fileName + ' ' + lfePath
    robust_shiftc(cmd, logger)

    logger.info('Finished sending summary to lfe.')

    if icebridge_common.getUser() != 'oalexan1':
        # Wipe the copy on lunokhod
        l2Path   = os.path.join(L_SUMMARY_FOLDER, fileName)
        cmd      = "ssh " + LUNOKHOD + "  'rm -f "+ stripHost(l2Path) +"' 2>/dev/null"
        logger.info(cmd)
        os.system(cmd)
        
        # Make target directory on lunokhod
        cmd = "ssh  " + LUNOKHOD + " 'mkdir -p " + os.path.dirname(stripHost(l2Path)) + \
              "' 2>/dev/null"
        logger.info(cmd)
        os.system(cmd)
        
        # Send a copy of the file to Lunokhod for convenience (best-effort,
        # so failures here are ignored).
        cmd = 'scp ' + fileName + ' ' + l2Path + ' 2>/dev/null'
        logger.info(cmd)
        os.system(cmd)

    # Clean up the local tar file
    cmd = 'rm -f ' + fileName
    logger.info(cmd)
    os.system(cmd)

    # Restore the caller's working directory.
    os.chdir(cwd)