if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9: # Make an exception for 20100422a raise Exception("The --yyyymmdd field must have length 8 or 9.") if options.outputFolder is None: options.outputFolder = icebridge_common.outputFolder( options.site, options.yyyymmdd) if options.stopAfterIndexFetch: options.stopAfterFetch = True os.system('mkdir -p ' + options.outputFolder) logLevel = logging.INFO # Record everything logger = icebridge_common.setUpLogger(options.outputFolder, logLevel, 'icebridge_processing_log_frames_' + \ str(options.startFrame) + "_" + str(options.stopFrame)) # Make sure we later know what we were doing logger.info("full_processing_script.py " + " ".join(argsIn)) (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'], suppressOutput=True) logger.info("Running on machine: " + out) logger.info("Work dir is " + os.getcwd()) os.system("ulimit -c 0") # disable core dumps os.system("umask 022") # enforce files be readable by others # Perform some input checks and initializations # These are not needed unless cameras are initialized
def main(argsIn):
    """Entry point for full_processing_script.py.

    Parses the command line, sets up the output folder and logging, validates
    the calibration and reference-DEM inputs, fetches and converts the flight
    data (with an optional second attempt), and then runs the processing,
    optionally solving for camera intrinsics at the end.

    Returns 0 on an early (fetch/convert only) exit; raises Exception on
    invalid inputs.
    """

    try:
        # Sample usage:
        # python full_processing_script.py \
        #  --yyyymmdd 20091016 --site AN --num-processes 1 --num-threads 12 --bundle-length 12 \
        #  --start-frame 350 --stop-frame 353 --skip-validate \
        #  --camera-calibration-folder camera_calib \
        #  --reference-dem-folder ref_dem_folder
        # An output folder will be created automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''full_processing_script.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        parser.add_argument("--camera-lookup-file", dest="cameraLookupFile", default=None,
                            help="The file to use to find which camera was used for which " + \
                            "flight. By default it is in the same directory as this script " + \
                            "and named camera_lookup.txt.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int, help="The number of images to bundle adjust and process " + \
                            "in a single batch.")
        # TODO: Compute this automatically??
        parser.add_argument('--overlap-limit', dest='overlapLimit', default=2,
                            type=int, help="The number of images to treat as overlapping for " + \
                            "bundle adjustment.")

        parser.add_argument('--stereo-arguments', dest='stereoArgs',
                            # set --min-xcorr-level 0 to do the left-to-right
                            # and right-to-left consistency check at the lowest level.
                            default='--stereo-algorithm 2 --min-xcorr-level 0',
                            help='Extra arguments to pass to stereo.')

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')
        parser.add_argument('--frames-file', dest='framesFile', default="",
                            help='Specific frames to run ortho2pinhole on within this frame range.')
        parser.add_argument('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch', default=None,
                            type=int, help="The maximum number of lidar files to fetch. " + \
                            "This is used in debugging.")

        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder", default=None,
                            help="The folder containing camera calibration.")
        parser.add_argument("--input-calibration-camera", dest="inputCalCamera", default="",
                            help="Instead of looking up the calibrated camera in the " + \
                            "calibration folder, use this one.")
        parser.add_argument("--output-calibration-camera", dest="outputCalCamera", default="",
                            help="If specified, float the intrinsics and write the " + \
                            "optimized model here.")
        parser.add_argument("--output-model-type", dest="outputModelType", default="RPC",
                            help="Generate a distortion model of type RPC, RPC5, or RPC6.")
        parser.add_argument("--reference-dem-folder", dest="refDemFolder", default=None,
                            help="The folder containing DEMs that created orthoimages.")

        # Fixed the truncated help text (used to read "fault is no additional folder").
        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. " + \
                            "The default is no additional folder.")

        parser.add_argument("--simple-cameras", action="store_true", dest="simpleCameras",
                            default=False,
                            help="Don't use orthoimages to refine the camera models.")

        # This option is only needed when generating camera models from the nav files.
        parser.add_argument('--camera-mounting', default=0, dest='cameraMounting', type=int,
                            help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-ortho-processes', dest='numOrthoProcesses', default=-1,
                            type=int, help='The number of simultaneous ortho processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int, help='The number of threads per process.')

        # Action control
        parser.add_argument("--skip-fetch", action="store_true", dest="noFetch", default=False,
                            help="Skip data fetching.")
        parser.add_argument("--skip-convert", action="store_true", dest="noConvert", default=False,
                            help="Skip data conversion.")
        parser.add_argument("--stop-after-fetch", action="store_true", dest="stopAfterFetch",
                            default=False,
                            help="Stop program after data fetching.")
        parser.add_argument("--stop-after-convert", action="store_true", dest="stopAfterConvert",
                            default=False,
                            help="Stop program after data conversion.")
        parser.add_argument("--skip-validate", action="store_true", dest="skipValidate",
                            default=False,
                            help="Skip input data validation.")
        parser.add_argument("--ignore-missing-lidar", action="store_true", dest="ignoreMissingLidar",
                            default=False,
                            help="Keep going if the lidar is missing.")
        parser.add_argument("--log-batches", action="store_true", dest="logBatches", default=False,
                            help="Log the required batch commands without running them.")
        parser.add_argument('--cleanup', action='store_true', default=False, dest='cleanup',
                            help='If the final result is produced delete intermediate files.')
        parser.add_argument('--many-ip', action='store_true', default=False, dest='manyip',
                            help='If to use a lot of IP in bundle adjustment from the beginning.')
        parser.add_argument("--dry-run", action="store_true", dest="dryRun", default=False,
                            help="Set up the input directories but do not fetch/process any imagery.")

        parser.add_argument("--refetch", action="store_true", dest="reFetch", default=False,
                            help="Try fetching again if some files turned out invalid " + \
                            "during conversions.")
        parser.add_argument("--refetch-index", action="store_true", dest="refetchIndex",
                            default=False,
                            help="Force refetch of the index file.")
        parser.add_argument("--refetch-nav", action="store_true", dest="refetchNav",
                            default=False,
                            help="Force refetch of the nav file.")
        parser.add_argument("--stop-after-index-fetch", action="store_true",
                            dest="stopAfterIndexFetch", default=False,
                            help="Stop after fetching the indices.")

        parser.add_argument("--no-nav", action="store_true", dest="noNavFetch",
                            default=False,
                            help="Don't fetch or convert the nav data.")

        parser.add_argument("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                            default=False,
                            help="Skip lidar files in the conversion step.")
        parser.add_argument("--no-ortho-convert", action="store_true", dest="noOrthoConvert",
                            default=False,
                            help="Skip generating camera models in the conversion step.")
        parser.add_argument("--skip-fast-conversions", action="store_true", dest="skipFastConvert",
                            default=False,
                            help="Skips all non-ortho conversions.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # By default run as many ortho processes as regular processes.
    if options.numOrthoProcesses < 0:
        options.numOrthoProcesses = options.numProcesses

    isSouth = icebridge_common.checkSite(options.site)

    # Turned off elevation limits here since they are being set from LIDAR data.
    ## Add the site based elevation limits to the stereoArgs option
    #altLimits = icebridge_common.getElevationLimits(options.site)
    #options.stereoArgs = (' %s --elevation-limit %f %f '
    #                      % (options.stereoArgs, altLimits[0], altLimits[1]))
    options.stereoArgs = (' %s ' % (options.stereoArgs))

    if options.cameraLookupFile is None:
        options.cameraLookupFile = P.join(basepath, 'camera_lookup.txt')
    if not os.path.isfile(options.cameraLookupFile):
        raise Exception("Can't find camera file: " + options.cameraLookupFile)

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    # Index fetching happens inside the fetch step, so stopping after the
    # index fetch implies stopping after the fetch.
    if options.stopAfterIndexFetch:
        options.stopAfterFetch = True

    # Create the output folder ('mkdir -p' equivalent, without shelling out,
    # which also avoids problems with special characters in the path).
    try:
        os.makedirs(options.outputFolder)
    except OSError:
        pass  # Folder already exists.

    logLevel = logging.INFO  # Record everything

    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_processing_log_frames_' + \
                                          str(options.startFrame) + "_" + str(options.stopFrame))

    # Make sure we later know what we were doing
    logger.info("full_processing_script.py " + " ".join(argsIn))

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)
    logger.info("Work dir is " + os.getcwd())

    # NOTE(review): these run in a subshell, so they do not actually change
    # the limits/umask of this process; kept as-is to preserve behavior.
    os.system("ulimit -c 0")  # disable core dumps
    os.system("umask 022")    # enforce files be readable by others

    # Perform some input checks and initializations
    # These are not needed unless cameras are initialized
    if options.inputCalFolder is None or not os.path.exists(options.inputCalFolder):
        raise Exception("Missing camera calibration folder.")
    if options.refDemFolder is None or not os.path.exists(options.refDemFolder):
        raise Exception("Missing reference DEM folder.")

    refDemName = icebridge_common.getReferenceDemName(options.site)
    refDemPath = os.path.join(options.refDemFolder, refDemName)
    if not os.path.exists(refDemPath):
        raise Exception("Missing reference DEM: " + refDemPath)

    # TODO: CLEAN UP!!!
    # Set up the output folders
    cameraFolder       = icebridge_common.getCameraFolder(options.outputFolder)
    imageFolder        = icebridge_common.getImageFolder(options.outputFolder)
    jpegFolder         = icebridge_common.getJpegFolder(options.outputFolder)
    orthoFolder        = icebridge_common.getOrthoFolder(options.outputFolder)
    fireballFolder     = icebridge_common.getFireballFolder(options.outputFolder)
    corrFireballFolder = icebridge_common.getCorrFireballFolder(options.outputFolder)
    lidarFolder        = icebridge_common.getLidarFolder(options.outputFolder)
    navFolder          = icebridge_common.getNavFolder(options.outputFolder)
    navCameraFolder    = icebridge_common.getNavCameraFolder(options.outputFolder)
    processedFolder    = icebridge_common.getProcessedFolder(options.outputFolder)

    if options.outputCalCamera != "":
        # Prepare to solve for intrinsics. Note that this modifies some things
        # along the way (including some of the folders above).
        (options, cameraFolder, navCameraFolder, processedFolder) = \
            solveIntrinsics_Part1(options, jpegFolder, cameraFolder, navCameraFolder,
                                  processedFolder, logger)

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processedFolder = os.path.join(processedFolder, options.processingSubfolder)
        logger.info('Will write to processing subfolder: ' + options.processingSubfolder)

    # If something failed in the first attempt either in fetch or in
    # convert, we will wipe bad files, and try to refetch/re-convert.
    numAttempts = 1
    if options.reFetch and (not options.noFetch):
        numAttempts = 2

    for attempt in range(numAttempts):
        if numAttempts > 1:
            logger.info("Fetch/convert attempt: " + str(attempt + 1))
        ans = runFetchConvert(options, isSouth, cameraFolder, imageFolder, jpegFolder,
                              orthoFolder, fireballFolder, corrFireballFolder, lidarFolder,
                              processedFolder, navFolder, navCameraFolder, refDemPath, logger)
        if ans == 0:
            break  # Success, no second attempt needed.

    if options.stopAfterFetch or options.dryRun or options.stopAfterConvert:
        logger.info('Fetch/convert finished!')
        return 0

    # Call the processing routine
    processTheRun(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                  corrFireballFolder, processedFolder, isSouth, refDemPath)

    if options.outputCalCamera != "":
        # Finish solving for intrinsics.
        solveIntrinsics_Part2(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                              processedFolder, isSouth, logger)
def main(argsIn):
    """Generate orthoimages for a range of frames.

    Parses the command line, locates the jpeg index for the run, clamps the
    requested frame range to the frames actually present, then invokes
    runOrtho on every frame -- sequentially when --num-processes is 1 (easier
    to debug), otherwise through a multiprocessing pool.
    """

    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/gen_ortho.py --site GR \
        #  --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #  --num-threads 8 --num-processes 3.
        usage = '''gen_ortho.py <options>'''
        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, "
                                 "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2, type=int,
                            help="The number of images to bundle adjust and process "
                                 "in a single batch.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to "
                                 "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')
        parser.add_argument('--camera-mounting', dest='cameraMounting', default=0, type=int,
                            help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')
        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. "
                                 "The default is no additional folder.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1, type=int,
                            help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8, type=int,
                            help='The number of threads per process.')

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # A trailing letter is tolerated (e.g. 20100422a), hence length 9.
    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site,
                                                             options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)

    logLevel = logging.INFO  # Make this an option??
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_ortho_log')

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    # The jpeg index tells us which image file belongs to each frame.
    jpegFolder = icebridge_common.getJpegFolder(options.outputFolder)
    jpegIndexPath = icebridge_common.csvIndexFile(jpegFolder)
    if not os.path.exists(jpegIndexPath):
        raise Exception("Error: Missing jpeg index file: " + jpegIndexPath + ".")
    (frameToJpeg, jpegUrls) = icebridge_common.readIndexFile(jpegIndexPath,
                                                             prependFolder=True)

    threadArg = ''
    if options.numThreads:
        threadArg = '--threads ' + str(options.numThreads)

    redo = False
    suppressOutput = True

    pendingTasks = []
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Clamp the requested frame range to the frames actually present.
    availableFrames = sorted(frameToJpeg.keys())
    if len(availableFrames) > 0:
        if options.startFrame < availableFrames[0]:
            options.startFrame = availableFrames[0]
        if options.stopFrame > availableFrames[-1] + 1:
            options.stopFrame = availableFrames[-1] + 1
    else:
        # No jpeg files, that means nothing to do
        options.startFrame = 0
        options.stopFrame = 0

    for frame in range(options.startFrame, options.stopFrame):

        if frame not in frameToJpeg:
            logger.info("Error: Missing jpeg file for frame: " + str(frame) + ".")
            continue

        # Find the right image
        inputImage = frameToJpeg[frame]

        taskArgs = (frame, processFolder, inputImage, options.bundleLength,
                    options.cameraMounting, threadArg, redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            pendingTasks.append(pool.apply_async(runOrtho, taskArgs))
        else:
            runOrtho(*taskArgs)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(pendingTasks, logger,
                                                         interactive=False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)
def main(argsIn):
    """Regenerate missing DEM/ORTHO summary (browse) images for a run.

    Scans the run's DEM and ortho folders for products lacking a summary
    image, submits batches of gdal_translate commands through PBS, waits for
    them to finish, and emails a pass/fail report.
    """

    try:
        usage = '''usage: regenerate_summary_images.py <options> '''
        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--data-folder", dest="dataFolder",
                            help="Where all the inputs and outputs are stored.")
        parser.add_argument("--work-folder", dest="workFolder",
                            help="Where working files are stored.")
        parser.add_argument("--site", dest="site", help="Site code.")
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", help="Date.")

        #parser.add_argument("--dem-tarball", dest="demTarball", default=os.getcwd(),
        #                    help="Where all the inputs and outputs are stored.")
        #parser.add_argument("--ortho-tarball", dest="orthoTarball", default=None,
        #                    help="Where to unpack the data.")
        #parser.add_argument("--summary-tarball", dest="summaryTarball", default=None,
        #                    help="Where to unpack the data.")
        #parser.add_argument("--unpack-dir", dest="unpackDir", default=None,
        #                    help="Where to unpack the data.")

        parser.add_argument("--node-type", dest="nodeType", default='san',
                            help="Node type to use (wes[mfe], san, ivy, has, bro)")

        #parser.add_argument("--skip-archive-summary", action="store_true",
        #                    dest="skipArchiveSummary", default=False,
        #                    help="Skip archiving the summary.")

        # Debug option
        parser.add_argument('--minutes-in-devel-queue', dest='minutesInDevelQueue', type=int,
                            default=0,
                            help="If positive, submit to the devel queue for this many minutes.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    # Check if we are on the right machine
    (host, err, status) = asp_system_utils.executeCommand(['uname', '-n'],
                                                          suppressOutput=True)
    host = host.strip()
    if 'pfe' in host and options.nodeType not in PFE_NODES:
        raise Exception("From machine " + host + " can only launch on: " + " ".join(PFE_NODES))
    if 'mfe' in host and options.nodeType != 'wes':
        raise Exception("From machine " + host + " can only launch on: wes")

    # Make sure our paths will work when called from PBS
    options.dataFolder = os.path.abspath(options.dataFolder)

    #os.system('mkdir -p ' + options.unpackDir)

    # TODO: Check folders!
    run = run_helper.RunHelper(options.site, options.yyyymmdd, options.workFolder)

    runFolder = os.path.join(options.workFolder, str(run))
    os.system('mkdir -p ' + runFolder)

    logFolder = os.path.join(runFolder, 'logs')

    # Set up logging in the run directory
    os.system('mkdir -p ' + logFolder)
    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(logFolder, logLevel,
                                          icebridge_common.manager_log_prefix())
    logger.info("Logging in: " + logFolder)

    # Make sure all the needed tools can be found before we start
    checkRequiredTools()

    logger.info("Disabling core dumps.")  # these just take a lot of room
    os.system("ulimit -c 0")
    os.system("umask 022")  # enforce files be readable by others

    # See how many hours we used so far. I think this counter gets updated once a day.
    (out, err, status) = asp_system_utils.executeCommand("acct_ytd", outputPath=None,
                                                         suppressOutput=True, redo=True,
                                                         noThrow=True)
    logger.info("Hours used so far:\n" + out + '\n' + err)

    try:
        # Fetch and extract the tarball files from Lou
        localDemFolder     = os.path.join(options.dataFolder, run.name() + '_dems')
        localOrthoFolder   = os.path.join(options.dataFolder, run.name() + '_orthos')
        demSummaryFolder   = os.path.join(options.dataFolder, run.name() + '_dem_summaries')
        orthoSummaryFolder = os.path.join(options.dataFolder, run.name() + '_ortho_summaries')

        # Collect (input, output) pairs for every product whose summary
        # (browse) image does not exist yet.
        missingDemFiles   = []
        missingOrthoFiles = []
        for f in os.listdir(localDemFolder):
            if 'temp' in f:  # a leftover temporary file means an interrupted run
                raise Exception('Bad file: ' + f)
            if ('IODEM3' in f) and (f[-4:] == '.tif'):
                inputPath = os.path.join(localDemFolder, f)
                outputPath = os.path.join(demSummaryFolder,
                                          f.replace('DEM.tif', 'DEM_browse.tif'))
                if not os.path.exists(outputPath):
                    missingDemFiles.append((inputPath, outputPath))

        for f in os.listdir(localOrthoFolder):
            if 'temp' in f:
                raise Exception('Bad file: ' + f)
            if ('IODIM3' in f) and (f[-4:] == '.tif'):
                inputPath = os.path.join(localOrthoFolder, f)
                outputPath = os.path.join(orthoSummaryFolder,
                                          f.replace('ORTHO.tif', 'ORTHO.jpg'))
                if not os.path.exists(outputPath):
                    missingOrthoFiles.append((inputPath, outputPath))

        #print('Fetching and unpacking tarballs...')
        #fetchTarball(options.demTarball, localDemFolder)
        #fetchTarball(options.orthoTarball, localOrthoFolder)
        #fetchTarball(options.summaryTarball, localSummaryFolder)

        # If the summary tarball unpacked to []/summary/summary,
        # work with the lower level folder from now on.
        #localSummaryFolder = descendIfNeeded(localSummaryFolder)

        # Make a list of all input files that are missing their summary file, and
        # the desired output path for that file.
        #missingDemFiles   = getMissingSummaryFiles(localDemFolder, localSummaryFolder, isOrtho=False)
        #missingOrthoFiles = getMissingSummaryFiles(localOrthoFolder, localSummaryFolder, isOrtho=True )

        # Divide this list into chunks and for each chunk generate a file containing all of
        # the gdal_translate commands that need to be executed.
        # (These were Python-2-only print statements; print() works in both 2 and 3.)
        print('Writing command files...')
        commandFileLength = getParallelParams(options.nodeType)[2]
        commandFilePrefix = os.path.join(runFolder, 'convert_commands_')
        print('Clearing existing command files.')
        os.system('rm ' + commandFilePrefix + '*')
        commandFileList = writeCommandFiles(missingDemFiles, missingOrthoFiles,
                                            commandFilePrefix, commandFileLength)
        #raise Exception('DEBUG')

        # Get the location to store the logs
        pbsLogFolder = run.getPbsLogFolder()
        logger.info("Storing logs in: " + pbsLogFolder)
        os.system('mkdir -p ' + pbsLogFolder)

        # Call multi_process_command_runner.py through PBS for each chunk.
        start_time()
        (baseName, jobIDs) = submitBatchJobs(commandFileList, options, pbsLogFolder,
                                             run, logger)

        # Wait for everything to finish.
        pbs_functions.waitForJobCompletion(jobIDs, logger, baseName)
        stop_time("pbs_jobs", logger)

        # Check that we now have all of the summary files.
        # - Both of these counts should now be zero.
        numDemsMissing   = 0
        numOrthosMissing = 0
        for pair in missingDemFiles:
            if not os.path.exists(pair[1]):
                numDemsMissing += 1
        for pair in missingOrthoFiles:
            if not os.path.exists(pair[1]):
                numOrthosMissing += 1

        resultText = ('After regeneration, missing %d DEM summaries and %d ORTHO summaries'
                      % (numDemsMissing, numOrthosMissing))
        logger.info(resultText)

        runWasSuccess = ((numDemsMissing == 0) and (numOrthosMissing == 0))

        # If successful, create a new tarball and send it to Lou.
        #if runWasSuccess and (not options.skipArchiveSummary):
        #    start_time()
        #    archive_functions.packAndSendSummaryFolder(run, localSummaryFolder, logger)
        #    stop_time("archive summary", logger)

    except Exception as e:
        resultText = 'Caught exception: ' + str(e) + '\n' + traceback.format_exc()
        runWasSuccess = False

    # Send a summary email.
    emailAddress = getEmailAddress(icebridge_common.getUser())
    logger.info("Sending email to: " + emailAddress)
    if runWasSuccess:
        sendEmail(emailAddress, 'OIB summary regen passed', resultText)
    else:
        # Removed a stray '"' that used to prefix this subject line.
        sendEmail(emailAddress, 'OIB summary regen failed', resultText)

    # TODO: Add automated delete command!
    #if options.wipeProcessed:
    #    processedFolder = run.getProcessFolder()
    #    logger.info("Will delete: " + processedFolder)
    #    os.system("rm -rf " + processedFolder)

    logger.info('==== regenerate_summary_images script has finished for run: '
                + str(run) + ' ====')
for val in f: val = val.strip() done.add(val) if options.site_yyyymmdd in done: print("Skipping done flight: " + options.site_yyyymmdd) return 0 run = run_helper.RunHelper(options.site, options.yyyymmdd, os.getcwd()) # Set up logging in the run directory. Log outside of the run dir, # as that one we will wipe logFolder = os.path.abspath( os.path.join(run.getFolder(), '..', 'push_logs')) os.system('mkdir -p ' + logFolder) logLevel = logging.INFO logger = icebridge_common.setUpLogger(logFolder, logLevel, "push") logger.info("Logging in: " + logFolder) # Check the lftp version. On some machines it is too old. (out, err, status) = asp_system_utils.executeCommand(['lftp', '--version'], suppressOutput=True) m = re.match('^.*?LFTP\s+\|\s+Version\s+4.5', out) if not m: raise Exception('Expecting LFTP version 4.5.') else: logger.info("Found an acceptable version of LFTP.") pushByType(run, options, logger, 'DEM') #pushByType(run, options, logger, 'ORTHO') # need to wait for format decision # Wipe at the end
def main(argsIn):
    """Blend the DEMs produced for a range of frames.

    Parses the command line, validates the run layout, clamps the requested
    frame range to the frames present in the ortho index, then calls runBlend
    for every frame -- sequentially when --num-processes is 1, otherwise
    through a multiprocessing pool.
    """

    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/blend_dems.py --site GR \
        #  --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #  --num-threads 8 --num-processes 10
        usage = '''blend_dems.py <options>'''
        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int, help="The number of images to bundle adjust and process " + \
                            "in a single batch.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')
        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. " + \
                            "The default is no additional folder.")
        parser.add_argument("--compute-diff-to-prev-dem", action="store_true",
                            dest="computeDiffToPrev", default=False,
                            help="Compute the absolute difference between the current DEM " +
                            "and the one before it.")
        parser.add_argument("--blend-to-fireball-footprint", action="store_true",
                            dest="blendToFireball", default=False,
                            help="Create additional blended DEMs having the same " + \
                            "footprint as Fireball DEMs.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int, help='The number of threads per process.')

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # NOTE(review): ulimit/umask run in a subshell and do not affect this
    # process; kept as-is to preserve behavior.
    os.system("ulimit -c 0")   # disable core dumps
    os.system("rm -f core.*")  # these keep on popping up
    os.system("umask 022")     # enforce files be readable by others

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO  # Make this an option??
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_blend_log')

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)
    logger.info(str(argsIn))

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    orthoFolder = icebridge_common.getOrthoFolder(options.outputFolder)
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)

    if options.blendToFireball:
        fireballFrameDict = icebridge_common.getCorrectedFireballDems(options.outputFolder)

    lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)

    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)

    redo = False
    suppressOutput = True
    taskHandles = []
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Bound the frames to the range actually covered by the ortho index.
    sortedFrames = sorted(orthoFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No ortho files, that means nothing to do
        options.startFrame = 0
        options.stopFrame = 0

    for frame in range(options.startFrame, options.stopFrame):

        if frame not in orthoFrameDict:
            logger.info("Error: Missing ortho file for frame: " + str(frame) + ".")
            continue

        orthoFile = orthoFrameDict[frame]
        try:
            lidarFile = icebridge_common.findMatchingLidarFile(orthoFile, lidarFolder)
        except Exception:
            # Skip if no lidar file matches this frame. (Was a bare 'except:',
            # which also swallowed KeyboardInterrupt/SystemExit.)
            continue

        fireballDEM = ""
        if options.blendToFireball:
            if frame in fireballFrameDict:
                fireballDEM = fireballFrameDict[frame]
            else:
                logger.info("No fireball DEM for frame: " + str(frame))

        args = (frame, processFolder, lidarFile, fireballDEM, options, threadText,
                redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runBlend, args))
        else:
            runBlend(*args)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger,
                                                         interactive=False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)
def main(argsIn):
    """Entry point of full_processing_script.py.

    Parses command-line options, sets up the run's output folder and logging,
    fetches/converts the flight data (optionally retrying once), and then runs
    the stereo processing pipeline. Returns 0 on an early clean stop
    (fetch/convert-only modes); otherwise falls through after processing.

    argsIn: list of command-line argument strings (e.g. sys.argv[1:]).
    """

    try:
        # Sample usage:
        # python full_processing_script.py \
        #  --yyyymmdd 20091016 --site AN --num-processes 1 --num-threads 12 --bundle-length 12 \
        #  --start-frame 350 --stop-frame 353 --skip-validate \
        #  --camera-calibration-folder camera_calib \
        #  --reference-dem-folder ref_dem_folder
        # An output folder will be crated automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''full_processing_script.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        parser.add_argument("--camera-lookup-file", dest="cameraLookupFile", default=None,
                            help="The file to use to find which camera was used for which " + \
                            "flight. By default it is in the same directory as this script " + \
                            "and named camera_lookup.txt.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2, type=int,
                            help="The number of images to bundle adjust and process " + \
                            "in a single batch.")
        # TODO: Compute this automatically??
        parser.add_argument('--overlap-limit', dest='overlapLimit', default=2, type=int,
                            help="The number of images to treat as overlapping for " + \
                            "bundle adjustment.")

        parser.add_argument('--max-overlap-ratio', dest='maxOverlapRatio', default=0.85,
                            type=float,
                            help='The maximum ratio of overlap between images to be accepted as part of a stereo pair. When floating intrinsics, this will be set to 1, to not upset some bookkeeping.')

        parser.add_argument('--stereo-arguments', dest='stereoArgs',
                            # set --min-xcorr-level 0 to do the left-to-right
                            # and right-to-left consistency check at the lowest level.
                            default='--stereo-algorithm 2 --min-xcorr-level 0',
                            help='Extra arguments to pass to stereo.')

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')
        parser.add_argument('--frames-file', dest='framesFile', default="",
                            help='Specific frames to run ortho2pinhole on within this frame range.')
        parser.add_argument('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch',
                            default=None, type=int,
                            help="The maximum number of lidar files to fetch. " + \
                            "This is used in debugging.")
        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder", default=None,
                            help="The folder containing camera calibration.")
        parser.add_argument("--input-calibration-camera", dest="inputCalCamera", default="",
                            help="Instead of looking up the calibrated camera in the calibration folder, use this one.")
        parser.add_argument("--output-calibration-camera", dest="outputCalCamera", default="",
                            help="If specified, float the intrinsics and write the optimized model here.")
        parser.add_argument("--output-model-type", dest="outputModelType", default="RPC",
                            help="Generate a distortion model of type RPC, RPC5, or RPC6.")
        parser.add_argument("--reference-dem-folder", dest="refDemFolder", default=None,
                            help="The folder containing DEMs that created orthoimages.")
        # NOTE(review): the help text below looks truncated ("...go. fault is...");
        # probably meant "Default is no additional folder" — verify against upstream.
        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. " + \
                            "fault is no additional folder")
        parser.add_argument("--simple-cameras", action="store_true", dest="simpleCameras",
                            default=False,
                            help="Don't use orthoimages to refine the camera models.")

        # This option is only needed when generating camera models from the nav files.
        parser.add_argument('--camera-mounting', default=0, dest='cameraMounting', type=int,
                            help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1, type=int,
                            help='The number of simultaneous processes to run.')
        parser.add_argument('--num-ortho-processes', dest='numOrthoProcesses', default=-1,
                            type=int,
                            help='The number of simultaneous ortho processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8, type=int,
                            help='The number of threads per process.')

        # Action control
        parser.add_argument("--skip-fetch", action="store_true", dest="noFetch", default=False,
                            help="Skip data fetching.")
        parser.add_argument("--skip-convert", action="store_true", dest="noConvert",
                            default=False,
                            help="Skip data conversion.")
        parser.add_argument("--stop-after-fetch", action="store_true", dest="stopAfterFetch",
                            default=False,
                            help="Stop program after data fetching.")
        parser.add_argument("--stop-after-convert", action="store_true", dest="stopAfterConvert",
                            default=False,
                            help="Stop program after data conversion.")
        parser.add_argument("--skip-validate", action="store_true", dest="skipValidate",
                            default=False,
                            help="Skip input data validation.")
        parser.add_argument("--ignore-missing-lidar", action="store_true",
                            dest="ignoreMissingLidar", default=False,
                            help="Keep going if the lidar is missing.")
        parser.add_argument("--log-batches", action="store_true", dest="logBatches",
                            default=False,
                            help="Log the required batch commands without running them.")
        parser.add_argument('--cleanup', action='store_true', default=False, dest='cleanup',
                            help='If the final result is produced delete intermediate files.')
        parser.add_argument('--many-ip', action='store_true', default=False, dest='manyip',
                            help='If to use a lot of IP in bundle adjustment from the beginning.')
        parser.add_argument("--dry-run", action="store_true", dest="dryRun", default=False,
                            help="Set up the input directories but do not fetch/process any imagery.")
        parser.add_argument("--refetch", action="store_true", dest="reFetch", default=False,
                            help="Try fetching again if some files turned out invalid " + \
                            "during conversions.")
        parser.add_argument("--refetch-index", action="store_true", dest="refetchIndex",
                            default=False,
                            help="Force refetch of the index file.")
        parser.add_argument("--refetch-nav", action="store_true", dest="refetchNav",
                            default=False,
                            help="Force refetch of the nav file.")
        parser.add_argument("--stop-after-index-fetch", action="store_true",
                            dest="stopAfterIndexFetch", default=False,
                            help="Stop after fetching the indices.")
        parser.add_argument("--no-nav", action="store_true", dest="noNavFetch", default=False,
                            help="Don't fetch or convert the nav data.")
        parser.add_argument("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                            default=False,
                            help="Skip lidar files in the conversion step.")
        parser.add_argument("--no-ortho-convert", action="store_true", dest="noOrthoConvert",
                            default=False,
                            help="Skip generating camera models in the conversion step.")
        parser.add_argument("--skip-fast-conversions", action="store_true",
                            dest="skipFastConvert", default=False,
                            help="Skips all non-ortho conversions.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # By default the ortho conversion uses the same parallelism as processing.
    if options.numOrthoProcesses < 0:
        options.numOrthoProcesses = options.numProcesses

    isSouth = icebridge_common.checkSite(options.site)

    # Turned off elevation limits here since they are being set from LIDAR data.
    ## Add the site based elevation limits to the stereoArgs option
    #altLimits = icebridge_common.getElevationLimits(options.site)
    #options.stereoArgs = (' %s --elevation-limit %f %f '
    #                      % (options.stereoArgs, altLimits[0], altLimits[1]))
    options.stereoArgs = (' %s ' % (options.stereoArgs))

    # Resolve the camera lookup file relative to this script if not given.
    if options.cameraLookupFile is None:
        options.cameraLookupFile = P.join(basepath, 'camera_lookup.txt')
    if not os.path.isfile(options.cameraLookupFile):
        raise Exception("Can't find camera file: " + options.cameraLookupFile)

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site,
                                                             options.yyyymmdd)

    # Stopping after the index fetch implies stopping after the fetch stage.
    if options.stopAfterIndexFetch:
        options.stopAfterFetch = True

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO  # Record everything
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_processing_log_frames_' + \
                                          str(options.startFrame) + "_" + str(options.stopFrame))

    # Make sure we later know what we were doing
    logger.info("full_processing_script.py " + " ".join(argsIn))

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)
    logger.info("Work dir is " + os.getcwd())

    os.system("ulimit -c 0")  # disable core dumps
    os.system("umask 022")  # enforce files be readable by others

    # Perform some input checks and initializations
    # These are not needed unless cameras are initialized
    if options.inputCalFolder is None or not os.path.exists(options.inputCalFolder):
        raise Exception("Missing camera calibration folder.")
    if options.refDemFolder is None or not os.path.exists(options.refDemFolder):
        raise Exception("Missing reference DEM folder.")

    refDemName = icebridge_common.getReferenceDemName(options.site)
    refDemPath = os.path.join(options.refDemFolder, refDemName)
    if not os.path.exists(refDemPath):
        raise Exception("Missing reference DEM: " + refDemPath)

    # TODO: CLEAN UP!!!
    # Set up the output folders
    cameraFolder = icebridge_common.getCameraFolder(options.outputFolder)
    imageFolder = icebridge_common.getImageFolder(options.outputFolder)
    jpegFolder = icebridge_common.getJpegFolder(options.outputFolder)
    orthoFolder = icebridge_common.getOrthoFolder(options.outputFolder)
    fireballFolder = icebridge_common.getFireballFolder(options.outputFolder)
    corrFireballFolder = icebridge_common.getCorrFireballFolder(options.outputFolder)
    lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)
    navFolder = icebridge_common.getNavFolder(options.outputFolder)
    navCameraFolder = icebridge_common.getNavCameraFolder(options.outputFolder)
    processedFolder = icebridge_common.getProcessedFolder(options.outputFolder)

    if options.outputCalCamera != "":
        # When floating intrinsics, consecutive frames must always be paired.
        if options.maxOverlapRatio < 1:
            raise Exception ("For optimizing intrinsics, must set --max-overlap-ratio to 1, " + \
                             "to always use consecutive frames.")

        # Prepare to solve for intrinsics. Note that this modifies some things along the way.
        (options, cameraFolder, navCameraFolder, processedFolder) = \
            solveIntrinsics_Part1(options, jpegFolder, cameraFolder, navCameraFolder,
                                  processedFolder, logger)

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processedFolder = os.path.join(processedFolder, options.processingSubfolder)
        logger.info('Will write to processing subfolder: ' + options.processingSubfolder)

    # If something failed in the first attempt either in fetch or in
    # convert, we will wipe bad files, and try to refetch/re-convert.
    numAttempts = 1
    if options.reFetch and (not options.noFetch):
        numAttempts = 2

    for attempt in range(numAttempts):
        if numAttempts > 1:
            logger.info("Fetch/convert attempt: " + str(attempt + 1))
        ans = runFetchConvert(options, isSouth, cameraFolder, imageFolder, jpegFolder,
                              orthoFolder, fireballFolder, corrFireballFolder, lidarFolder,
                              processedFolder, navFolder, navCameraFolder, refDemPath, logger)
        # A zero return code means fetch/convert succeeded; no retry needed.
        if ans == 0:
            break

    if options.stopAfterFetch or options.dryRun or options.stopAfterConvert:
        logger.info('Fetch/convert finished!')
        return 0

    # Call the processing routine
    processTheRun(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                  corrFireballFolder, processedFolder, isSouth, refDemPath)

    if options.outputCalCamera != "":
        # Finish solving for intrinscs.
        solveIntrinsics_Part2(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                              processedFolder, isSouth, logger)
def main(argsIn):
    """Entry point of push_to_nsidc.py.

    Pushes the processed DEMs of one flight to NSIDC via lftp, then wipes the
    local run folder. Returns 0 (and does nothing) if the flight is already
    listed in the optional --done-file.

    argsIn: list of command-line argument strings (e.g. sys.argv[1:]).

    Raises Exception if the available lftp version is not acceptable.
    """

    try:
        usage = '''usage: push_to_nsidc.py <options> '''
        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", default="",
                            help="Specify the year, month, and day in one YYYYMMDD string.")

        parser.add_argument("--site", dest="site", default="",
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--site_yyyymmdd", dest="site_yyyymmdd", default="",
                            help="A value like GR_20150330, which will be split into site and yyyymmdd by this script.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " +
                            "process all frames.")

        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')

        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder", default=None,
                            help="The folder containing camera calibration.")

        parser.add_argument("--reference-dem-folder", dest="refDemFolder", default=None,
                            help="The folder containing DEMs that created orthoimages.")

        parser.add_argument("--login-info", dest="loginInfo", default=None,
                            help="user,password destination.nsidc.org.")

        parser.add_argument("--done-file", dest="doneFile", default=None,
                            help="List of runs that were done by now.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    # parse --site_yyyymmdd. Sometimes it is easier to pass this than
    # to pass separately --site and --yyyymmdd.
    m = re.match(r'^(\w+)_(\w+)', options.site_yyyymmdd)
    if m:
        options.site = m.group(1)
        options.yyyymmdd = m.group(2)
    else:
        options.site_yyyymmdd = options.site + "_" + options.yyyymmdd

    # Read the done file and exit if the current flight is done.
    # BUGFIX: --done-file defaults to None, so the old check
    # "options.doneFile != ''" passed for the default and open(None) crashed.
    # A truthiness test skips both None and the empty string.
    done = set()
    if options.doneFile:
        with open(options.doneFile, 'r') as f:
            for val in f:
                done.add(val.strip())
    if options.site_yyyymmdd in done:
        print("Skipping done flight: " + options.site_yyyymmdd)
        return 0

    run = run_helper.RunHelper(options.site, options.yyyymmdd, os.getcwd())

    # Set up logging in the run directory. Log outside of the run dir,
    # as that one we will wipe
    logFolder = os.path.abspath(os.path.join(run.getFolder(), '..', 'push_logs'))
    os.system('mkdir -p ' + logFolder)
    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(logFolder, logLevel, "push")
    logger.info("Logging in: " + logFolder)

    # Check the lftp version. On some machines it is too old.
    (out, err, status) = asp_system_utils.executeCommand(['lftp', '--version'],
                                                         suppressOutput=True)
    m = re.match(r'^.*?LFTP\s+\|\s+Version\s+4.5', out)
    if not m:
        raise Exception('Expecting LFTP version 4.5.')
    else:
        logger.info("Found an acceptable version of LFTP.")

    pushByType(run, options, logger, 'DEM')
    #pushByType(run, options, logger, 'ORTHO') # need to wait for format decision

    # Wipe at the end
    cmd = "rm -rf " + run.getFolder()
    logger.info(cmd)
    os.system(cmd)
def main(argsIn):
    """Entry point of regenerate_summary_images.py.

    Finds DEM/ORTHO products that are missing their browse/summary images,
    submits PBS batch jobs to regenerate them, waits for completion, and
    emails a pass/fail report.

    argsIn: list of command-line argument strings (e.g. sys.argv[1:]).

    Raises Exception when launched from a machine that cannot submit to the
    requested node type.
    """

    try:
        usage = '''usage: regenerate_summary_images.py <options> '''
        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--data-folder", dest="dataFolder",
                            help="Where all the inputs and outputs are stored.")

        parser.add_argument("--work-folder", dest="workFolder",
                            help="Where working files are stored.")

        parser.add_argument("--site", dest="site", help="Site code.")

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", help="Date.")

        #parser.add_argument("--dem-tarball", dest="demTarball", default=os.getcwd(),
        #                    help="Where all the inputs and outputs are stored.")

        #parser.add_argument("--ortho-tarball", dest="orthoTarball", default=None,
        #                    help="Where to unpack the data.")

        #parser.add_argument("--summary-tarball", dest="summaryTarball", default=None,
        #                    help="Where to unpack the data.")

        #parser.add_argument("--unpack-dir", dest="unpackDir", default=None,
        #                    help="Where to unpack the data.")

        parser.add_argument("--node-type", dest="nodeType", default='san',
                            help="Node type to use (wes[mfe], san, ivy, has, bro)")

        #parser.add_argument("--skip-archive-summary", action="store_true",
        #                    dest="skipArchiveSummary", default=False,
        #                    help="Skip archiving the summary.")

        # Debug option
        parser.add_argument('--minutes-in-devel-queue', dest='minutesInDevelQueue',
                            type=int, default=0,
                            help="If positive, submit to the devel queue for this many minutes.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    # Check if we are on the right machine
    (host, err, status) = asp_system_utils.executeCommand(['uname', '-n'],
                                                          suppressOutput=True)
    host = host.strip()
    if 'pfe' in host and options.nodeType not in PFE_NODES:
        raise Exception("From machine " + host + " can only launch on: " + " ".join(PFE_NODES))
    if 'mfe' in host and options.nodeType != 'wes':
        raise Exception("From machine " + host + " can only launch on: wes")

    # Make sure our paths will work when called from PBS
    options.dataFolder = os.path.abspath(options.dataFolder)

    #os.system('mkdir -p ' + options.unpackDir)

    # TODO: Check folders!
    run = run_helper.RunHelper(options.site, options.yyyymmdd, options.workFolder)

    runFolder = os.path.join(options.workFolder, str(run))
    os.system('mkdir -p ' + runFolder)

    logFolder = os.path.join(runFolder, 'logs')

    # Set up logging in the run directory
    os.system('mkdir -p ' + logFolder)
    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(logFolder, logLevel,
                                          icebridge_common.manager_log_prefix())
    logger.info("Logging in: " + logFolder)

    checkRequiredTools()  # Make sure all the needed tools can be found before we start

    logger.info("Disabling core dumps.")  # these just take a lot of room
    os.system("ulimit -c 0")
    os.system("umask 022")  # enforce files be readable by others

    # See how many hours we used so far. I think this counter gets updated once a day.
    (out, err, status) = asp_system_utils.executeCommand("acct_ytd", outputPath=None,
                                                         suppressOutput=True, redo=True,
                                                         noThrow=True)
    logger.info("Hours used so far:\n" + out + '\n' + err)

    try:
        # Fetch and extract the tarball files from Lou
        localDemFolder = os.path.join(options.dataFolder, run.name() + '_dems')
        localOrthoFolder = os.path.join(options.dataFolder, run.name() + '_orthos')
        demSummaryFolder = os.path.join(options.dataFolder, run.name() + '_dem_summaries')
        orthoSummaryFolder = os.path.join(options.dataFolder, run.name() + '_ortho_summaries')

        # Collect (input, output) pairs for every product missing its summary.
        missingDemFiles = []
        missingOrthoFiles = []
        for f in os.listdir(localDemFolder):
            if 'temp' in f:  # a leftover partial download/conversion
                raise Exception('Bad file: ' + f)
            if ('IODEM3' in f) and (f[-4:] == '.tif'):
                inputPath = os.path.join(localDemFolder, f)
                outputPath = os.path.join(demSummaryFolder,
                                          f.replace('DEM.tif', 'DEM_browse.tif'))
                if not os.path.exists(outputPath):
                    missingDemFiles.append((inputPath, outputPath))
        for f in os.listdir(localOrthoFolder):
            if 'temp' in f:
                raise Exception('Bad file: ' + f)
            if ('IODIM3' in f) and (f[-4:] == '.tif'):
                inputPath = os.path.join(localOrthoFolder, f)
                outputPath = os.path.join(orthoSummaryFolder,
                                          f.replace('ORTHO.tif', 'ORTHO.jpg'))
                if not os.path.exists(outputPath):
                    missingOrthoFiles.append((inputPath, outputPath))

        #print 'Fetching and unpacking tarballs...'
        #fetchTarball(options.demTarball, localDemFolder)
        #fetchTarball(options.orthoTarball, localOrthoFolder)
        #fetchTarball(options.summaryTarball, localSummaryFolder)

        # If the summary tarball unpacked to []/summary/summary,
        # work with the lower level folder from now on.
        #localSummaryFolder = descendIfNeeded(localSummaryFolder)

        # Make a list of all input files that are missing their summary file, and
        # the desired output path for that file.
        #missingDemFiles = getMissingSummaryFiles(localDemFolder, localSummaryFolder, isOrtho=False)
        #missingOrthoFiles = getMissingSummaryFiles(localOrthoFolder, localSummaryFolder, isOrtho=True )

        # Divide this list into chunks and for each chunk generate a file containing all of
        # the gdal_translate commands that need to be executed.
        # BUGFIX: these two prints were Python-2 print statements, a SyntaxError
        # under Python 3; the rest of the file uses the function form.
        print('Writing command files...')
        commandFileLength = getParallelParams(options.nodeType)[2]
        commandFilePrefix = os.path.join(runFolder, 'convert_commands_')
        print('Clearing existing command files.')
        os.system('rm ' + commandFilePrefix + '*')
        commandFileList = writeCommandFiles(missingDemFiles, missingOrthoFiles,
                                            commandFilePrefix, commandFileLength)
        #raise Exception('DEBUG')

        # Get the location to store the logs
        pbsLogFolder = run.getPbsLogFolder()
        logger.info("Storing logs in: " + pbsLogFolder)
        os.system('mkdir -p ' + pbsLogFolder)

        # Call multi_process_command_runner.py through PBS for each chunk.
        start_time()
        (baseName, jobIDs) = submitBatchJobs(commandFileList, options,
                                             pbsLogFolder, run, logger)

        # Wait for everything to finish.
        pbs_functions.waitForJobCompletion(jobIDs, logger, baseName)
        stop_time("pbs_jobs", logger)

        # Check that we now have all of the summary files.
        # - Both of these should now be empty.
        #newMissingDemFiles = getMissingSummaryFiles(localDemFolder, demSummaryFolder, isOrtho=False)
        #newMissingOrthoFiles = getMissingSummaryFiles(localOrthoFolder, orthoSummaryFolder, isOrtho=True )
        numDemsMissing = 0
        numOrthosMissing = 0
        for pair in missingDemFiles:
            if not os.path.exists(pair[1]):
                numDemsMissing += 1
        for pair in missingOrthoFiles:
            if not os.path.exists(pair[1]):
                numOrthosMissing += 1

        resultText = ('After regeneration, missing %d DEM summaries and %d ORTHO summaries'
                      % (numDemsMissing, numOrthosMissing))
        logger.info(resultText)

        runWasSuccess = ((numDemsMissing == 0) and (numOrthosMissing == 0))

        # If successful, create a new tarball and send it to Lou.
        #if runWasSuccess and (not options.skipArchiveSummary):
        #    start_time()
        #    archive_functions.packAndSendSummaryFolder(run, localSummaryFolder, logger)
        #    stop_time("archive summary", logger)

    except Exception as e:
        resultText = 'Caught exception: ' + str(e) + '\n' + traceback.format_exc()
        runWasSuccess = False

    # Send a summary email.
    emailAddress = getEmailAddress(icebridge_common.getUser())
    logger.info("Sending email to: " + emailAddress)
    if runWasSuccess:
        sendEmail(emailAddress, 'OIB summary regen passed', resultText)
    else:
        # BUGFIX: the failure subject had a stray leading double quote.
        sendEmail(emailAddress, 'OIB summary regen failed', resultText)

    # TODO: Add automated delete command!
    #if options.wipeProcessed:
    #    processedFolder = run.getProcessFolder()
    #    logger.info("Will delete: " + processedFolder)
    #    os.system("rm -rf " + processedFolder)

    logger.info('==== regenerate_summary_images script has finished for run: '
                + str(run) + ' ====')
def main(argsIn):
    """Entry point of fetcher_script.py.

    Optionally restores a run from a lou tar archive, wipes stale nav/lidar
    products, then delegates fetching, validation, and conversion to
    full_processing_script.py with --stop-after-convert. Optionally archives
    the result back to lou (--tar). Returns -1 on a detected failure.

    argsIn: list of command-line argument strings (e.g. sys.argv[1:]).
    """

    try:
        # Sample usage:
        # python fetcher_script.py \
        #  --yyyymmdd 20091016 --site AN --start-frame 350 --stop-frame 353 --skip-validate
        # An output folder will be crated automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''usage: fetcher_script.py <options>'''

        parser = optparse.OptionParser(usage=usage)

        # Run selection
        parser.add_option("--yyyymmdd", dest="yyyymmdd", default=None,
                          help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_option("--site", dest="site", default=None,
                          help="Name of the location of the images (AN, GR, or AL)")

        parser.add_option("--camera-calibration-folder", dest="inputCalFolder", default=None,
                          help="The folder containing camera calibration.")

        parser.add_option("--reference-dem-folder", dest="refDemFolder", default=None,
                          help="The folder containing DEMs that created orthoimages.")

        # Python treats numbers starting with 0 as being in octal rather than decimal.
        # Ridiculous. So read them as strings and convert to int.
        parser.add_option('--start-frame', dest='startFrameStr', default=None,
                          help="Frame to start with. Leave this and stop-frame blank to " + \
                          "process all frames.")
        parser.add_option('--stop-frame', dest='stopFrameStr', default=None,
                          help='Frame to stop on.')
        parser.add_option('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch',
                          default=100000000, type='int',
                          help='The maximum number of lidar files to fetch. ' + \
                          'This is used in debugging.')

        parser.add_option("--skip-validate", action="store_true", dest="skipValidate",
                          default=False,
                          help="Skip input data validation.")

        parser.add_option("--ignore-missing-lidar", action="store_true",
                          dest="ignoreMissingLidar", default=False,
                          help="Keep going if the lidar is missing.")

        parser.add_option("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                          default=False,
                          help="Skip lidar files in the conversion step.")

        parser.add_option("--no-nav", action="store_true", dest="noNav",
                          default=False,
                          help="Skip dealing with raw nav data.")

        parser.add_option("--skip-processing", action="store_true", dest="skipProcessing",
                          default=False,
                          help="Skip fetch, validate, and convert. Assume all data is ready.")

        parser.add_option("--refetch-index", action="store_true", dest="refetchIndex",
                          default=False,
                          help="Force refetch of the index file.")

        parser.add_option("--stop-after-index-fetch", action="store_true",
                          dest="stopAfterIndexFetch", default=False,
                          help="Stop after fetching the indices.")

        parser.add_option("--tar", action="store_true", dest="tar", default=False,
                          help="After fetching all data and performing all conversions and " + \
                          "validations, make a tarball on lou. Only valid on Pleiades!")
        parser.add_option("--wipe", action="store_true", dest="wipe", default=False,
                          help="After making a tarball with --tar, wipe the directory. ")
        parser.add_option("--start-with-lou-archive", action="store_true",
                          dest="startWithLouArchive", default=False,
                          help="Untar an existing archive from lou, then continue.")

        (options, args) = parser.parse_args(argsIn)

    except optparse.OptionError as msg:
        raise Usage(msg)

    if options.yyyymmdd is None or options.site is None:
        print("The flight date and site must be specified.")
        return -1

    options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)
    os.system('mkdir -p ' + options.outputFolder)

    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_fetcher_log')

    # Explicitely go from strings to integers, per earlier note.
    if options.startFrameStr is not None:
        startFrame = int(options.startFrameStr)
    else:
        startFrame = icebridge_common.getSmallestFrame()
    if options.stopFrameStr is not None:
        stopFrame = int(options.stopFrameStr)
    else:
        stopFrame = icebridge_common.getLargestFrame()

    # Unarchive, then continue with fetching
    if options.startWithLouArchive:
        start_time()
        startWithLouArchive(options, logger)
        stop_time("fetch_from_lou", logger)

    # Build the argument string forwarded to full_processing_script.py.
    cmd = (('--yyyymmdd %s --site %s --start-frame %d --stop-frame %d ' +
            '--max-num-lidar-to-fetch %d --stop-after-convert --no-ortho-convert --refetch')
           % (options.yyyymmdd, options.site, startFrame, stopFrame,
              options.maxNumLidarToFetch))
    if options.refetchIndex:
        cmd += ' --refetch-index'  # this was not right in older fetched runs
    if options.stopAfterIndexFetch:
        cmd += ' --stop-after-index-fetch'
    if options.skipValidate:
        cmd += ' --skip-validate'
    if options.ignoreMissingLidar:
        cmd += ' --ignore-missing-lidar'
    if options.noLidarConvert:
        cmd += ' --no-lidar-convert'
    if options.noNav:
        cmd += ' --no-nav'
    if options.inputCalFolder is not None:
        cmd += ' --camera-calibration-folder ' + options.inputCalFolder
    if options.refDemFolder is not None:
        cmd += ' --reference-dem-folder ' + options.refDemFolder

    if not options.skipProcessing:

        # Refetch all nav stuff, as it was unreliable in the past
        navFolder = icebridge_common.getNavFolder(options.outputFolder)
        navCameraFolder = icebridge_common.getNavCameraFolder(options.outputFolder)
        if os.path.exists(navFolder):
            logger.info("Wiping: " + navFolder)
            os.system('rm -rfv ' + navFolder)
        if os.path.exists(navCameraFolder):
            logger.info("Wiping: " + navCameraFolder)
            os.system('rm -rfv ' + navCameraFolder)

        # Wipe processed lidar, as sometimes errors crept in.
        logger.info("Wiping processed lidar:")
        lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)
        if os.path.exists(lidarFolder):
            os.system('rm -fv ' + lidarFolder + '/*csv')
        pairedFolder = icebridge_common.getPairedLidarFolder(lidarFolder)
        if os.path.exists(pairedFolder):
            os.system('rm -rfv ' + pairedFolder)

        logger.info("full_processing_script.py " + cmd)
        start_time()
        # NOTE(review): full_processing_script.main may return None on success;
        # "None < 0" raises TypeError on Python 3 — confirm its return contract.
        if full_processing_script.main(cmd.split()) < 0:
            return -1
        stop_time("fetch_validate", logger)

    # Archive after fetching
    if options.tar:
        tarAndWipe(options, logger)
def main(argsIn):
    """Entry point of fetcher_script.py (reformatted duplicate).

    NOTE(review): this function is a byte-for-byte logical duplicate of the
    preceding fetcher main; consider deduplicating. It restores an archived
    run from lou if requested, wipes stale nav and processed-lidar files, and
    delegates fetch/validate/convert to full_processing_script.py with
    --stop-after-convert; --tar archives the result back to lou afterwards.
    Returns -1 on a detected failure.

    argsIn: list of command-line argument strings (e.g. sys.argv[1:]).
    """

    try:
        # Sample usage:
        # python fetcher_script.py \
        #  --yyyymmdd 20091016 --site AN --start-frame 350 --stop-frame 353 --skip-validate
        # An output folder will be crated automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''usage: fetcher_script.py <options>'''

        parser = optparse.OptionParser(usage=usage)

        # Run selection
        parser.add_option(
            "--yyyymmdd", dest="yyyymmdd", default=None,
            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_option(
            "--site", dest="site", default=None,
            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_option("--camera-calibration-folder", dest="inputCalFolder",
                          default=None,
                          help="The folder containing camera calibration.")

        parser.add_option(
            "--reference-dem-folder", dest="refDemFolder", default=None,
            help="The folder containing DEMs that created orthoimages.")

        # Python treats numbers starting with 0 as being in octal rather than decimal.
        # Ridiculous. So read them as strings and convert to int.
        parser.add_option('--start-frame', dest='startFrameStr', default=None,
                          help="Frame to start with. Leave this and stop-frame blank to " + \
                          "process all frames.")
        parser.add_option('--stop-frame', dest='stopFrameStr', default=None,
                          help='Frame to stop on.')
        parser.add_option('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch',
                          default=100000000, type='int',
                          help='The maximum number of lidar files to fetch. ' + \
                          'This is used in debugging.')

        parser.add_option("--skip-validate", action="store_true",
                          dest="skipValidate", default=False,
                          help="Skip input data validation.")

        parser.add_option("--ignore-missing-lidar", action="store_true",
                          dest="ignoreMissingLidar", default=False,
                          help="Keep going if the lidar is missing.")

        parser.add_option("--no-lidar-convert", action="store_true",
                          dest="noLidarConvert", default=False,
                          help="Skip lidar files in the conversion step.")

        parser.add_option("--no-nav", action="store_true", dest="noNav",
                          default=False,
                          help="Skip dealing with raw nav data.")

        parser.add_option(
            "--skip-processing", action="store_true", dest="skipProcessing",
            default=False,
            help="Skip fetch, validate, and convert. Assume all data is ready.")

        parser.add_option("--refetch-index", action="store_true",
                          dest="refetchIndex", default=False,
                          help="Force refetch of the index file.")

        parser.add_option("--stop-after-index-fetch", action="store_true",
                          dest="stopAfterIndexFetch", default=False,
                          help="Stop after fetching the indices.")

        parser.add_option("--tar", action="store_true", dest="tar", default=False,
                          help="After fetching all data and performing all conversions and " + \
                          "validations, make a tarball on lou. Only valid on Pleiades!")
        parser.add_option(
            "--wipe", action="store_true", dest="wipe", default=False,
            help="After making a tarball with --tar, wipe the directory. ")
        parser.add_option(
            "--start-with-lou-archive", action="store_true",
            dest="startWithLouArchive", default=False,
            help="Untar an existing archive from lou, then continue.")

        (options, args) = parser.parse_args(argsIn)

    except optparse.OptionError as msg:
        raise Usage(msg)

    if options.yyyymmdd is None or options.site is None:
        print("The flight date and site must be specified.")
        return -1

    options.outputFolder = icebridge_common.outputFolder(
        options.site, options.yyyymmdd)
    os.system('mkdir -p ' + options.outputFolder)

    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_fetcher_log')

    # Explicitely go from strings to integers, per earlier note.
    if options.startFrameStr is not None:
        startFrame = int(options.startFrameStr)
    else:
        startFrame = icebridge_common.getSmallestFrame()
    if options.stopFrameStr is not None:
        stopFrame = int(options.stopFrameStr)
    else:
        stopFrame = icebridge_common.getLargestFrame()

    # Unarchive, then continue with fetching
    if options.startWithLouArchive:
        start_time()
        startWithLouArchive(options, logger)
        stop_time("fetch_from_lou", logger)

    # Build the argument string forwarded to full_processing_script.py.
    cmd = ((
        '--yyyymmdd %s --site %s --start-frame %d --stop-frame %d ' +
        '--max-num-lidar-to-fetch %d --stop-after-convert --no-ortho-convert --refetch'
    ) % (options.yyyymmdd, options.site, startFrame, stopFrame,
         options.maxNumLidarToFetch))
    if options.refetchIndex:
        cmd += ' --refetch-index'  # this was not right in older fetched runs
    if options.stopAfterIndexFetch:
        cmd += ' --stop-after-index-fetch'
    if options.skipValidate:
        cmd += ' --skip-validate'
    if options.ignoreMissingLidar:
        cmd += ' --ignore-missing-lidar'
    if options.noLidarConvert:
        cmd += ' --no-lidar-convert'
    if options.noNav:
        cmd += ' --no-nav'
    if options.inputCalFolder is not None:
        cmd += ' --camera-calibration-folder ' + options.inputCalFolder
    if options.refDemFolder is not None:
        cmd += ' --reference-dem-folder ' + options.refDemFolder

    if not options.skipProcessing:

        # Refetch all nav stuff, as it was unreliable in the past
        navFolder = icebridge_common.getNavFolder(options.outputFolder)
        navCameraFolder = icebridge_common.getNavCameraFolder(
            options.outputFolder)
        if os.path.exists(navFolder):
            logger.info("Wiping: " + navFolder)
            os.system('rm -rfv ' + navFolder)
        if os.path.exists(navCameraFolder):
            logger.info("Wiping: " + navCameraFolder)
            os.system('rm -rfv ' + navCameraFolder)

        # Wipe processed lidar, as sometimes errors crept in.
        logger.info("Wiping processed lidar:")
        lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)
        if os.path.exists(lidarFolder):
            os.system('rm -fv ' + lidarFolder + '/*csv')
        pairedFolder = icebridge_common.getPairedLidarFolder(lidarFolder)
        if os.path.exists(pairedFolder):
            os.system('rm -rfv ' + pairedFolder)

        logger.info("full_processing_script.py " + cmd)
        start_time()
        # NOTE(review): full_processing_script.main may return None on success;
        # "None < 0" raises TypeError on Python 3 — confirm its return contract.
        if full_processing_script.main(cmd.split()) < 0:
            return -1
        stop_time("fetch_validate", logger)

    # Archive after fetching
    if options.tar:
        tarAndWipe(options, logger)
def main(argsIn):
    """Generate an estimated pinhole camera model (.tsai) for each ortho image,
    derived from the flight's binary nav (.out) files.

    Positional arguments (all required, in order):
    image_folder, ortho_folder, cal_folder, nav_folder, output_folder.

    Returns 0 on success, -1 on any fatal error (missing folders, invalid
    nav file, missing arguments). Raises Usage on option-parsing errors.
    """

    # Command line parsing
    try:
        usage = "usage: camera_models_from_nav.py <image_folder> <ortho_folder> <cal_folder> <nav_folder> <output_folder> [options]"

        parser = optparse.OptionParser(usage=usage)

        parser.add_option('--start-frame', dest='startFrame', default=-1,
                          type='int',
                          help='The frame number to start processing with.')
        parser.add_option('--stop-frame', dest='stopFrame', default=999999,
                          type='int',
                          help='The frame number to finish processing with.')
        parser.add_option("--input-calibration-camera", dest="inputCalCamera",
                          default="",
                          help="Use this input calibrated camera.")
        parser.add_option('--camera-mounting', dest='cameraMounting', default=0,
                          type='int',
                          help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')
        (options, args) = parser.parse_args(argsIn)

        if len(args) < 5:
            print('Error: Missing arguments.')
            print(usage)
            return -1
        imageFolder  = os.path.abspath(args[0])
        orthoFolder  = os.path.abspath(args[1])
        calFolder    = os.path.abspath(args[2])
        navFolder    = os.path.abspath(args[3])
        outputFolder = os.path.abspath(args[4])

    except optparse.OptionError as msg:
        raise Usage(msg)

    # Log in the run directory (the parent of the ortho folder).
    runDir = os.path.dirname(orthoFolder)
    os.system("mkdir -p " + runDir)

    logLevel = logging.INFO # Make this an option??
    logger = icebridge_common.setUpLogger(runDir, logLevel,
                                          'camera_models_from_nav_log')

    if not os.path.exists(orthoFolder):
        logger.error('Ortho folder ' + orthoFolder + ' does not exist!')
        return -1

    # Find the nav file
    # - There should only be one or two nav files per flight.
    fileList = os.listdir(navFolder)
    fileList = [x for x in fileList if '.out' in x]
    if len(fileList) == 0:
        logger.error('No nav files in: ' + navFolder)
        return -1

    navPath = os.path.join(navFolder, fileList[0])
    # All nav files get appended (below) into a single text file
    # named after the first binary nav file.
    parsedNavPath = navPath.replace('.out', '.txt')

    if not asp_file_utils.fileIsNonZero(navPath):
        logger.error('Nav file ' + navPath + ' is invalid!')
        return -1

    # Create the output file only if it is empty or does not exist
    isNonEmpty = asp_file_utils.fileIsNonZero(parsedNavPath)
    if not isNonEmpty:
        # Initialize the output file as being empty
        logger.info("Create empty file: " + parsedNavPath)
        open(parsedNavPath, 'w').close()

    # Append to the output parsed nav file
    for fileName in fileList:
        # Convert the nav file from binary to text
        navPath = os.path.join(navFolder, fileName)

        # Peek at the first line: a readable 'HTML' header means the server
        # returned an error page instead of the binary nav data.
        with open(navPath, 'r') as f:
            try:
                text = f.readline()
                if 'HTML' in text:
                    # Sometimes the server is down, and instead of the binary nav file
                    # we are given an html file with an error message.
                    logger.info("Have invalid nav file: " + navPath)
                    return -1 # Die in this case!
            except UnicodeDecodeError as e:
                # Got a binary file, that means likely we are good
                pass

        cmd = asp_system_utils.which('sbet2txt.pl') + ' -q ' + navPath + ' >> ' + parsedNavPath
        logger.info(cmd)
        # Only run the conversion if the parsed file did not already exist.
        if not isNonEmpty:
            os.system(cmd)

    cameraPath = options.inputCalCamera
    if cameraPath == "":
        # No input camera file provided, look one up. It does not matter much,
        # as later ortho2pinhole will insert the correct intrinsics.
        goodFile = False
        fileList = os.listdir(calFolder)
        fileList = [x for x in fileList if (('.tsai' in x) and ('~' not in x))]
        if not fileList:
            logger.error('Unable to find any camera files in ' + calFolder)
            return -1
        for fileName in fileList:
            cameraPath = os.path.join(calFolder, fileName)

            # Check if this path is valid
            # (a pinhole model file mentions the 'fu' focal length field).
            with open(cameraPath, 'r') as f:
                for line in f:
                    if 'fu' in line:
                        goodFile = True
                        break
            if goodFile:
                break

    # Get the ortho list
    orthoFiles = icebridge_common.getTifs(orthoFolder)
    logger.info('Found ' + str(len(orthoFiles)) + ' ortho files.')

    # Look up the frame numbers for each ortho file
    infoDict = {}
    for ortho in orthoFiles:
        if ('gray' in ortho) or ('sub' in ortho):
            continue
        frame = icebridge_common.getFrameNumberFromFilename(ortho)
        if frame < options.startFrame or frame > options.stopFrame:
            continue
        infoDict[frame] = [ortho, '']

    # Get the image file list
    try:
        imageFiles = icebridge_common.getTifs(imageFolder)
    except Exception as e:
        raise Exception("Cannot continue with nav generation, will resume later when images are created. This is not a fatal error. " + str(e))

    logger.info('Found ' + str(len(imageFiles)) + ' image files.')

    # Update the second part of each dictionary object
    for image in imageFiles:
        if ('gray' in image) or ('sub' in image):
            continue
        frame = icebridge_common.getFrameNumberFromFilename(image)
        if frame < options.startFrame or frame > options.stopFrame:
            continue
        if frame not in infoDict:
            logger.info('Image missing ortho file: ' + image)
            # don't throw here, that will mess the whole batch, we will recover
            # the missing one later.
            continue
        infoDict[frame][1] = image

    os.system('mkdir -p ' + outputFolder)
    orthoListFile = os.path.join(outputFolder,
                                 'ortho_file_list_' + str(options.startFrame) + "_" + str(options.stopFrame) + '.csv')

    # Open the output file for writing
    logger.info("Writing: " + orthoListFile)
    with open(orthoListFile, 'w') as outputFile:
        # Loop through frames in order
        for key in sorted(infoDict):
            # Write the ortho name and the output camera name to the file
            (ortho, image) = infoDict[key]
            if not image:
                #raise Exception('Ortho missing image file: ' +ortho)
                continue
            camera = image.replace('.tif', '.tsai')
            outputFile.write(ortho + ', ' + camera + '\n')

    # Check if we already have all of the output camera files.
    haveAllFiles = True
    with open(orthoListFile, 'r') as inputFile:
        for line in inputFile:
            parts = line.split(',')
            camPath = os.path.join(outputFolder, parts[1].strip())
            if not asp_file_utils.fileIsNonZero(camPath):
                logger.info('Missing file -> ' + camPath)
                haveAllFiles = False
                break

    # Call the C++ tool to generate a camera model for each ortho file
    if not haveAllFiles:
        cmd = ('nav2cam --input-cam %s --nav-file %s --cam-list %s --output-folder %s --camera-mounting %d'
               % (cameraPath, parsedNavPath, orthoListFile, outputFolder, options.cameraMounting))
        logger.info(cmd)
        os.system(cmd)
    else:
        logger.info("All nav files were already generated.")

    # Generate a kml file for the nav camera files
    kmlPath = os.path.join(outputFolder, 'nav_cameras.kml')

    # This is a hack. If we are invoked from a Pleiades node, do not
    # create this kml file, as nodes will just overwrite each other.
    # This job may happen anyway earlier or later when on the head node.
    if not 'PBS_NODEFILE' in os.environ:
        try:
            tempPath = os.path.join(outputFolder, 'list.txt')
            logger.info('Generating nav camera kml file: ' + kmlPath)
            os.system('ls ' + outputFolder + '/*.tsai > ' + tempPath)
            orbitviz_pinhole = asp_system_utils.which('orbitviz_pinhole')
            cmd = orbitviz_pinhole + ' --hide-labels -o ' + kmlPath + ' --input-list ' + tempPath
            logger.info(cmd)
            asp_system_utils.executeCommand(cmd, kmlPath, suppressOutput=True,
                                            redo=False)
            os.remove(tempPath)
        except Exception as e:
            # Best-effort: a failed kml is only a visualization aid.
            logger.info("Warning: " + str(e))

    logger.info('Finished generating camera models from nav!')
    return 0
def main(argsIn):
    """Drive orthoimage generation (runOrtho) for a range of frames.

    Parses command-line options, bounds the frame range by the available
    jpeg index, then runs runOrtho per frame, either sequentially or via a
    multiprocessing pool when --num-processes > 1.

    Raises an Exception when --yyyymmdd is malformed or the jpeg index
    file is missing.
    """

    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/gen_ortho.py --site GR \
        #   --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #   --num-threads 8 --num-processes 3.
        usage = '''gen_ortho.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int,
                            help="The number of images to bundle adjust and process " + \
                            "in a single batch.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')

        parser.add_argument('--camera-mounting', dest='cameraMounting', default=0,
                            type=int,
                            help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')

        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. "+\
                            "The default is no additional folder.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int,
                            help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int,
                            help='The number of threads per process.')
        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Make this an option??
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_ortho_log')

    # Record the machine name, useful when jobs are farmed out to nodes.
    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput = True)
    logger.info("Running on machine: " + out)

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    jpegFolder = icebridge_common.getJpegFolder(options.outputFolder)
    jpegIndexPath = icebridge_common.csvIndexFile(jpegFolder)
    if not os.path.exists(jpegIndexPath):
        raise Exception("Error: Missing jpeg index file: " + jpegIndexPath + ".")
    (jpegFrameDict, jpegUrlDict) = icebridge_common.readIndexFile(jpegIndexPath,
                                                                  prependFolder = True)

    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)

    redo = False
    suppressOutput = True
    taskHandles = []
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Bound the frames
    sortedFrames = sorted(jpegFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        # stopFrame is exclusive in the range() below, hence the +1.
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No jpeg files, that means nothing to do
        options.startFrame = 0
        options.stopFrame = 0

    for frame in range(options.startFrame, options.stopFrame):

        if not frame in jpegFrameDict:
            logger.info("Error: Missing jpeg file for frame: " + str(frame) + ".")
            continue

        # Find the right image
        currImage = jpegFrameDict[frame]

        args = (frame, processFolder, currImage, options.bundleLength,
                options.cameraMounting, threadText, redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runOrtho, args))
        else:
            runOrtho(*args)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger,
                                                         interactive = False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)
imageFolder = os.path.abspath(args[0]) orthoFolder = os.path.abspath(args[1]) calFolder = os.path.abspath(args[2]) navFolder = os.path.abspath(args[3]) outputFolder = os.path.abspath(args[4]) except optparse.OptionError, msg: raise Usage(msg) runDir = os.path.dirname(orthoFolder) os.system("mkdir -p " + runDir) logLevel = logging.INFO # Make this an option?? logger = icebridge_common.setUpLogger(runDir, logLevel, 'camera_models_from_nav_log') if not os.path.exists(orthoFolder): logger.error('Ortho folder ' + orthoFolder + ' does not exist!') return -1 # Find the nav file # - There should only be one or two nav files per flight. fileList = os.listdir(navFolder) fileList = [x for x in fileList if '.out' in x] if len(fileList) == 0: logger.error('No nav files in: ' + navFolder) return -1 navPath = os.path.join(navFolder, fileList[0]) parsedNavPath = navPath.replace('.out', '.txt')
def main(argsIn):
    """Push the processed DEM results of one flight to NSIDC via lftp.

    Parses options (--site/--yyyymmdd or the combined --site_yyyymmdd),
    skips flights already listed in --done-file, pushes DEMs with
    pushByType, then wipes the local run folder.

    Returns 0 when the flight is already done; raises an Exception when an
    acceptable lftp version is not found.
    """
    try:
        usage = '''usage: push_to_nsidc.py <options> '''
        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", default="",
                            help="Specify the year, month, and day in one YYYYMMDD string.")

        parser.add_argument("--site", dest="site", default = "",
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--site_yyyymmdd", dest="site_yyyymmdd", default = "",
                            help="A value like GR_20150330, which will be split into site and yyyymmdd by this script.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")

        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')

        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder", default=None,
                            help="The folder containing camera calibration.")

        parser.add_argument("--reference-dem-folder", dest="refDemFolder", default=None,
                            help="The folder containing DEMs that created orthoimages.")

        parser.add_argument("--login-info", dest="loginInfo", default=None,
                            help="user,password destination.nsidc.org.")

        parser.add_argument("--done-file", dest="doneFile", default=None,
                            help="List of runs that were done by now.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    # parse --site_yyyymmdd. Sometimes it is easier to pass this than
    # to pass separately --site and --yyyymmdd.
    m = re.match(r'^(\w+)_(\w+)', options.site_yyyymmdd)
    if m:
        options.site = m.group(1)
        options.yyyymmdd = m.group(2)
    else:
        options.site_yyyymmdd = options.site + "_" + options.yyyymmdd

    # Read the done file and exit if the current flight is done
    done = set()
    # BUGFIX: --done-file defaults to None, so the old guard
    # (options.doneFile != "") let open(None) crash when the option was
    # omitted. Only read the file when a non-empty path was supplied.
    if options.doneFile:
        with open(options.doneFile, 'r') as f:
            for val in f:
                val = val.strip()
                done.add(val)
    if options.site_yyyymmdd in done:
        print("Skipping done flight: " + options.site_yyyymmdd)
        return 0

    run = run_helper.RunHelper(options.site, options.yyyymmdd, os.getcwd())

    # Set up logging in the run directory. Log outside of the run dir,
    # as that one we will wipe
    logFolder = os.path.abspath(os.path.join(run.getFolder(), '..', 'push_logs'))
    os.system('mkdir -p ' + logFolder)
    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(logFolder, logLevel, "push")
    logger.info("Logging in: " + logFolder)

    # Check the lftp version. On some machines it is too old.
    (out, err, status) = asp_system_utils.executeCommand(['lftp', '--version'],
                                                         suppressOutput = True)
    m = re.match(r'^.*?LFTP\s+\|\s+Version\s+4.5', out)
    if not m:
        raise Exception('Expecting LFTP version 4.5.')
    else:
        logger.info("Found an acceptable version of LFTP.")

    pushByType(run, options, logger, 'DEM')
    #pushByType(run, options, logger, 'ORTHO') # need to wait for format decision

    # Wipe at the end
    cmd = "rm -rf " + run.getFolder()
    logger.info(cmd)
    os.system(cmd)
except argparse.ArgumentError, msg: parser.error(msg) icebridge_common.switchWorkDir() if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9: # Make an exception for 20100422a raise Exception("The --yyyymmdd field must have length 8 or 9.") if options.outputFolder is None: options.outputFolder = icebridge_common.outputFolder( options.site, options.yyyymmdd) os.system('mkdir -p ' + options.outputFolder) logLevel = logging.INFO # Make this an option?? logger = icebridge_common.setUpLogger(options.outputFolder, logLevel, 'icebridge_ortho_log') (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'], suppressOutput=True) logger.info("Running on machine: " + out) processFolder = os.path.join(options.outputFolder, 'processed') # Handle subfolder option. This is useful for comparing results with different parameters! if options.processingSubfolder: processFolder = os.path.join(processFolder, options.processingSubfolder) logger.info('Reading from processing subfolder: ' + options.processingSubfolder) jpegFolder = icebridge_common.getJpegFolder(options.outputFolder)
def main(argsIn):
    """Drive DEM blending (runBlend) for a range of frames.

    Parses command-line options, bounds the frame range by the available
    ortho index, finds the matching lidar file per frame, then runs
    runBlend per frame, sequentially or via a multiprocessing pool when
    --num-processes > 1. Optionally also blends to Fireball DEM footprints.

    Raises an Exception when --yyyymmdd is malformed or the ortho index
    file is missing.
    """

    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/blend_dems.py --site GR \
        #   --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #   --num-threads 8 --num-processes 10
        usage = '''blend_dems.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int,
                            help="The number of images to bundle adjust and process " + \
                            "in a single batch.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')

        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. "+\
                            "The default is no additional folder.")

        parser.add_argument("--compute-diff-to-prev-dem", action="store_true",
                            dest="computeDiffToPrev", default=False,
                            help="Compute the absolute difference between the current DEM " +
                            "and the one before it.")

        parser.add_argument("--blend-to-fireball-footprint", action="store_true",
                            dest="blendToFireball", default=False,
                            help="Create additional blended DEMs having the same " + \
                            "footprint as Fireball DEMs.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int,
                            help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int,
                            help='The number of threads per process.')

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    os.system("ulimit -c 0")  # disable core dumps
    os.system("rm -f core.*") # these keep on popping up
    os.system("umask 022")    # enforce files be readable by others

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site,
                                                             options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Make this an option??
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_blend_log')

    # Record the machine name and full invocation, useful for debugging.
    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)
    logger.info(str(argsIn))

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    orthoFolder = icebridge_common.getOrthoFolder(options.outputFolder)
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)

    if options.blendToFireball:
        fireballFrameDict = icebridge_common.getCorrectedFireballDems(options.outputFolder)

    lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)

    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)

    redo = False
    suppressOutput = True
    taskHandles = []
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Bound the frames
    sortedFrames = sorted(orthoFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        # stopFrame is exclusive in the range() below, hence the +1.
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No ortho files, that means nothing to do
        options.startFrame = 0
        options.stopFrame = 0

    for frame in range(options.startFrame, options.stopFrame):

        if not frame in orthoFrameDict:
            logger.info("Error: Missing ortho file for frame: " + str(frame) + ".")
            continue

        orthoFile = orthoFrameDict[frame]
        try:
            lidarFile = icebridge_common.findMatchingLidarFile(orthoFile,
                                                               lidarFolder)
        except:
            # Skip if no lidar file matches this frame
            continue

        fireballDEM = ""
        if options.blendToFireball:
            if frame in fireballFrameDict:
                fireballDEM = fireballFrameDict[frame]
            else:
                logger.info("No fireball DEM for frame: " + str(frame))

        args = (frame, processFolder, lidarFile, fireballDEM, options,
                threadText, redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runBlend, args))
        else:
            runBlend(*args)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger,
                                                         interactive=False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)