def solveIntrinsics_Part2(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                          processedFolder, isSouth, logger):
    '''Create a camera model with optimized intrinsics. By now we have processed
    a set of images and created bundle-adjusted and pc_align-ed cameras and DEMs,
    using a camera model whose distortion is implemented with RPC coefficients
    obtained from the photometrics model. We now use those cameras as inputs to a
    bundle adjustment problem in which we optimize the intrinsics, including the
    distortion RPC coefficients, using the lidar as an external constraint and many
    dense IP pairs and triplets (no quadruplets yet, even if 4 images overlap).'''

    # Get a list of all the input files
    imageCameraPairs = icebridge_common.getImageCameraPairs(imageFolder, cameraFolder,
                                                            options.startFrame,
                                                            options.stopFrame, logger)

    # The paired lidar file for the first image should be large enough to cover
    # all images.
    lidarFile = icebridge_common.findMatchingLidarFile(imageCameraPairs[0][0], lidarFolder)
    logger.info('Found matching lidar file ' + lidarFile)
    lidarCsvFormatString = icebridge_common.getLidarCsvFormat(lidarFile)

    numFiles = len(imageCameraPairs)
    if numFiles < 2:
        raise Exception('Failed to find any image camera pairs!')
    if numFiles % 2 != 0:
        raise Exception("When solving for intrinsics, must have an even number of frames to use.")

    # Collect pc_align-ed cameras, unaligned disparities, and dense match files
    images    = []
    cameras   = []
    dispFiles = []
    for it in range(numFiles/2):
        begFrame = options.startFrame + 2*it
        endFrame = begFrame + 1
        batchFolderName  = icebridge_common.batchFolderName(begFrame, endFrame,
                                                            options.bundleLength)
        thisOutputFolder = os.path.join(processedFolder, batchFolderName)

        # Find all the cameras after bundle adjustment and pc_align.
        pattern = icebridge_common.getAlignedBundlePrefix(thisOutputFolder) + '*.tsai'
        alignedCameras = glob.glob(pattern)
        if len(alignedCameras) != options.bundleLength:
            raise Exception("Expected " + str(options.bundleLength) +
                            " cameras, here's what was obtained instead: " +
                            " ".join(alignedCameras))

        img0 = ""; cam0 = ""; img1 = ""; cam1 = ""
        for cam in alignedCameras:
            frame = icebridge_common.getFrameNumberFromFilename(cam)
            if begFrame == frame:
                img0 = imageCameraPairs[2*it][0]
                cam0 = cam
            if endFrame == frame:
                img1 = imageCameraPairs[2*it+1][0]
                cam1 = cam
        images.append(img0);  images.append(img1)
        cameras.append(cam0); cameras.append(cam1)

        # Unaligned disparity
        stereoFolder  = os.path.join(thisOutputFolder, 'stereo_pair_' + str(0))
        currDispFiles = glob.glob(os.path.join(stereoFolder, '*unaligned-D.tif'))
        if len(currDispFiles) != 1:
            raise Exception("Expecting a single unaligned disparity file in " + stereoFolder)
        dispFiles.append(currDispFiles[0])

    # Match files
    matchFiles = []
    for it in range(numFiles-1):
        begFrame = options.startFrame + it
        endFrame = begFrame + 1
        batchFolderName  = icebridge_common.batchFolderName(begFrame, endFrame,
                                                            options.bundleLength)
        thisOutputFolder = os.path.join(processedFolder, batchFolderName)
        stereoFolder     = os.path.join(thisOutputFolder, 'stereo_pair_' + str(0))
        DISP_PREFIX = "disp-"
        currMatchFiles = glob.glob(os.path.join(stereoFolder, '*' + DISP_PREFIX + '*.match'))
        if len(currMatchFiles) != 1:
            raise Exception("Expecting a single dense match file in " + stereoFolder)
        matchFiles.append(currMatchFiles[0])

    # Create the output directory for bundle adjustment and copy the match files there
    baDir    = os.path.join(processedFolder, "bundle_intrinsics")
    baPrefix = os.path.join(baDir, "out")
    os.system("mkdir -p " + baDir)
    for matchFile in matchFiles:
        dstFile = os.path.basename(matchFile)
        dstFile = dstFile.replace(DISP_PREFIX, '')
        dstFile = os.path.join(baDir, dstFile)
        cmd = "cp -f " + matchFile + " " + dstFile
        logger.info(cmd)
        os.system(cmd)

    # The bundle adjustment
    cmd = "bundle_adjust " + " ".join(images) + " " + " ".join(cameras) + \
          ' --reference-terrain ' + lidarFile + \
          ' --disparity-list "' + " ".join(dispFiles) + '"' + \
          ' --datum wgs84 -t nadirpinhole --create-pinhole-cameras --robust-threshold 2' + \
          ' --camera-weight 1 --solve-intrinsics --csv-format ' + lidarCsvFormatString + \
          ' --overlap-limit 1 --max-disp-error 10 --max-iterations 100' + \
          ' --parameter-tolerance 1e-12 -o ' + baPrefix
    logger.info(cmd)
    os.system(cmd)

    # Generate DEMs of residuals before and after optimization
    projString = icebridge_common.getEpsgCode(isSouth, asString=True)
    for val in ['initial', 'final']:
        cmd = 'point2dem --t_srs ' + projString + ' --tr 2' + \
              ' --csv-format 1:lon,2:lat,4:height_above_datum ' + \
              baPrefix + '-' + val + '_residuals_no_loss_function_pointmap_point_log.csv'
        logger.info(cmd)
        os.system(cmd)
        cmd = 'point2dem --t_srs ' + projString + ' --tr 2' + \
              ' --csv-format 1:lon,2:lat,4:height_above_datum ' + \
              baPrefix + '-' + val + '_residuals_no_loss_function_reference_terrain.txt'
        logger.info(cmd)
        os.system(cmd)

    # Look at the latest written tsai file; that will be the optimized distortion file.
    # Force the initial rotation and translation to be the identity, as this is
    # expected by ortho2pinhole.
    outFiles = filter(os.path.isfile, glob.glob(baPrefix + '*.tsai'))
    outFiles.sort(key=lambda x: os.path.getmtime(x))
    optFile = outFiles[-1]
    logger.info("Reading optimized file: " + optFile)
    with open(optFile, 'r') as f:
        lines = f.readlines()
    for it in range(len(lines)):
        lines[it] = lines[it].strip()
        if re.match("^C\s*=\s*", lines[it]):
            lines[it] = "C = 0 0 0"
        if re.match("^R\s*=\s*", lines[it]):
            lines[it] = "R = 1 0 0 0 1 0 0 0 1"

    # Write the final desired optimized RPC file
    logger.info("Writing final optimized file: " + options.outputCalCamera)
    # Bugfix: take the full path to find the directory, otherwise this may fail.
    os.system("mkdir -p " + os.path.dirname(os.path.abspath(options.outputCalCamera)))
    with open(options.outputCalCamera, 'w') as f:
        for line in lines:
            f.write(line + "\n")
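# Optional sanity check (a hypothetical helper, not part of the original script):
# confirm that the calibration camera written above really carries the identity
# rotation and zero translation that ortho2pinhole expects. It only re-reads the
# two lines rewritten in the loop above, e.g. checkIdentityPose(options.outputCalCamera).
def checkIdentityPose(tsaiFile):
    '''Return True if the .tsai file has C = 0 0 0 and R set to the identity.'''
    hasC = False
    hasR = False
    with open(tsaiFile, 'r') as f:
        for line in f:
            line = line.strip()
            if line == "C = 0 0 0":
                hasC = True
            if line == "R = 1 0 0 0 1 0 0 0 1":
                hasR = True
    return hasC and hasR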
def main(argsIn):

    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/blend_dems.py --site GR \
        #   --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #   --num-threads 8 --num-processes 10
        usage = '''blend_dems.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " +
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2, type=int,
                            help="The number of images to bundle adjust and process " +
                            "in a single batch.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " +
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')

        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. " +
                            "The default is no additional folder.")

        parser.add_argument("--compute-diff-to-prev-dem", action="store_true",
                            dest="computeDiffToPrev", default=False,
                            help="Compute the absolute difference between the current DEM " +
                            "and the one before it.")

        parser.add_argument("--blend-to-fireball-footprint", action="store_true",
                            dest="blendToFireball", default=False,
                            help="Create additional blended DEMs having the same " +
                            "footprint as Fireball DEMs.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1, type=int,
                            help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8, type=int,
                            help='The number of threads per process.')

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    os.system("ulimit -c 0")  # disable core dumps
    os.system("rm -f core.*") # these keep on popping up
    os.system("umask 022")    # enforce files be readable by others

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Make this an option??
    logger   = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                            'icebridge_blend_log')

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)
    logger.info(str(argsIn))

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    orthoFolder    = icebridge_common.getOrthoFolder(options.outputFolder)
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)

    if options.blendToFireball:
        fireballFrameDict = icebridge_common.getCorrectedFireballDems(options.outputFolder)

    lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)

    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)

    redo           = False
    suppressOutput = True
    taskHandles    = []
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Bound the frames
    sortedFrames = sorted(orthoFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No ortho files, that means nothing to do
        options.startFrame = 0
        options.stopFrame  = 0

    for frame in range(options.startFrame, options.stopFrame):

        if not frame in orthoFrameDict:
            logger.info("Error: Missing ortho file for frame: " + str(frame) + ".")
            continue

        orthoFile = orthoFrameDict[frame]
        try:
            lidarFile = icebridge_common.findMatchingLidarFile(orthoFile, lidarFolder)
        except:
            # Skip if no lidar file matches this frame
            continue

        fireballDEM = ""
        if options.blendToFireball:
            if frame in fireballFrameDict:
                fireballDEM = fireballFrameDict[frame]
            else:
                logger.info("No fireball DEM for frame: " + str(frame))

        args = (frame, processFolder, lidarFile, fireballDEM, options, threadText,
                redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runBlend, args))
        else:
            runBlend(*args)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger,
                                                         interactive=False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)
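# Note: this excerpt does not include the script's entry point. A typical driver
# for a main(argsIn) function of this form would be the following (assuming the
# usual imports, e.g. sys, are present at the top of the file):
#
# if __name__ == "__main__":
#     sys.exit(main(sys.argv[1:]))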
# Pick the output projection to be used
PROJ_STRING_NORTH = '"+proj=stere +lat_0=90 +lat_ts=70 +lon_0=-45 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs"'
PROJ_STRING_SOUTH = '"+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs"'

projString = PROJ_STRING_NORTH
if options.isSouth:
    projString = PROJ_STRING_SOUTH

if not os.path.exists(outputFolder):
    os.mkdir(outputFolder)

# If a lidar folder was specified, find the best lidar file.
lidarFile = None
if options.lidarFolder:
    logger.info('Searching for matching lidar file...')
    lidarFile = icebridge_common.findMatchingLidarFile(inputPairs[0][0], options.lidarFolder)
    logger.info('Found matching lidar file ' + lidarFile)

# Does this ever change?
# This format is used for reading LIDAR files.
LIDAR_CSV_FORMAT_STRING = '"1:lat 2:lon 3:height_above_datum"'

suppressOutput = False
redo           = False

logger.info('Starting processing...')

outputPrefix = os.path.join(outputFolder, 'out')

threadText = ''
if options.numThreads:
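# Illustration only (not from the original fragment, which is truncated above):
# the projection string and lidar CSV format defined here are typically passed to
# ASP tools through --t_srs and --csv-format, much like the point2dem calls in the
# intrinsics solver earlier in this file. A hypothetical example of gridding the
# matched lidar file with these settings:
#
#   cmd = ('point2dem --t_srs ' + projString +
#          ' --csv-format ' + LIDAR_CSV_FORMAT_STRING +
#          ' -o ' + outputPrefix + '-lidar ' + lidarFile)
#   logger.info(cmd)
#   os.system(cmd)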