def massRename(self, startFrame, stopFrame, logger):
    '''We changed how the timestamp for images and cameras is computed.
    Make all existing converted images, cameras, nav cameras, and aligned
    cameras conform.

    Renames files in [startFrame, stopFrame] to carry the timestamp read
    from the ortho index. Raises if the ortho index file is missing.'''

    logger.info("Renaming files with timestamp. This is slow.")

    # Need to do a mass rename for incorrect timestamp for:
    # converted images, nav cameras, cameras, and bundle aligned cameras.
    # (Removed the unused navFolder lookup and the redundant first
    # orthoFolder assignment that was immediately overwritten below.)
    outputFolder    = self.getFolder()
    cameraFolder    = icebridge_common.getCameraFolder(outputFolder)
    imageFolder     = icebridge_common.getImageFolder(outputFolder)
    jpegFolder      = icebridge_common.getJpegFolder(outputFolder)
    processedFolder = icebridge_common.getProcessedFolder(outputFolder)
    navCameraFolder = icebridge_common.getNavCameraFolder(outputFolder)

    # Need the orthos to get the timestamp
    orthoFolder = icebridge_common.getOrthoFolder(os.path.dirname(jpegFolder))
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = \
        icebridge_common.readIndexFile(orthoIndexPath, prependFolder=True)

    logger.info('Renaming camera files...')
    self.massRenameByGlob(startFrame, stopFrame, orthoFrameDict,
                          os.path.join(cameraFolder, '*DMS*tsai'), logger)

    logger.info('Renaming nav camera files...')
    self.massRenameByGlob(startFrame, stopFrame, orthoFrameDict,
                          os.path.join(navCameraFolder, '*DMS*tsai'), logger)

    logger.info('Renaming converted images...')
    self.massRenameByGlob(startFrame, stopFrame, orthoFrameDict,
                          os.path.join(imageFolder, '*DMS*tif'), logger)

    logger.info('Renaming aligned cameras...')
    self.massRenameByGlob(startFrame, stopFrame, orthoFrameDict,
                          os.path.join(processedFolder, 'batch*',
                                       icebridge_common.alignedBundleStr() +
                                       '*DMS*tsai'),
                          logger)
def lidarFilesInRange(lidarDict, lidarFolder, startFrame, stopFrame):
    '''Fetch only lidar files for the given frame range. Do that as follows.
    For each ortho frame in [startFrame, stopFrame], find the lidar file
    with the closest timestamp. Collect them all. Add the two neighboring
    ones, to help with finding lidar pairs later.'''

    # Lidar files ordered by frame number.
    orderedLidar = [lidarDict[f] for f in sorted(lidarDict.keys())]

    # If we requested all frames, also get all the lidar files.
    wantEverything = (startFrame == icebridge_common.getSmallestFrame() and
                      stopFrame == icebridge_common.getLargestFrame())
    if wantEverything:
        loIndex = 0
        hiIndex = len(orderedLidar) - 1
    else:
        # Start with an inverted (empty) range; the scan below widens it.
        loIndex = len(orderedLidar)
        hiIndex = 0

    # Build up a list of lidar files that match the requested input frames
    orthoFolder = icebridge_common.getOrthoFolder(os.path.dirname(lidarFolder))
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)

    for frame in sorted(orthoFrameDict.keys()):
        if frame < startFrame or frame > stopFrame:
            continue
        try:
            matchingLidar = icebridge_common.findMatchingLidarFileFromList(
                orthoFrameDict[frame], orderedLidar)
        except:
            # Some image files don't have a matching lidar file, just keep going.
            continue
        for pos, lidarFile in enumerate(orderedLidar):
            if lidarFile == matchingLidar:
                loIndex = min(loIndex, pos)
                hiIndex = max(hiIndex, pos)

    # We will fetch neighboring lidar files as well
    if loIndex > 0:
        loIndex -= 1
    if hiIndex + 1 < len(orderedLidar):
        hiIndex += 1

    # Fetch only the requested lidar files.
    lidarsToFetch = set()
    if orderedLidar:
        lidarsToFetch = set(orderedLidar[loIndex:hiIndex + 1])
    return lidarsToFetch
def lidarFilesInRange(lidarDict, lidarFolder, startFrame, stopFrame):
    '''Fetch only lidar files for the given frame range. Do that as follows.
    For each ortho frame in [startFrame, stopFrame], find the lidar
    file with the closest timestamp. Collect them all.
    Add the two neighboring ones, to help with finding lidar pairs later.'''

    # Flatten the frame->file dict into a list ordered by frame number.
    lidarList = []
    for frame in sorted(lidarDict.keys()):
        lidarList.append(lidarDict[frame])

    # If we requested all frames, also get all the lidar files.
    if ((startFrame == icebridge_common.getSmallestFrame()) and
        (stopFrame  == icebridge_common.getLargestFrame() ) ):
        minLidarIndex = 0
        maxLidarIndex = len(lidarList)-1
    else:
        # Start with an inverted range; the scan below narrows it to the
        # span of lidar files actually matched by ortho frames.
        minLidarIndex = len(lidarList)
        maxLidarIndex = 0

    # Build up a list of lidar files that match the requested input frames
    orthoFolder = icebridge_common.getOrthoFolder(os.path.dirname(lidarFolder))
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)
    for frame in sorted(orthoFrameDict.keys()):
        if ((frame < startFrame) or (frame > stopFrame) ):
            continue
        orthoFrame = orthoFrameDict[frame]
        try:
            matchingLidar = icebridge_common.findMatchingLidarFileFromList(orthoFrame,
                                                                          lidarList)
        except:
            # Some image files don't have a matching lidar file, just keep going.
            continue

        # Widen [minLidarIndex, maxLidarIndex] to include the matched file.
        for index in range(len(lidarList)):
            if lidarList[index] == matchingLidar:
                if minLidarIndex > index:
                    minLidarIndex = index
                if maxLidarIndex < index:
                    maxLidarIndex = index

    # We will fetch neighboring lidar files as well
    if minLidarIndex > 0:
        minLidarIndex = minLidarIndex -1
    if maxLidarIndex + 1 < len(lidarList):
        maxLidarIndex = maxLidarIndex + 1

    lidarsToFetch = set()
    if lidarList:  # guard against an empty lidar list
        for index in range(minLidarIndex, maxLidarIndex+1):
            # Fetch only the requested lidar files.
            lidarsToFetch.add(lidarList[index])

    return lidarsToFetch
def massRename(self, startFrame, stopFrame, logger):
    '''We changed how the timestamp for images and cameras is computed.
    Make all existing converted images, cameras, nav cameras, and aligned
    cameras conform.

    Renames files for frames in [startFrame, stopFrame], taking the
    correct timestamp from the ortho index. Raises if the ortho index
    file is missing.'''

    logger.info("Renaming files with timestamp. This is slow.")

    # Need to do a mass rename for incorrect timestamp for:
    # converted images, nav cameras, cameras, and bundle aligned cameras
    outputFolder    = self.getFolder()
    cameraFolder    = icebridge_common.getCameraFolder(outputFolder)
    imageFolder     = icebridge_common.getImageFolder(outputFolder)
    jpegFolder      = icebridge_common.getJpegFolder(outputFolder)
    orthoFolder     = icebridge_common.getOrthoFolder(outputFolder)
    processedFolder = icebridge_common.getProcessedFolder(outputFolder)
    navFolder       = icebridge_common.getNavFolder(outputFolder)
    navCameraFolder = icebridge_common.getNavCameraFolder(outputFolder)

    # Need the orthos to get the timestamp
    # NOTE(review): this recomputes (and overwrites) orthoFolder from the
    # jpeg folder's parent; presumably identical to the value above.
    orthoFolder = icebridge_common.getOrthoFolder(os.path.dirname(jpegFolder))
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath,
                                                                   prependFolder = True)

    logger.info('Renaming camera files...')
    self.massRenameByGlob(startFrame, stopFrame, orthoFrameDict,
                          os.path.join(cameraFolder, '*DMS*tsai'), logger)

    logger.info('Renaming nav camera files...')
    self.massRenameByGlob(startFrame, stopFrame, orthoFrameDict,
                          os.path.join(navCameraFolder, '*DMS*tsai'), logger)

    logger.info('Renaming converted images...')
    self.massRenameByGlob(startFrame, stopFrame, orthoFrameDict,
                          os.path.join(imageFolder, '*DMS*tif'), logger)

    logger.info('Renaming aligned cameras...')
    self.massRenameByGlob(startFrame, stopFrame, orthoFrameDict,
                          os.path.join(processedFolder, 'batch*',
                                       icebridge_common.alignedBundleStr() +
                                       '*DMS*tsai'),
                          logger)
def lidarFilesInRange(lidarDict, lidarFolder, startFrame, stopFrame):
    '''Fetch only lidar files for the given frame range. Do that as follows.
    For each ortho frame in [startFrame, stopFrame], find the lidar
    file with the closest timestamp. Collect them all.
    Add the two neighboring ones, to help with finding lidar pairs later.'''

    # Lidar files ordered by frame number.
    lidarList = []
    for frame in sorted(lidarDict.keys()):
        lidarList.append(lidarDict[frame])

    orthoFolder = icebridge_common.getOrthoFolder(os.path.dirname(lidarFolder))
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)

    # Start with an inverted range; the scan below widens it to cover the
    # lidar files matched by ortho frames in the requested range.
    minLidarIndex = len(lidarList)
    maxLidarIndex = 0
    for frame in sorted(orthoFrameDict.keys()):
        if ((frame < startFrame) or (frame > stopFrame)):
            continue
        orthoFrame = orthoFrameDict[frame]
        try:
            matchingLidar = icebridge_common.findMatchingLidarFileFromList(
                orthoFrame, lidarList)
        except:
            # Some image files don't have a matching lidar file, just keep
            # going (consistent with the other copies of this function).
            continue
        for index in range(len(lidarList)):
            if lidarList[index] == matchingLidar:
                if minLidarIndex > index:
                    minLidarIndex = index
                if maxLidarIndex < index:
                    maxLidarIndex = index

    # We will fetch neighboring lidar files as well
    if minLidarIndex > 0:
        minLidarIndex = minLidarIndex - 1
    if maxLidarIndex + 1 < len(lidarList):
        maxLidarIndex = maxLidarIndex + 1

    lidarsToFetch = set()
    # Guard: with an empty lidar list the indices stay (0, 0) and indexing
    # lidarList[0] would raise IndexError.
    if lidarList:
        for index in range(minLidarIndex, maxLidarIndex + 1):
            lidarsToFetch.add(lidarList[index])
    return lidarsToFetch
def validateOrthosAndFireball(options, fileType, logger):
    '''Validate ortho and fireball files within the current frame range. This
    is expected to be in called in parallel for smaller chunks. Lidar files
    will be validated serially. Jpegs get validated when converted to tif.
    Return True if all is good.'''

    badFiles = False
    logger.info("Validating files of type: " + fileType)

    if fileType   == 'ortho':
        dataFolder = icebridge_common.getOrthoFolder(options.outputFolder)
    elif fileType == 'fireball':
        dataFolder = icebridge_common.getFireballFolder(options.outputFolder)
    else:
        raise Exception("Unknown file type: " + fileType)

    indexPath = icebridge_common.csvIndexFile(dataFolder)
    if not os.path.exists(indexPath):
        # The issue of what to do when the index does not exist should
        # have been settled by now.
        return (not badFiles)

    # Fetch from disk the set of already validated files, if any
    validFilesList = icebridge_common.validFilesList(options.outputFolder,
                                                    options.startFrame, options.stopFrame)
    validFilesSet = set()
    validFilesSet = icebridge_common.updateValidFilesListFromDisk(validFilesList,
                                                                  validFilesSet)
    numInitialValidFiles = len(validFilesSet)

    (frameDict, urlDict) = icebridge_common.readIndexFile(indexPath,
                                                          prependFolder = True)
    for frame in frameDict.keys():

        if frame < options.startFrame or frame > options.stopFrame:
            continue

        outputPath = frameDict[frame]
        xmlFile = icebridge_common.xmlFile(outputPath)

        # Skip files (and their .xml sidecars) already validated and present.
        if outputPath in validFilesSet and os.path.exists(outputPath) and \
           xmlFile    in validFilesSet and os.path.exists(xmlFile):
            #logger.info('Previously validated: ' + outputPath + ' ' + xmlFile)
            continue
        else:
            isGood = icebridge_common.hasValidChkSum(outputPath, logger)
            if not isGood:
                # Wipe bad data so it gets re-fetched later.
                logger.info('Found invalid data. Will wipe: ' + outputPath + ' ' + xmlFile)
                os.system('rm -f ' + outputPath) # will not throw
                os.system('rm -f ' + xmlFile)    # will not throw
                badFiles = True
            else:
                logger.info('Valid file: ' + outputPath)
                validFilesSet.add(outputPath)
                validFilesSet.add(xmlFile)

        if fileType != 'fireball':
            continue

        # Also validate tfw (fireball DEMs only)
        tfwFile = icebridge_common.tfwFile(outputPath)
        xmlFile = icebridge_common.xmlFile(tfwFile)
        if tfwFile in validFilesSet and os.path.exists(tfwFile) and \
           xmlFile in validFilesSet and os.path.exists(xmlFile):
            #logger.info('Previously validated: ' + tfwFile + ' ' + xmlFile)
            continue
        else:
            isGood = icebridge_common.isValidTfw(tfwFile, logger)
            if not isGood:
                logger.info('Found invalid tfw. Will wipe: ' + tfwFile + ' ' + xmlFile)
                os.system('rm -f ' + tfwFile) # will not throw
                os.system('rm -f ' + xmlFile) # will not throw
                badFiles = True
            else:
                logger.info('Valid tfw file: ' + tfwFile)
                validFilesSet.add(tfwFile)
                validFilesSet.add(xmlFile)

    # Write to disk the list of validated files, but only if new
    # validations happened. First re-read that list, in case a
    # different process modified it in the meantime, such as if two
    # managers are running at the same time.
    numFinalValidFiles = len(validFilesSet)
    if numInitialValidFiles != numFinalValidFiles:
        validFilesSet = \
            icebridge_common.updateValidFilesListFromDisk(validFilesList, validFilesSet)
        icebridge_common.writeValidFilesList(validFilesList, validFilesSet)

    return (not badFiles)
def validateOrthosAndFireball(options, fileType, logger):
    '''Validate ortho and fireball files within the current frame range. This
    is expected to be in called in parallel for smaller chunks. Lidar files
    will be validated serially. Jpegs get validated when converted to tif.
    Return True if all is good.'''

    badFiles = False
    logger.info("Validating files of type: " + fileType)

    # Pick the folder to validate based on the requested file type.
    if fileType   == 'ortho':
        dataFolder = icebridge_common.getOrthoFolder(options.outputFolder)
    elif fileType == 'fireball':
        dataFolder = icebridge_common.getFireballFolder(options.outputFolder)
    else:
        raise Exception("Unknown file type: " + fileType)

    indexPath = icebridge_common.csvIndexFile(dataFolder)
    if not os.path.exists(indexPath):
        # The issue of what to do when the index does not exist should
        # have been settled by now.
        return (not badFiles)

    # Fetch from disk the set of already validated files, if any
    validFilesList = icebridge_common.validFilesList(options.outputFolder,
                                                    options.startFrame, options.stopFrame)
    validFilesSet = set()
    validFilesSet = icebridge_common.updateValidFilesListFromDisk(validFilesList,
                                                                  validFilesSet)
    numInitialValidFiles = len(validFilesSet)

    (frameDict, urlDict) = icebridge_common.readIndexFile(indexPath,
                                                          prependFolder=True)
    for frame in frameDict.keys():

        if frame < options.startFrame or frame > options.stopFrame:
            continue

        outputPath = frameDict[frame]
        xmlFile = icebridge_common.xmlFile(outputPath)

        # A file counts as validated only if both it and its .xml sidecar
        # are in the valid set and still exist on disk.
        if outputPath in validFilesSet and os.path.exists(outputPath) and \
           xmlFile    in validFilesSet and os.path.exists(xmlFile):
            #logger.info('Previously validated: ' + outputPath + ' ' + xmlFile)
            continue
        else:
            isGood = icebridge_common.hasValidChkSum(outputPath, logger)
            if not isGood:
                logger.info('Found invalid data. Will wipe: ' + outputPath + ' ' + xmlFile)
                os.system('rm -f ' + outputPath) # will not throw
                os.system('rm -f ' + xmlFile)    # will not throw
                badFiles = True
            else:
                logger.info('Valid file: ' + outputPath)
                validFilesSet.add(outputPath)
                validFilesSet.add(xmlFile)

        if fileType != 'fireball':
            continue

        # Also validate tfw
        tfwFile = icebridge_common.tfwFile(outputPath)
        xmlFile = icebridge_common.xmlFile(tfwFile)
        if tfwFile in validFilesSet and os.path.exists(tfwFile) and \
           xmlFile in validFilesSet and os.path.exists(xmlFile):
            #logger.info('Previously validated: ' + tfwFile + ' ' + xmlFile)
            continue
        else:
            isGood = icebridge_common.isValidTfw(tfwFile, logger)
            if not isGood:
                logger.info('Found invalid tfw. Will wipe: ' + tfwFile + ' ' + xmlFile)
                os.system('rm -f ' + tfwFile) # will not throw
                os.system('rm -f ' + xmlFile) # will not throw
                badFiles = True
            else:
                logger.info('Valid tfw file: ' + tfwFile)
                validFilesSet.add(tfwFile)
                validFilesSet.add(xmlFile)

    # Write to disk the list of validated files, but only if new
    # validations happened. First re-read that list, in case a
    # different process modified it in the meantime, such as if two
    # managers are running at the same time.
    numFinalValidFiles = len(validFilesSet)
    if numInitialValidFiles != numFinalValidFiles:
        validFilesSet = \
            icebridge_common.updateValidFilesListFromDisk(validFilesList, validFilesSet)
        icebridge_common.writeValidFilesList(validFilesList, validFilesSet)

    return (not badFiles)
def fetchAndParseIndexFile(options, isSouth, baseCurlCmd, outputFolder):
    '''Create a list of all files that must be fetched unless done already.

    Downloads the html index page(s) for the flight day (and sometimes the
    next day), parses them, and writes a combined csv index. Returns the
    path to the parsed csv index. NOTE: mutates options.type (for lidar)
    and options.allFrames as side effects.'''

    # For AN 20091112, etc, some of the ortho images are stored at the
    # beginning of the next day's flight. Need to sort this out, and
    # it is tricky. More comments within the code.
    fetchNextDay = True

    separateByLat = (options.type == 'ortho' and isInSeparateByLatTable(options.yyyymmdd))
    if separateByLat:
        # Here we won't fetch the next day, we will just separate by latitude within
        # a given day
        fetchNextDay = False

    orthoOrFireball = ((options.type == 'ortho') or (options.type == 'fireball'))

    if fetchNextDay:
        # Normally we fetch for next day only for ortho or fireball. However,
        # for one single special flight, we do it for jpeg too, as then
        # the jpegs are also split.
        if orthoOrFireball or \
           ((options.type == 'jpeg') and twoFlightsInOneDay(options.site, options.yyyymmdd)):
            fetchNextDay = True
        else:
            fetchNextDay = False

    # If we need to parse the next flight day as well, as expected in some runs,
    # we will fetch two html files, but create a single index out of them.
    dayVals = [0]
    if fetchNextDay:
        dayVals.append(1)

    indexPath       = icebridge_common.htmlIndexFile(outputFolder)
    currIndexPath   = indexPath
    parsedIndexPath = icebridge_common.csvIndexFile(outputFolder)

    if options.refetchIndex:
        os.system('rm -f ' + indexPath)
        os.system('rm -f ' + parsedIndexPath)

    # If the parsed index already exists, reuse it and skip everything else.
    if icebridge_common.fileNonEmpty(parsedIndexPath):
        logger.info('Already have the index file ' + parsedIndexPath + ', keeping it.')
        return parsedIndexPath

    frameDict = {}
    urlDict   = {}

    # We need the list of jpeg frames. Sometimes when fetching ortho images,
    # and we have to fetch from the next day, don't fetch unless
    # in the jpeg index.
    if len(dayVals) > 1 and options.type != 'jpeg':
        jpegFolder = icebridge_common.getJpegFolder(os.path.dirname(outputFolder))
        jpegIndexPath = icebridge_common.csvIndexFile(jpegFolder)
        (jpegFrameDict, jpegUrlDict) = icebridge_common.readIndexFile(jpegIndexPath)

    orthoStamp = {}
    if options.type == 'fireball':
        # This is a bugfix. Ensure that the fireball DEM has not just
        # the same frame number, but also same timestamp as the ortho.
        orthoFolder = icebridge_common.getOrthoFolder(os.path.dirname(outputFolder))
        orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
        (orthoFrameDict, orthoUrlDict) = \
            icebridge_common.readIndexFile(orthoIndexPath)
        for frame in sorted(orthoFrameDict.keys()):
            filename = orthoFrameDict[frame]
            [imageDateString, imageTimeString] = \
                icebridge_common.parseTimeStamps(filename)
            orthoStamp[frame] = imageTimeString

    for dayVal in dayVals:

        if len(dayVals) > 1:
            # One html index per day; keep them distinct on disk.
            currIndexPath = indexPath + '.day' + str(dayVal)
            if options.refetchIndex:
                os.system('rm -f ' + currIndexPath)

        # Find folderUrl which contains all of the files
        if options.type in LIDAR_TYPES:
            options.allFrames = True # For lidar, always get all the frames!

            # For lidar, the data can come from one of three sources.
            # Unfortunately sometimes there is more than one source, and then
            # we need to pick by latitude.
            folderUrls = []
            lidar_types = []
            for lidar in LIDAR_TYPES:
                folderUrl = getFolderUrl(options.yyyymmdd, options.year, options.month,
                                         options.day, dayVal, # note here the dayVal
                                         options.site, lidar)
                logger.info('Checking lidar URL: ' + folderUrl)
                if checkIfUrlExists(folderUrl):
                    logger.info('Found match with lidar type: ' + lidar)
                    folderUrls.append(folderUrl)
                    lidar_types.append(lidar)

            if len(folderUrls) == 0:
                logger.info('WARNING: Could not find any lidar data for the given date!')
            elif len(folderUrls) == 1:
                # Unique solution
                folderUrl = folderUrls[0]
                options.type = lidar_types[0]
            elif len(folderUrls) >= 2:
                # Multiple solutions. Pick the good one by latitude.
                logger.info("Multiples URLs to search: " + " ".join(folderUrls))
                count = -1
                isGood = False
                for folderUrl in folderUrls:
                    count += 1
                    (localFrameDict, localUrlDict) = \
                        fetchAndParseIndexFileAux(isSouth, separateByLat, dayVal,
                                                  baseCurlCmd, folderUrl,
                                                  currIndexPath, lidar_types[count])
                    for frame in sorted(localFrameDict.keys()):
                        filename = localFrameDict[frame]
                        xmlFile  = icebridge_common.xmlFile(filename)
                        url      = os.path.join(folderUrl, xmlFile)

                        # Download the file (the xml is small, just to read
                        # the latitude from it).
                        curlCmd = baseCurlCmd + ' ' + url + ' > ' + xmlFile
                        logger.info(curlCmd)
                        p = subprocess.Popen(curlCmd, shell=True)
                        os.waitpid(p.pid, 0)

                        latitude = icebridge_common.parseLatitude(xmlFile)
                        if os.path.exists(xmlFile):
                            os.remove(xmlFile)

                        if hasGoodLat(latitude, isSouth):
                            isGood = True
                            options.type = lidar_types[count]
                            logger.info("Good latitude " + str(latitude) + ", will use " +
                                        folderUrl + " of type " + lidar_types[count])
                        else:
                            logger.info("Bad latitude " + str(latitude) + ", will not use " +
                                        folderUrl + " of type " + lidar_types[count])

                        # Stop at first file no matter what
                        break

                    if isGood:
                        break

                if not isGood:
                    if options.type in LIDAR_TYPES and options.ignoreMissingLidar:
                        logger.info("No lidar. None of these URLs are good: " +
                                    " ".join(folderUrls))
                    else:
                        raise Exception("None of these URLs are good: " +
                                        " ".join(folderUrls))

        else: # Other cases are simpler
            folderUrl = getFolderUrl(options.yyyymmdd, options.year, options.month,
                                     options.day, dayVal, # note here the dayVal
                                     options.site, options.type)

            logger.info('Fetching from URL: ' + folderUrl)
            (localFrameDict, localUrlDict) = \
                fetchAndParseIndexFileAux(isSouth, separateByLat, dayVal,
                                          baseCurlCmd, folderUrl,
                                          currIndexPath, options.type)

        # Append to the main index
        for frame in sorted(localFrameDict.keys()):

            if options.type == 'fireball':
                # This is a bugfix. Ensure that the fireball DEM has not just
                # the same frame number, but also same timestamp as the ortho.
                # Otherwise we may accidentally getting one from next day.
                [imageDateString, imageTimeString] = \
                    icebridge_common.parseTimeStamps(localFrameDict[frame])
                if frame not in orthoStamp:
                    #logger.info("Missing ortho for fireball: " + localFrameDict[frame])
                    continue
                if abs(int(imageTimeString) - int(orthoStamp[frame])) > 1000:
                    # Apparently a tolerance is needed. Use 10 seconds, so the number 1000.
                    #logger.info("Will not use fireball DEM whose timestamp differs from ortho.")
                    #logger.info("Fireball is: " + localFrameDict[frame])
                    #logger.info("Ortho is: " + orthoFrameDict[frame])
                    continue

            # Fetch from next day, unless already have a value. And don't fetch
            # frames not in the jpeg index.
            if len(dayVals) > 1 and options.type != 'jpeg':
                if not frame in jpegFrameDict.keys():
                    continue
                if frame in frameDict.keys():
                    continue

            frameDict[frame] = localFrameDict[frame]
            urlDict[frame]   = localUrlDict[frame]

    # Write the combined index file
    icebridge_common.writeIndexFile(parsedIndexPath, frameDict, urlDict)

    return parsedIndexPath
def main(argsIn):
    '''Entry point for blend_dems.py: parse options, then blend DEMs for
    the requested frame range, optionally in parallel.'''

    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/blend_dems.py --site GR \
        #   --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #   --num-threads 8 --num-processes 10
        usage = '''blend_dems.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int, help="The number of images to bundle adjust and process " + \
                            "in a single batch.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')

        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. " + \
                            "The default is no additional folder.")

        parser.add_argument("--compute-diff-to-prev-dem", action="store_true",
                            dest="computeDiffToPrev", default=False,
                            help="Compute the absolute difference between the current DEM " +
                            "and the one before it.")

        parser.add_argument("--blend-to-fireball-footprint", action="store_true",
                            dest="blendToFireball", default=False,
                            help="Create additional blended DEMs having the same " + \
                            "footprint as Fireball DEMs.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int, help='The number of threads per process.')
        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    os.system("ulimit -c 0") # disable core dumps
    os.system("rm -f core.*") # these keep on popping up
    os.system("umask 022")   # enforce files be readable by others

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)

    logLevel = logging.INFO # Make this an option??
    logger   = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                            'icebridge_blend_log')

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                        suppressOutput = True)
    logger.info("Running on machine: " + out)
    logger.info(str(argsIn))

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    orthoFolder    = icebridge_common.getOrthoFolder(options.outputFolder)
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = \
        icebridge_common.readIndexFile(orthoIndexPath)

    if options.blendToFireball:
        fireballFrameDict = icebridge_common.getCorrectedFireballDems(options.outputFolder)

    lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)

    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)

    redo = False
    suppressOutput = True
    taskHandles    = []
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Bound the frames to what the ortho index actually contains.
    sortedFrames = sorted(orthoFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No ortho files, that means nothing to do
        options.startFrame = 0
        options.stopFrame  = 0

    for frame in range(options.startFrame, options.stopFrame):

        if not frame in orthoFrameDict:
            logger.info("Error: Missing ortho file for frame: " + str(frame) + ".")
            continue

        orthoFile = orthoFrameDict[frame]
        try:
            lidarFile = icebridge_common.findMatchingLidarFile(orthoFile, lidarFolder)
        except: # Skip if no lidar file matches this frame
            continue

        fireballDEM = ""
        if options.blendToFireball:
            if frame in fireballFrameDict:
                fireballDEM = fireballFrameDict[frame]
            else:
                logger.info("No fireball DEM for frame: " + str(frame))

        args = (frame, processFolder, lidarFile, fireballDEM, options,
                threadText, redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runBlend, args))
        else:
            runBlend(*args)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger,
                                                        interactive = False,
                                                        quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)
def convertJpegs(jpegFolder, imageFolder, startFrame, stopFrame, skipValidate,
                 cameraMounting, logger):
    '''Convert jpeg images from RGB to single channel.
       Returns false if any files failed.'''

    badFiles = False

    logger.info('Converting input images to grayscale...')

    os.system('mkdir -p ' + imageFolder)

    # Loop through all the input images
    jpegIndexPath = icebridge_common.csvIndexFile(jpegFolder)
    if not os.path.exists(jpegIndexPath):
        raise Exception("Error: Missing jpeg index file: " + jpegIndexPath + ".")
    (jpegFrameDict, jpegUrlDict) = icebridge_common.readIndexFile(jpegIndexPath,
                                                                  prependFolder = True)

    # Need the orthos to get the timestamp
    orthoFolder = icebridge_common.getOrthoFolder(os.path.dirname(jpegFolder))
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath,
                                                                    prependFolder = True)

    if not skipValidate:
        # Fetch from disk the set of already validated files, if any.
        validFilesList = icebridge_common.validFilesList(os.path.dirname(jpegFolder),
                                                        startFrame, stopFrame)
        validFilesSet = set()
        validFilesSet = icebridge_common.updateValidFilesListFromDisk(validFilesList,
                                                                      validFilesSet)
        numInitialValidFiles = len(validFilesSet)

    # Fast check for missing images. This is fragile, as maybe it gets
    # the wrong file with a similar name, but an honest check is very slow.
    imageFiles = icebridge_common.getTifs(imageFolder, prependFolder = True)
    imageFrameDict = {}
    for imageFile in imageFiles:
        frame = icebridge_common.getFrameNumberFromFilename(imageFile)
        if frame < startFrame or frame > stopFrame:
            continue
        imageFrameDict[frame] = imageFile

    for frame in sorted(jpegFrameDict.keys()):

        inputPath = jpegFrameDict[frame]

        # Only deal with frames in range
        if not ( (frame >= startFrame) and (frame <= stopFrame) ):
            continue

        if frame in imageFrameDict.keys() and skipValidate:
            # Fast, hackish check
            continue

        if frame not in orthoFrameDict:
            logger.info("Error: Could not find ortho image for jpeg frame: " + str(frame))
            # Don't want to throw here. Just ignore the missing ortho
            continue

        # Make sure the timestamp and frame number are in the output file name
        try:
            outputPath = icebridge_common.jpegToImageFile(inputPath, orthoFrameDict[frame])
        except Exception as e:
            logger.info(str(e))
            logger.info("Removing bad file: " + inputPath)
            os.system('rm -f ' + inputPath) # will not throw
            badFiles = True
            continue

        # Skip existing valid files
        if skipValidate:
            if os.path.exists(outputPath):
                logger.info("File exists, skipping: " + outputPath)
                continue
        else:
            if outputPath in validFilesSet and os.path.exists(outputPath):
                #logger.info('Previously validated: ' + outputPath) # very verbose
                validFilesSet.add(inputPath) # Must have this
                continue

            if icebridge_common.isValidImage(outputPath):
                #logger.info("File exists and is valid, skipping: " + outputPath) # verbose
                if not skipValidate:
                    # Mark both the input and the output as validated
                    validFilesSet.add(inputPath)
                    validFilesSet.add(outputPath)
                continue

        # Use ImageMagick tool to convert from RGB to grayscale
        # - Some image orientations are rotated to make stereo processing easier.
        rotateString = ''
        if cameraMounting == 2: # Flight direction towards top of image
            rotateString = '-rotate 90 '
        if cameraMounting == 3: # Flight direction towards bottom of image
            rotateString = '-rotate -90 '
        cmd = ('%s %s -colorspace Gray %s%s') % \
              (asp_system_utils.which('convert'), inputPath, rotateString, outputPath)
        logger.info(cmd)

        # Run command and fetch its output
        p = subprocess.Popen(cmd.split(" "), stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             universal_newlines=True)
        output, error = p.communicate()
        if p.returncode != 0:
            badFiles = True
            logger.error("Command failed.")
            logger.error("Wiping bad files: " + inputPath + " and " + outputPath + '\n'
                         + output)
            os.system('rm -f ' + inputPath)  # will not throw
            os.system('rm -f ' + outputPath) # will not throw

        if not os.path.exists(outputPath):
            badFiles = True
            logger.error('Failed to convert jpeg file: ' + inputPath)
            logger.error("Wiping bad files: " + inputPath + " and " + outputPath + '\n'
                         + output)
            os.system('rm -f ' + inputPath)  # will not throw
            os.system('rm -f ' + outputPath) # will not throw

        # Check for corrupted files
        if error is not None:
            output += error
        m = re.match("^.*?premature\s+end", output,
                     re.IGNORECASE|re.MULTILINE|re.DOTALL)
        if m:
            badFiles = True
            logger.error("Wiping bad files: " + inputPath + " and " + outputPath + '\n'
                         + output)
            os.system('rm -f ' + inputPath)  # will not throw
            os.system('rm -f ' + outputPath) # will not throw

    if not skipValidate:
        # Write to disk the list of validated files, but only if new
        # validations happened. First re-read that list, in case a
        # different process modified it in the meantime, such as if two
        # managers are running at the same time.
        numFinalValidFiles = len(validFilesSet)
        if numInitialValidFiles != numFinalValidFiles:
            validFilesSet = icebridge_common.updateValidFilesListFromDisk(validFilesList,
                                                                          validFilesSet)
            icebridge_common.writeValidFilesList(validFilesList, validFilesSet)

    if badFiles:
        logger.error("Converstion of JPEGs failed. If any files were corrupted, " +
                     "they were removed, and need to be re-fetched.")

    return (not badFiles)
def convertJpegs(jpegFolder, imageFolder, startFrame, stopFrame, skipValidate, cameraMounting,
                 logger):
    '''Convert fetched jpeg images from RGB to single-channel grayscale tif files
       in imageFolder, for frames in [startFrame, stopFrame]. The output names
       embed the timestamp taken from the matching ortho image. Bad or corrupted
       files are wiped so they can be re-fetched. Returns False if any files
       failed, True otherwise.'''

    badFiles = False

    logger.info('Converting input images to grayscale...')

    os.system('mkdir -p ' + imageFolder)

    # Loop through all the input images
    jpegIndexPath = icebridge_common.csvIndexFile(jpegFolder)
    if not os.path.exists(jpegIndexPath):
        raise Exception("Error: Missing jpeg index file: " + jpegIndexPath + ".")
    (jpegFrameDict, jpegUrlDict) = icebridge_common.readIndexFile(jpegIndexPath,
                                                                  prependFolder=True)

    # Need the orthos to get the timestamp
    orthoFolder = icebridge_common.getOrthoFolder(os.path.dirname(jpegFolder))
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath,
                                                                    prependFolder=True)

    if not skipValidate:
        validFilesList = icebridge_common.validFilesList(os.path.dirname(jpegFolder),
                                                         startFrame, stopFrame)
        validFilesSet = set()
        validFilesSet = icebridge_common.updateValidFilesListFromDisk(validFilesList,
                                                                      validFilesSet)
        numInitialValidFiles = len(validFilesSet)

    # Fast check for missing images. This is fragile, as maybe it gets
    # the wrong file with a similar name, but an honest check is very slow.
    imageFiles = icebridge_common.getTifs(imageFolder, prependFolder=True)
    imageFrameDict = {}
    for imageFile in imageFiles:
        frame = icebridge_common.getFrameNumberFromFilename(imageFile)
        if frame < startFrame or frame > stopFrame:
            continue
        imageFrameDict[frame] = imageFile

    for frame in sorted(jpegFrameDict.keys()):

        inputPath = jpegFrameDict[frame]

        # Only deal with frames in range
        if not ((frame >= startFrame) and (frame <= stopFrame)):
            continue

        if frame in imageFrameDict and skipValidate:
            # Fast, hackish check
            continue

        if frame not in orthoFrameDict:
            logger.info("Error: Could not find ortho image for jpeg frame: " + str(frame))
            # Don't want to throw here. Just ignore the missing ortho
            continue

        # Make sure the timestamp and frame number are in the output file name
        try:
            outputPath = icebridge_common.jpegToImageFile(inputPath, orthoFrameDict[frame])
        except Exception as e:
            # Fixed Python-2-only "except Exception, e" syntax; "as" works on 2.6+ and 3.
            logger.info(str(e))
            logger.info("Removing bad file: " + inputPath)
            os.system('rm -f ' + inputPath) # will not throw
            badFiles = True
            continue

        # Skip existing valid files
        if skipValidate:
            if os.path.exists(outputPath):
                logger.info("File exists, skipping: " + outputPath)
                continue
        else:
            if outputPath in validFilesSet and os.path.exists(outputPath):
                #logger.info('Previously validated: ' + outputPath) # very verbose
                validFilesSet.add(inputPath) # Must have this
                continue

            if icebridge_common.isValidImage(outputPath):
                #logger.info("File exists and is valid, skipping: " + outputPath) # verbose
                # Mark both the input and the output as validated
                validFilesSet.add(inputPath)
                validFilesSet.add(outputPath)
                continue

        # Use ImageMagick tool to convert from RGB to grayscale
        # - Some image orientations are rotated to make stereo processing easier.
        rotateString = ''
        if cameraMounting == 2: # Flight direction towards top of image
            rotateString = '-rotate 90'
        if cameraMounting == 3: # Flight direction towards bottom of image
            rotateString = '-rotate -90'
        cmd = ('%s %s -colorspace Gray %s %s') % \
              (asp_system_utils.which('convert'), inputPath, rotateString, outputPath)
        logger.info(cmd)

        # Run command and fetch its output. universal_newlines=True makes the
        # pipes text-mode (str, not bytes), so the string concatenation below
        # works on Python 3 as well; this matches the sibling conversion code.
        p = subprocess.Popen(cmd.split(" "), stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True)
        output, error = p.communicate()
        if p.returncode != 0:
            badFiles = True
            logger.error("Command failed.")
            logger.error("Wiping bad files: " + inputPath + " and " + outputPath + '\n'
                         + output)
            os.system('rm -f ' + inputPath)  # will not throw
            os.system('rm -f ' + outputPath) # will not throw

        if not os.path.exists(outputPath):
            badFiles = True
            logger.error('Failed to convert jpeg file: ' + inputPath)
            logger.error("Wiping bad files: " + inputPath + " and " + outputPath + '\n'
                         + output)
            os.system('rm -f ' + inputPath)  # will not throw
            os.system('rm -f ' + outputPath) # will not throw

        # Check for corrupted files
        if error is not None:
            output += error
            m = re.match("^.*?premature\s+end", output,
                         re.IGNORECASE | re.MULTILINE | re.DOTALL)
            if m:
                badFiles = True
                logger.error("Wiping bad files: " + inputPath + " and " + outputPath + '\n'
                             + output)
                os.system('rm -f ' + inputPath)  # will not throw
                os.system('rm -f ' + outputPath) # will not throw

    if not skipValidate:
        # Write to disk the list of validated files, but only if new
        # validations happened. First re-read that list, in case a
        # different process modified it in the meantime, such as if two
        # managers are running at the same time.
        numFinalValidFiles = len(validFilesSet)
        if numInitialValidFiles != numFinalValidFiles:
            validFilesSet = icebridge_common.updateValidFilesListFromDisk(validFilesList,
                                                                          validFilesSet)
            icebridge_common.writeValidFilesList(validFilesList, validFilesSet)

    if badFiles:
        logger.error("Conversion of JPEGs failed. If any files were corrupted, " +
                     "they were removed, and need to be re-fetched.")

    return (not badFiles)
def pushByType(run, options, logger, dataType):
    '''Rename the products of the given type ('DEM' or 'ORTHO') for this run
       to the NSIDC delivery convention and upload them via lftp.
       `run` is assumed to be a run object exposing getFolder()/getAssemblyFolder()/
       getProcessFolder() -- TODO confirm against the caller.'''

    # Fetch the ortho index from NSIDC if missing; the orthos supply the
    # date/time stamps used in the delivered file names.
    outputFolder = run.getFolder()
    logger.info("Output folder is " + outputFolder)
    os.system("mkdir -p " + outputFolder)
    # Current directory. It is important to go from /u to the real dir which is /nobackup...
    unpackDir = os.path.realpath(os.getcwd())
    logger.info("Unpack directory is " + unpackDir)
    orthoFolder = icebridge_common.getOrthoFolder(outputFolder)
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        fetchIndices(options, logger)
    logger.info("Reading ortho index: " + orthoIndexPath)
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)

    # Fetch unarchived folder if missing
    if dataType == 'DEM':
        unarchivedFolder = run.getAssemblyFolder()
    elif dataType == 'ORTHO':
        unarchivedFolder = run.getProcessFolder()
    else:
        raise Exception("Unknown data type: " + dataType)
    logger.info("Unarchived data folder is " + unarchivedFolder)

    # Especially for ortho, force-fetch each time, as there is no good way
    # of checking if we fetched well before.
    start_time()
    if not archive_functions.fetchProcessedByType(run, unpackDir, logger, dataType):
        return
    stop_time("fetching archived data by type: " + dataType, logger)

    # Make the output directory at NSIDC, named like SITE_YYYY.MM.DD
    m = re.match("(\d\d\d\d)(\d\d)(\d\d)", options.yyyymmdd)
    if m:
        outDir = options.site + "_" + m.group(1) + "." + m.group(2) + "." + m.group(3)
    else:
        raise Exception("Could not parse: " + options.yyyymmdd)

    # Keep the output directory locally here
    localDirPath = os.path.join(outputFolder, dataType, outDir)
    os.system("mkdir -p " + localDirPath)
    logger.info("Storing the renamed " + dataType + " files in " + localDirPath)
    logger.info("Directory name at NSIDC: " + outDir)

    # Read the DEMs and orthos, and copy them to outDir according to the final convention
    if dataType == 'DEM':
        dataFiles = icebridge_common.getTifs(unarchivedFolder, prependFolder=True)
    else:
        dataFiles = glob.glob(os.path.join(unarchivedFolder, 'batch_*', 'out-ortho.tif'))

    for dataFile in dataFiles:
        # Here we use the convention from archive_functions.py for DEMs and from
        # how we store orthos. Extract the frame number from the file path.
        if dataType == 'DEM':
            m = re.match("^.*?" + unarchivedFolder + "/F_(\d+)_\d+_" + dataType + \
                         "\.tif$", dataFile)
            if not m:
                continue
            frameNumber = int(m.group(1))
        else:
            m = re.match("^.*?" + unarchivedFolder + "/batch_(\d+)_\d+_\d+/" + \
                         "out-ortho.tif$", dataFile)
            if not m:
                continue
            frameNumber = int(m.group(1))

        if frameNumber < options.startFrame or frameNumber > options.stopFrame:
            continue

        # For each data file, copy from the ortho its meta info
        if not frameNumber in orthoFrameDict.keys():
            # Bugfix: Ortho fetching failed, try again
            fetchIndices(options, logger)
            logger.info("Re-reading ortho index: " + orthoIndexPath)
            (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)
            if not frameNumber in orthoFrameDict.keys():
                # This time there is nothing we can do
                raise Exception("Cannot find ortho for frame: " + str(frameNumber))

        orthoFile = orthoFrameDict[frameNumber]
        [dateString, timeString] = icebridge_common.parseTimeStamps(orthoFile)

        # It is always possible that the ortho file date will be the next day
        # after the current flight date, if the flight goes after midnight.
        # So it is not unreasonable that options.yyyymmdd != dateString.
        if dataType == 'DEM':
            outFile = ('IODEM3_%s_%s_%05d_DEM.tif' % (dateString, timeString, frameNumber))
        else:
            # TODO: Need to think more of the naming convention.
            outFile = ('IODEM3_%s_%s_%05d_ORTHO.tif' % (dateString, timeString, frameNumber))

        cmd = "/bin/cp -fv " + dataFile + " " + os.path.join(localDirPath, outFile)
        logger.info(cmd)
        os.system(cmd)

    # Push the directory to NSIDC. The remote path mirrors the last two local
    # path components under /incoming/Ames.
    remoteDirPath = os.path.join(os.path.basename(os.path.dirname(localDirPath)),
                                 os.path.basename(localDirPath))
    remoteDirPath = os.path.join('/incoming', 'Ames', remoteDirPath)
    logger.info("Storing at NSIDC in: " + remoteDirPath)

    # Mirror only .tif files; lftp does the delta/upload work.
    cmd = 'lftp -e "mirror -P 20 -c -R -vvv --delete --delete-first ' + localDirPath + \
          ' ' + remoteDirPath + ' -i \'\.(tif)$\'; bye\" -u ' + options.loginInfo
    logger.info(cmd)

    start_time()
    (output, err, status) = asp_system_utils.executeCommand(cmd, suppressOutput = True)
    #status = os.system(cmd)
    logger.info("LFTP output and error: " + output + ' ' + err)
    logger.info("LFTP status: " + str(status))
    #if status != 0:
    #    raise Exception("Problem pushing")
    stop_time("push to NSIDC", logger)
def fetchAndParseIndexFile(options, isSouth, baseCurlCmd, outputFolder):
    '''Create a list of all files that must be fetched unless done already.
       Writes and returns the path of the parsed (csv) index file.
       NOTE(review): uses a module-level `logger`; it is not a parameter.'''

    # For AN 20091112, etc, some of the ortho images are stored at the
    # beginning of the next day's flight. Need to sort this out, and
    # it is tricky. More comments within the code.
    fetchNextDay = True

    separateByLat = (options.type == 'ortho' and isInSeparateByLatTable(options.yyyymmdd))
    if separateByLat:
        # Here we won't fetch the next day, we will just separate by latitude within
        # a given day
        fetchNextDay = False

    orthoOrFireball = ((options.type == 'ortho') or (options.type == 'fireball'))

    if fetchNextDay:
        # Normally we fetch for next day only for ortho or fireball. However,
        # for one single special flight, we do it for jpeg too, as then
        # the jpegs are also split.
        if orthoOrFireball or \
           ((options.type == 'jpeg') and twoFlightsInOneDay(options.site, options.yyyymmdd)):
            fetchNextDay = True
        else:
            fetchNextDay = False

    # If we need to parse the next flight day as well, as expected in some runs,
    # we will fetch two html files, but create a single index out of them.
    dayVals = [0]
    if fetchNextDay:
        dayVals.append(1)

    indexPath = icebridge_common.htmlIndexFile(outputFolder)
    currIndexPath = indexPath

    parsedIndexPath = icebridge_common.csvIndexFile(outputFolder)

    if options.refetchIndex:
        os.system('rm -f ' + indexPath)
        os.system('rm -f ' + parsedIndexPath)

    # If a parsed index already exists, reuse it rather than refetching.
    if icebridge_common.fileNonEmpty(parsedIndexPath):
        logger.info('Already have the index file ' + parsedIndexPath + ', keeping it.')
        return parsedIndexPath

    frameDict = {}
    urlDict = {}

    # We need the list of jpeg frames. Sometimes when fetching ortho images,
    # and we have to fetch from the next day, don't fetch unless
    # in the jpeg index.
    if len(dayVals) > 1 and options.type != 'jpeg':
        jpegFolder = icebridge_common.getJpegFolder(os.path.dirname(outputFolder))
        jpegIndexPath = icebridge_common.csvIndexFile(jpegFolder)
        (jpegFrameDict, jpegUrlDict) = icebridge_common.readIndexFile(jpegIndexPath)

    orthoStamp = {}
    if options.type == 'fireball':
        # This is a bugfix. Ensure that the fireball DEM has not just
        # the same frame number, but also same timestamp as the ortho.
        orthoFolder = icebridge_common.getOrthoFolder(os.path.dirname(outputFolder))
        orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
        (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)
        for frame in sorted(orthoFrameDict.keys()):
            filename = orthoFrameDict[frame]
            [imageDateString, imageTimeString] = icebridge_common.parseTimeStamps(filename)
            orthoStamp[frame] = imageTimeString

    for dayVal in dayVals:

        if len(dayVals) > 1:
            # One separate raw html index per day
            currIndexPath = indexPath + '.day' + str(dayVal)
            if options.refetchIndex:
                os.system('rm -f ' + currIndexPath)

        # Find folderUrl which contains all of the files
        if options.type in LIDAR_TYPES:
            options.allFrames = True # For lidar, always get all the frames!

            # For lidar, the data can come from one of three sources.
            # Unfortunately sometimes there is more than one source, and then
            # we need to pick by latitude.
            folderUrls = []
            lidar_types = []
            for lidar in LIDAR_TYPES:
                folderUrl = getFolderUrl(options.yyyymmdd, options.year, options.month,
                                         options.day, dayVal, # note here the dayVal
                                         options.site, lidar)
                logger.info('Checking lidar URL: ' + folderUrl)
                if checkIfUrlExists(folderUrl, baseCurlCmd):
                    logger.info('Found match with lidar type: ' + lidar)
                    folderUrls.append(folderUrl)
                    lidar_types.append(lidar)

            if len(folderUrls) == 0:
                logger.info('WARNING: Could not find any lidar data for the given date!')
            elif len(folderUrls) == 1:
                # Unique solution
                folderUrl = folderUrls[0]
                options.type = lidar_types[0]
            elif len(folderUrls) >= 2:
                # Multiple solutions. Pick the good one by latitude.
                logger.info("Multiples URLs to search: " + " ".join(folderUrls))
                count = -1
                isGood = False
                for folderUrl in folderUrls:
                    count += 1
                    (localFrameDict, localUrlDict) = \
                        fetchAndParseIndexFileAux(isSouth, separateByLat, dayVal,
                                                  baseCurlCmd, folderUrl,
                                                  currIndexPath, lidar_types[count])
                    for frame in sorted(localFrameDict.keys()):
                        filename = localFrameDict[frame]
                        xmlFile = icebridge_common.xmlFile(filename)
                        url = os.path.join(folderUrl, xmlFile)

                        # Download the xml metadata file just to read its latitude
                        curlCmd = baseCurlCmd + ' ' + url + ' > ' + xmlFile
                        logger.info(curlCmd)
                        p = subprocess.Popen(curlCmd, shell=True, universal_newlines=True)
                        os.waitpid(p.pid, 0)

                        latitude = icebridge_common.parseLatitude(xmlFile)
                        if os.path.exists(xmlFile):
                            os.remove(xmlFile)

                        if hasGoodLat(latitude, isSouth):
                            isGood = True
                            options.type = lidar_types[count]
                            logger.info("Good latitude " + str(latitude) + ", will use " +
                                        folderUrl + " of type " + lidar_types[count])
                        else:
                            logger.info("Bad latitude " + str(latitude) + ", will not use " +
                                        folderUrl + " of type " + lidar_types[count])

                        # Stop at first file no matter what
                        break

                    if isGood:
                        break

                if not isGood:
                    if options.type in LIDAR_TYPES and options.ignoreMissingLidar:
                        logger.info("No lidar. None of these URLs are good: " +
                                    " ".join(folderUrls))
                    else:
                        raise Exception("None of these URLs are good: " +
                                        " ".join(folderUrls))

        else: # Other cases are simpler
            folderUrl = getFolderUrl(options.yyyymmdd, options.year, options.month,
                                     options.day, dayVal, # note here the dayVal
                                     options.site, options.type)
            logger.info('Fetching from URL: ' + folderUrl)
            (localFrameDict, localUrlDict) = \
                fetchAndParseIndexFileAux(isSouth, separateByLat, dayVal,
                                          baseCurlCmd, folderUrl,
                                          currIndexPath, options.type)

        # Append to the main index
        for frame in sorted(localFrameDict.keys()):

            if options.type == 'fireball':
                # This is a bugfix. Ensure that the fireball DEM has not just
                # the same frame number, but also same timestamp as the ortho.
                # Otherwise we may accidentally getting one from next day.
                [imageDateString, imageTimeString] = \
                    icebridge_common.parseTimeStamps(localFrameDict[frame])
                if frame not in orthoStamp:
                    #logger.info("Missing ortho for fireball: " + localFrameDict[frame])
                    continue
                if abs(int(imageTimeString) - int(orthoStamp[frame])) > 1000:
                    # Apparently a tolerance is needed. Use 10 seconds, so the number 1000.
                    #logger.info("Will not use fireball DEM whose timestamp differs from ortho.")
                    #logger.info("Fireball is: " + localFrameDict[frame])
                    #logger.info("Ortho is: " + orthoFrameDict[frame])
                    continue

            # Fetch from next day, unless already have a value. And don't fetch
            # frames not in the jpeg index.
            if len(dayVals) > 1 and options.type != 'jpeg':
                if not frame in jpegFrameDict.keys():
                    continue
                if frame in frameDict.keys():
                    continue

            frameDict[frame] = localFrameDict[frame]
            urlDict[frame] = localUrlDict[frame]

    # Write the combined index file
    icebridge_common.writeIndexFile(parsedIndexPath, frameDict, urlDict)

    return parsedIndexPath
def main(argsIn):
    '''Entry point: parse the command line, set up folders and logging,
       fetch and convert the input data, then run the processing.'''

    try:
        # Sample usage:
        # python full_processing_script.py \
        #  --yyyymmdd 20091016 --site AN --num-processes 1 --num-threads 12 --bundle-length 12 \
        #  --start-frame 350 --stop-frame 353 --skip-validate \
        #  --camera-calibration-folder camera_calib \
        #  --reference-dem-folder ref_dem_folder
        # An output folder will be created automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''full_processing_script.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        parser.add_argument("--camera-lookup-file", dest="cameraLookupFile", default=None,
                            help="The file to use to find which camera was used for which " + \
                            "flight. By default it is in the same directory as this script " + \
                            "and named camera_lookup.txt.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2, type=int,
                            help="The number of images to bundle adjust and process " + \
                            "in a single batch.")
        # TODO: Compute this automatically??
        parser.add_argument('--overlap-limit', dest='overlapLimit', default=2, type=int,
                            help="The number of images to treat as overlapping for " + \
                            "bundle adjustment.")
        parser.add_argument('--max-overlap-ratio', dest='maxOverlapRatio', default=0.85,
                            type=float,
                            help='The maximum ratio of overlap between images to be accepted as part of a stereo pair. When floating intrinsics, this will be set to 1, to not upset some bookkeeping.')
        parser.add_argument('--stereo-arguments', dest='stereoArgs',
                            # set --min-xcorr-level 0 to do the left-to-right
                            # and right-to-left consistency check at the lowest level.
                            default='--stereo-algorithm 2 --min-xcorr-level 0',
                            help='Extra arguments to pass to stereo.')
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')
        parser.add_argument('--frames-file', dest='framesFile', default="",
                            help='Specific frames to run ortho2pinhole on within this frame range.')
        parser.add_argument('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch', default=None,
                            type=int,
                            help="The maximum number of lidar files to fetch. " + \
                            "This is used in debugging.")
        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder", default=None,
                            help="The folder containing camera calibration.")
        parser.add_argument("--input-calibration-camera", dest="inputCalCamera", default="",
                            help="Instead of looking up the calibrated camera in the calibration folder, use this one.")
        parser.add_argument("--output-calibration-camera", dest="outputCalCamera", default="",
                            help="If specified, float the intrinsics and write the optimized model here.")
        parser.add_argument("--output-model-type", dest="outputModelType", default="RPC",
                            help="Generate a distortion model of type RPC, RPC5, or RPC6.")
        parser.add_argument("--reference-dem-folder", dest="refDemFolder", default=None,
                            help="The folder containing DEMs that created orthoimages.")
        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            # Fixed garbled help text ("fault is no additional folder").
                            help="Specify a subfolder name where the processing outputs will go. " + \
                            "Default is no additional folder")
        parser.add_argument("--simple-cameras", action="store_true", dest="simpleCameras",
                            default=False,
                            help="Don't use orthoimages to refine the camera models.")

        # This option is only needed when generating camera models from the nav files.
        parser.add_argument('--camera-mounting', default=0, dest='cameraMounting', type=int,
                            help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1, type=int,
                            help='The number of simultaneous processes to run.')
        parser.add_argument('--num-ortho-processes', dest='numOrthoProcesses', default=-1,
                            type=int,
                            help='The number of simultaneous ortho processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8, type=int,
                            help='The number of threads per process.')

        # Action control
        parser.add_argument("--skip-fetch", action="store_true", dest="noFetch", default=False,
                            help="Skip data fetching.")
        parser.add_argument("--skip-convert", action="store_true", dest="noConvert",
                            default=False,
                            help="Skip data conversion.")
        parser.add_argument("--stop-after-fetch", action="store_true", dest="stopAfterFetch",
                            default=False,
                            help="Stop program after data fetching.")
        parser.add_argument("--stop-after-convert", action="store_true", dest="stopAfterConvert",
                            default=False,
                            help="Stop program after data conversion.")
        parser.add_argument("--skip-validate", action="store_true", dest="skipValidate",
                            default=False,
                            help="Skip input data validation.")
        parser.add_argument("--ignore-missing-lidar", action="store_true",
                            dest="ignoreMissingLidar", default=False,
                            help="Keep going if the lidar is missing.")
        parser.add_argument("--log-batches", action="store_true", dest="logBatches",
                            default=False,
                            help="Log the required batch commands without running them.")
        parser.add_argument('--cleanup', action='store_true', default=False, dest='cleanup',
                            help='If the final result is produced delete intermediate files.')
        parser.add_argument('--many-ip', action='store_true', default=False, dest='manyip',
                            help='If to use a lot of IP in bundle adjustment from the beginning.')
        parser.add_argument("--dry-run", action="store_true", dest="dryRun", default=False,
                            help="Set up the input directories but do not fetch/process any imagery.")
        parser.add_argument("--refetch", action="store_true", dest="reFetch", default=False,
                            help="Try fetching again if some files turned out invalid " + \
                            "during conversions.")
        parser.add_argument("--refetch-index", action="store_true", dest="refetchIndex",
                            default=False,
                            help="Force refetch of the index file.")
        parser.add_argument("--refetch-nav", action="store_true", dest="refetchNav",
                            default=False,
                            help="Force refetch of the nav file.")
        parser.add_argument("--stop-after-index-fetch", action="store_true",
                            dest="stopAfterIndexFetch", default=False,
                            help="Stop after fetching the indices.")
        parser.add_argument("--no-nav", action="store_true", dest="noNavFetch", default=False,
                            help="Don't fetch or convert the nav data.")
        parser.add_argument("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                            default=False,
                            help="Skip lidar files in the conversion step.")
        parser.add_argument("--no-ortho-convert", action="store_true", dest="noOrthoConvert",
                            default=False,
                            help="Skip generating camera models in the conversion step.")
        parser.add_argument("--skip-fast-conversions", action="store_true",
                            dest="skipFastConvert", default=False,
                            help="Skips all non-ortho conversions.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # By default run as many ortho processes as regular processes
    if options.numOrthoProcesses < 0:
        options.numOrthoProcesses = options.numProcesses

    isSouth = icebridge_common.checkSite(options.site)

    # Turned off elevation limits here since they are being set from LIDAR data.
    ## Add the site based elevation limits to the stereoArgs option
    #altLimits = icebridge_common.getElevationLimits(options.site)
    #options.stereoArgs = (' %s --elevation-limit %f %f '
    #                      % (options.stereoArgs, altLimits[0], altLimits[1]))
    options.stereoArgs = (' %s ' % (options.stereoArgs))

    if options.cameraLookupFile is None:
        options.cameraLookupFile = P.join(basepath, 'camera_lookup.txt')
    if not os.path.isfile(options.cameraLookupFile):
        raise Exception("Can't find camera file: " + options.cameraLookupFile)

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    if options.stopAfterIndexFetch:
        options.stopAfterFetch = True

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Record everything
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_processing_log_frames_' + \
                                          str(options.startFrame) + "_" + str(options.stopFrame))

    # Make sure we later know what we were doing
    logger.info("full_processing_script.py " + " ".join(argsIn))

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)
    logger.info("Work dir is " + os.getcwd())

    os.system("ulimit -c 0") # disable core dumps
    os.system("umask 022")   # enforce files be readable by others

    # Perform some input checks and initializations
    # These are not needed unless cameras are initialized
    if options.inputCalFolder is None or not os.path.exists(options.inputCalFolder):
        raise Exception("Missing camera calibration folder.")
    if options.refDemFolder is None or not os.path.exists(options.refDemFolder):
        raise Exception("Missing reference DEM folder.")

    refDemName = icebridge_common.getReferenceDemName(options.site)
    refDemPath = os.path.join(options.refDemFolder, refDemName)
    if not os.path.exists(refDemPath):
        raise Exception("Missing reference DEM: " + refDemPath)

    # TODO: CLEAN UP!!!
    # Set up the output folders
    cameraFolder       = icebridge_common.getCameraFolder(options.outputFolder)
    imageFolder        = icebridge_common.getImageFolder(options.outputFolder)
    jpegFolder         = icebridge_common.getJpegFolder(options.outputFolder)
    orthoFolder        = icebridge_common.getOrthoFolder(options.outputFolder)
    fireballFolder     = icebridge_common.getFireballFolder(options.outputFolder)
    corrFireballFolder = icebridge_common.getCorrFireballFolder(options.outputFolder)
    lidarFolder        = icebridge_common.getLidarFolder(options.outputFolder)
    navFolder          = icebridge_common.getNavFolder(options.outputFolder)
    navCameraFolder    = icebridge_common.getNavCameraFolder(options.outputFolder)
    processedFolder    = icebridge_common.getProcessedFolder(options.outputFolder)

    if options.outputCalCamera != "":
        if options.maxOverlapRatio < 1:
            raise Exception ("For optimizing intrinsics, must set --max-overlap-ratio to 1, " + \
                             "to always use consecutive frames.")

        # Prepare to solve for intrinsics. Note that this modifies some things along the way.
        (options, cameraFolder, navCameraFolder, processedFolder) = \
            solveIntrinsics_Part1(options, jpegFolder, cameraFolder, navCameraFolder,
                                  processedFolder, logger)

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processedFolder = os.path.join(processedFolder, options.processingSubfolder)
        logger.info('Will write to processing subfolder: ' + options.processingSubfolder)

    # If something failed in the first attempt either in fetch or in
    # convert, we will wipe bad files, and try to refetch/re-convert.
    numAttempts = 1
    if options.reFetch and (not options.noFetch):
        numAttempts = 2

    for attempt in range(numAttempts):
        if numAttempts > 1:
            logger.info("Fetch/convert attempt: " + str(attempt + 1))
        ans = runFetchConvert(options, isSouth, cameraFolder, imageFolder, jpegFolder,
                              orthoFolder, fireballFolder, corrFireballFolder, lidarFolder,
                              processedFolder, navFolder, navCameraFolder, refDemPath, logger)
        if ans == 0:
            break

    if options.stopAfterFetch or options.dryRun or options.stopAfterConvert:
        logger.info('Fetch/convert finished!')
        return 0

    # Call the processing routine
    processTheRun(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                  corrFireballFolder, processedFolder, isSouth, refDemPath)

    if options.outputCalCamera != "":
        # Finish solving for intrinsics.
        solveIntrinsics_Part2(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                              processedFolder, isSouth, logger)
def main(argsIn):
    '''Entry point for full_processing_script: parse the command line, set up
    output folders and logging, fetch and convert the input data (with one
    optional retry), then run the full processing pipeline.  Returns 0 on an
    early stop (fetch/convert only or dry run); otherwise falls through after
    processing.'''

    try:
        # Sample usage:
        # python full_processing_script.py \
        #  --yyyymmdd 20091016 --site AN --num-processes 1 --num-threads 12 --bundle-length 12 \
        #  --start-frame 350 --stop-frame 353 --skip-validate \
        #  --camera-calibration-folder camera_calib \
        #  --reference-dem-folder ref_dem_folder
        # An output folder will be created automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''full_processing_script.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        parser.add_argument("--camera-lookup-file", dest="cameraLookupFile", default=None,
                            help="The file to use to find which camera was used for which " + \
                            "flight. By default it is in the same directory as this script " + \
                            "and named camera_lookup.txt.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int, help="The number of images to bundle adjust and process " + \
                            "in a single batch.")
        # TODO: Compute this automatically??
        parser.add_argument('--overlap-limit', dest='overlapLimit', default=2,
                            type=int, help="The number of images to treat as overlapping for " + \
                            "bundle adjustment.")
        parser.add_argument('--stereo-arguments', dest='stereoArgs',
                            # set --min-xcorr-level 0 to do the left-to-right
                            # and right-to-left consistency check at the lowest level.
                            default='--stereo-algorithm 2 --min-xcorr-level 0',
                            help='Extra arguments to pass to stereo.')
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')
        parser.add_argument('--frames-file', dest='framesFile', default="",
                            help='Specific frames to run ortho2pinhole on within this frame range.')
        parser.add_argument('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch', default=None,
                            type=int, help="The maximum number of lidar files to fetch. " + \
                            "This is used in debugging.")
        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder", default=None,
                            help="The folder containing camera calibration.")
        parser.add_argument("--input-calibration-camera", dest="inputCalCamera", default="",
                            help="Instead of looking up the calibrated camera in the calibration folder, use this one.")
        parser.add_argument("--output-calibration-camera", dest="outputCalCamera", default="",
                            help="If specified, float the intrinsics and write the optimized model here.")
        parser.add_argument("--output-model-type", dest="outputModelType", default="RPC",
                            help="Generate a distortion model of type RPC, RPC5, or RPC6.")
        parser.add_argument("--reference-dem-folder", dest="refDemFolder", default=None,
                            help="The folder containing DEMs that created orthoimages.")
        # Fixed truncated help text: was "fault is no additional folder".
        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. " + \
                            "The default is no additional folder.")
        parser.add_argument("--simple-cameras", action="store_true", dest="simpleCameras",
                            default=False,
                            help="Don't use orthoimages to refine the camera models.")

        # This option is only needed when generating camera models from the nav files.
        parser.add_argument('--camera-mounting', default=0, dest='cameraMounting', type=int,
                            help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-ortho-processes', dest='numOrthoProcesses', default=-1,
                            type=int, help='The number of simultaneous ortho processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int, help='The number of threads per process.')

        # Action control
        parser.add_argument("--skip-fetch", action="store_true", dest="noFetch", default=False,
                            help="Skip data fetching.")
        parser.add_argument("--skip-convert", action="store_true", dest="noConvert",
                            default=False,
                            help="Skip data conversion.")
        parser.add_argument("--stop-after-fetch", action="store_true", dest="stopAfterFetch",
                            default=False,
                            help="Stop program after data fetching.")
        parser.add_argument("--stop-after-convert", action="store_true", dest="stopAfterConvert",
                            default=False,
                            help="Stop program after data conversion.")
        parser.add_argument("--skip-validate", action="store_true", dest="skipValidate",
                            default=False,
                            help="Skip input data validation.")
        parser.add_argument("--ignore-missing-lidar", action="store_true",
                            dest="ignoreMissingLidar", default=False,
                            help="Keep going if the lidar is missing.")
        parser.add_argument("--log-batches", action="store_true", dest="logBatches",
                            default=False,
                            help="Log the required batch commands without running them.")
        parser.add_argument('--cleanup', action='store_true', default=False, dest='cleanup',
                            help='If the final result is produced delete intermediate files.')
        parser.add_argument('--many-ip', action='store_true', default=False, dest='manyip',
                            help='If to use a lot of IP in bundle adjustment from the beginning.')
        parser.add_argument("--dry-run", action="store_true", dest="dryRun", default=False,
                            help="Set up the input directories but do not fetch/process any imagery.")
        parser.add_argument("--refetch", action="store_true", dest="reFetch", default=False,
                            help="Try fetching again if some files turned out invalid " + \
                            "during conversions.")
        parser.add_argument("--refetch-index", action="store_true", dest="refetchIndex",
                            default=False,
                            help="Force refetch of the index file.")
        parser.add_argument("--refetch-nav", action="store_true", dest="refetchNav",
                            default=False,
                            help="Force refetch of the nav file.")
        parser.add_argument("--stop-after-index-fetch", action="store_true",
                            dest="stopAfterIndexFetch", default=False,
                            help="Stop after fetching the indices.")
        parser.add_argument("--no-nav", action="store_true", dest="noNavFetch",
                            default=False,
                            help="Don't fetch or convert the nav data.")
        parser.add_argument("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                            default=False,
                            help="Skip lidar files in the conversion step.")
        parser.add_argument("--no-ortho-convert", action="store_true", dest="noOrthoConvert",
                            default=False,
                            help="Skip generating camera models in the conversion step.")
        parser.add_argument("--skip-fast-conversions", action="store_true",
                            dest="skipFastConvert", default=False,
                            help="Skips all non-ortho conversions.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # Default the number of ortho processes to the number of regular processes.
    if options.numOrthoProcesses < 0:
        options.numOrthoProcesses = options.numProcesses

    isSouth = icebridge_common.checkSite(options.site)

    # Turned off elevation limits here since they are being set from LIDAR data.
    ## Add the site based elevation limits to the stereoArgs option
    #altLimits = icebridge_common.getElevationLimits(options.site)
    #options.stereoArgs = (' %s --elevation-limit %f %f '
    #                      % (options.stereoArgs, altLimits[0], altLimits[1]))
    options.stereoArgs = (' %s ' % (options.stereoArgs))

    if options.cameraLookupFile is None:
        options.cameraLookupFile = P.join(basepath, 'camera_lookup.txt')
    if not os.path.isfile(options.cameraLookupFile):
        raise Exception("Can't find camera file: " + options.cameraLookupFile)

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    # Fetching only the indices implies stopping right after the fetch step.
    if options.stopAfterIndexFetch:
        options.stopAfterFetch = True

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Record everything
    logger   = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                            'icebridge_processing_log_frames_' + \
                                            str(options.startFrame) + "_" + str(options.stopFrame))

    # Make sure we later know what we were doing
    logger.info("full_processing_script.py " + " ".join(argsIn))

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput = True)
    logger.info("Running on machine: " + out)
    logger.info("Work dir is " + os.getcwd())

    os.system("ulimit -c 0") # disable core dumps
    os.system("umask 022")   # enforce files be readable by others

    # Perform some input checks and initializations
    # These are not needed unless cameras are initialized
    if options.inputCalFolder is None or not os.path.exists(options.inputCalFolder):
        raise Exception("Missing camera calibration folder.")
    if options.refDemFolder is None or not os.path.exists(options.refDemFolder):
        raise Exception("Missing reference DEM folder.")

    refDemName = icebridge_common.getReferenceDemName(options.site)
    refDemPath = os.path.join(options.refDemFolder, refDemName)
    if not os.path.exists(refDemPath):
        raise Exception("Missing reference DEM: " + refDemPath)

    # TODO: CLEAN UP!!!
    # Set up the output folders
    cameraFolder       = icebridge_common.getCameraFolder(options.outputFolder)
    imageFolder        = icebridge_common.getImageFolder(options.outputFolder)
    jpegFolder         = icebridge_common.getJpegFolder(options.outputFolder)
    orthoFolder        = icebridge_common.getOrthoFolder(options.outputFolder)
    fireballFolder     = icebridge_common.getFireballFolder(options.outputFolder)
    corrFireballFolder = icebridge_common.getCorrFireballFolder(options.outputFolder)
    lidarFolder        = icebridge_common.getLidarFolder(options.outputFolder)
    navFolder          = icebridge_common.getNavFolder(options.outputFolder)
    navCameraFolder    = icebridge_common.getNavCameraFolder(options.outputFolder)
    processedFolder    = icebridge_common.getProcessedFolder(options.outputFolder)

    if options.outputCalCamera != "":
        # Prepare to solve for intrinsics. Note that this modifies some things along the way.
        (options, cameraFolder, navCameraFolder, processedFolder) = \
                  solveIntrinsics_Part1(options, jpegFolder, cameraFolder, navCameraFolder,
                                        processedFolder, logger)

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processedFolder = os.path.join(processedFolder, options.processingSubfolder)
        logger.info('Will write to processing subfolder: ' + options.processingSubfolder)

    # If something failed in the first attempt either in fetch or in
    # convert, we will wipe bad files, and try to refetch/re-convert.
    numAttempts = 1
    if options.reFetch and (not options.noFetch):
        numAttempts = 2

    for attempt in range(numAttempts):
        if numAttempts > 1:
            logger.info("Fetch/convert attempt: " + str(attempt + 1))
        ans = runFetchConvert(options, isSouth, cameraFolder, imageFolder, jpegFolder,
                              orthoFolder, fireballFolder, corrFireballFolder, lidarFolder,
                              processedFolder, navFolder, navCameraFolder, refDemPath, logger)
        if ans == 0:
            break

    if options.stopAfterFetch or options.dryRun or options.stopAfterConvert:
        logger.info('Fetch/convert finished!')
        return 0

    # Call the processing routine
    processTheRun(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                  corrFireballFolder, processedFolder, isSouth, refDemPath)

    if options.outputCalCamera != "":
        # Finish solving for intrinsics.
        solveIntrinsics_Part2(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                              processedFolder, isSouth, logger)
def pushByType(run, options, logger, dataType):
    '''Rename the processed files of the given type ('DEM' or 'ORTHO') for one
    run according to the IODEM3 naming convention and push them to NSIDC via
    lftp.  Returns early (None) if fetching the archived data fails.

    Fix: regex patterns are now raw strings; the originals relied on invalid
    escape sequences like "\\d" in plain strings (deprecated since Python 3.6).'''

    # Fetch the ortho index from NSIDC if missing
    outputFolder = run.getFolder()
    logger.info("Output folder is " + outputFolder)
    os.system("mkdir -p " + outputFolder)

    # Current directory. It is important to go from /u to the real dir which is /nobackup...
    unpackDir = os.path.realpath(os.getcwd())
    logger.info("Unpack directory is " + unpackDir)

    orthoFolder    = icebridge_common.getOrthoFolder(outputFolder)
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        fetchIndices(options, logger)

    logger.info("Reading ortho index: " + orthoIndexPath)
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)

    # Fetch unarchived folder if missing
    if dataType == 'DEM':
        unarchivedFolder = run.getAssemblyFolder()
    elif dataType == 'ORTHO':
        unarchivedFolder = run.getProcessFolder()
    else:
        raise Exception("Unknown data type: " + dataType)
    logger.info("Unarchived data folder is " + unarchivedFolder)

    # Especially for ortho, force-fetch each time, as there is no good way
    # of checking if we fetched well before.
    start_time()
    if not archive_functions.fetchProcessedByType(run, unpackDir, logger, dataType):
        return
    stop_time("fetching archived data by type: " + dataType, logger)

    # Make the output directory at NSIDC (named like SITE_YYYY.MM.DD)
    m = re.match(r"(\d\d\d\d)(\d\d)(\d\d)", options.yyyymmdd)
    if m:
        outDir = options.site + "_" + m.group(1) + "." + m.group(2) + "." + m.group(3)
    else:
        raise Exception("Could not parse: " + options.yyyymmdd)

    # Keep the output directory locally here
    localDirPath = os.path.join(outputFolder, dataType, outDir)
    os.system("mkdir -p " + localDirPath)
    logger.info("Storing the renamed " + dataType + " files in " + localDirPath)
    logger.info("Directory name at NSIDC: " + outDir)

    # Read the DEMs and orthos, and copy them to outDir according to the final convention
    if dataType == 'DEM':
        dataFiles = icebridge_common.getTifs(unarchivedFolder, prependFolder=True)
    else:
        dataFiles = glob.glob(os.path.join(unarchivedFolder, 'batch_*', 'out-ortho.tif'))

    for dataFile in dataFiles:
        # Here we use the convention from archive_functions.py for DEMs and
        # from how we store orthos.
        if dataType == 'DEM':
            m = re.match(r"^.*?" + unarchivedFolder + r"/F_(\d+)_\d+_" + dataType + \
                         r"\.tif$", dataFile)
            if not m:
                continue
            frameNumber = int(m.group(1))
        else:
            m = re.match(r"^.*?" + unarchivedFolder + r"/batch_(\d+)_\d+_\d+/" + \
                         r"out-ortho.tif$", dataFile)
            if not m:
                continue
            frameNumber = int(m.group(1))

        if frameNumber < options.startFrame or frameNumber > options.stopFrame:
            continue

        # For each data file, copy from the ortho its meta info
        if frameNumber not in orthoFrameDict:
            # Bugfix: Ortho fetching failed, try again
            fetchIndices(options, logger)
            logger.info("Re-reading ortho index: " + orthoIndexPath)
            (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)
            if frameNumber not in orthoFrameDict:
                # This time there is nothing we can do
                raise Exception("Cannot find ortho for frame: " + str(frameNumber))

        orthoFile = orthoFrameDict[frameNumber]
        [dateString, timeString] = icebridge_common.parseTimeStamps(orthoFile)

        # It is always possible that the ortho file date will be the next day
        # after the current flight date, if the flight goes after midnight.
        # So it is not unreasonable that options.yyyymmdd != dateString.
        if dataType == 'DEM':
            outFile = ('IODEM3_%s_%s_%05d_DEM.tif' % (dateString, timeString, frameNumber))
        else:
            # TODO: Need to think more of the naming convention.
            outFile = ('IODEM3_%s_%s_%05d_ORTHO.tif' % (dateString, timeString, frameNumber))

        cmd = "/bin/cp -fv " + dataFile + " " + os.path.join(localDirPath, outFile)
        logger.info(cmd)
        os.system(cmd)

    # Push the directory to NSIDC
    remoteDirPath = os.path.join(os.path.basename(os.path.dirname(localDirPath)),
                                 os.path.basename(localDirPath))
    remoteDirPath = os.path.join('/incoming', 'Ames', remoteDirPath)
    logger.info("Storing at NSIDC in: " + remoteDirPath)

    cmd = 'lftp -e "mirror -P 20 -c -R -vvv --delete --delete-first ' + localDirPath + \
          ' ' + remoteDirPath + ' -i \'\.(tif)$\'; bye\" -u ' + options.loginInfo
    logger.info(cmd)

    start_time()
    (output, err, status) = asp_system_utils.executeCommand(cmd, suppressOutput=True)
    #status = os.system(cmd)
    logger.info("LFTP output and error: " + output + ' ' + err)
    logger.info("LFTP status: " + str(status))
    #if status != 0:
    #    raise Exception("Problem pushing")
    stop_time("push to NSIDC", logger)
'icebridge_blend_log') (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'], suppressOutput=True) logger.info("Running on machine: " + out) processFolder = os.path.join(options.outputFolder, 'processed') # Handle subfolder option. This is useful for comparing results with different parameters! if options.processingSubfolder: processFolder = os.path.join(processFolder, options.processingSubfolder) logger.info('Reading from processing subfolder: ' + options.processingSubfolder) orthoFolder = icebridge_common.getOrthoFolder(options.outputFolder) orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder) if not os.path.exists(orthoIndexPath): raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".") (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath) if options.blendToFireball: fireballFrameDict = icebridge_common.getCorrectedFireballDems( options.outputFolder) lidarFolder = icebridge_common.getLidarFolder(options.outputFolder) threadText = '' if options.numThreads:
def main(argsIn):
    '''Entry point for blend_dems: parse the command line, bound the frame
    range by the available ortho files, then run the DEM blending for each
    frame, either sequentially or via a multiprocessing pool.

    Fix: the lidar-matching try/except used a bare "except:", which also
    swallows KeyboardInterrupt/SystemExit; narrowed to "except Exception".'''

    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/blend_dems.py --site GR \
        #   --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #   --num-threads 8 --num-processes 10
        usage = '''blend_dems.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int, help="The number of images to bundle adjust and process " + \
                            "in a single batch.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')
        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. " + \
                            "The default is no additional folder.")
        parser.add_argument("--compute-diff-to-prev-dem", action="store_true",
                            dest="computeDiffToPrev", default=False,
                            help="Compute the absolute difference between the current DEM " +
                            "and the one before it.")
        parser.add_argument("--blend-to-fireball-footprint", action="store_true",
                            dest="blendToFireball", default=False,
                            help="Create additional blended DEMs having the same " + \
                            "footprint as Fireball DEMs.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int, help='The number of threads per process.')

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    os.system("ulimit -c 0")  # disable core dumps
    os.system("rm -f core.*") # these keep on popping up
    os.system("umask 022")    # enforce files be readable by others

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Make this an option??
    logger   = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                            'icebridge_blend_log')

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)
    logger.info(str(argsIn))

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    orthoFolder    = icebridge_common.getOrthoFolder(options.outputFolder)
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)

    if options.blendToFireball:
        fireballFrameDict = icebridge_common.getCorrectedFireballDems(options.outputFolder)

    lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)

    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)

    redo           = False
    suppressOutput = True
    taskHandles    = []
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Bound the frames by the range for which ortho files exist
    sortedFrames = sorted(orthoFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No ortho files, that means nothing to do
        options.startFrame = 0
        options.stopFrame  = 0

    for frame in range(options.startFrame, options.stopFrame):

        if not frame in orthoFrameDict:
            logger.info("Error: Missing ortho file for frame: " + str(frame) + ".")
            continue

        orthoFile = orthoFrameDict[frame]
        try:
            lidarFile = icebridge_common.findMatchingLidarFile(orthoFile, lidarFolder)
        except Exception:
            # Skip if no lidar file matches this frame
            continue

        fireballDEM = ""
        if options.blendToFireball:
            if frame in fireballFrameDict:
                fireballDEM = fireballFrameDict[frame]
            else:
                logger.info("No fireball DEM for frame: " + str(frame))

        args = (frame, processFolder, lidarFile, fireballDEM, options, threadText,
                redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runBlend, args))
        else:
            runBlend(*args)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger, interactive=False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)