def lidarFilesInRange(lidarDict, lidarFolder, startFrame, stopFrame):
    '''Return the set of lidar files needed for the given frame range.

    For each ortho frame in [startFrame, stopFrame], find the lidar file
    with the closest timestamp and collect it. Also add the two
    neighboring lidar files, to help with finding lidar pairs later.
    '''
    # Lidar files ordered by their frame number.
    lidarList = [lidarDict[frame] for frame in sorted(lidarDict.keys())]

    # If we requested all frames, also get all the lidar files.
    if ((startFrame == icebridge_common.getSmallestFrame()) and
        (stopFrame  == icebridge_common.getLargestFrame())):
        minLidarIndex = 0
        maxLidarIndex = len(lidarList) - 1
    else:
        # Start with an empty (inverted) index range; grow it below.
        minLidarIndex = len(lidarList)
        maxLidarIndex = 0

    # Build up a list of lidar files that match the requested input frames
    orthoFolder    = icebridge_common.getOrthoFolder(os.path.dirname(lidarFolder))
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)
    for frame in sorted(orthoFrameDict.keys()):
        if (frame < startFrame) or (frame > stopFrame):
            continue
        orthoFrame = orthoFrameDict[frame]
        try:
            matchingLidar = icebridge_common.findMatchingLidarFileFromList(
                orthoFrame, lidarList)
        except Exception:
            # Some image files don't have a matching lidar file, just keep going.
            # (Narrowed from a bare "except:" so KeyboardInterrupt etc. still escape.)
            continue

        try:
            # Locate the matching file; list.index replaces the manual scan loop.
            index = lidarList.index(matchingLidar)
        except ValueError:
            continue
        if minLidarIndex > index:
            minLidarIndex = index
        if maxLidarIndex < index:
            maxLidarIndex = index

    # We will fetch neighboring lidar files as well
    if minLidarIndex > 0:
        minLidarIndex -= 1
    if maxLidarIndex + 1 < len(lidarList):
        maxLidarIndex += 1

    lidarsToFetch = set()
    if lidarList:
        for index in range(minLidarIndex, maxLidarIndex + 1):
            # Fetch only the requested lidar files.
            lidarsToFetch.add(lidarList[index])

    return lidarsToFetch
def main(argsIn):
    '''Parse arguments and call the processing function'''

    try:
        # Sample usage:
        # python generate_flight_summary.py --yyyymmdd 20091016 --site AN
        usage = '''generate_flight_summary.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL).")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        parser.add_argument("--parent-folder", dest="parentFolder", default=os.getcwd(),
                            help="The folder having all the runs.")
        parser.add_argument("--skip-kml-gen", action="store_true", dest="skipKml",
                            default=False,
                            help="Skip combining kml files.")
        parser.add_argument("--skip-geo-center", action="store_true", dest="skipGeo",
                            default=False,
                            help="Skip computing the center of the tile, which is slow.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')

        options = parser.parse_args(argsIn)

    # "except X as msg" replaces the Python-2-only "except X, msg" form,
    # which is a syntax error under Python 3.
    except argparse.ArgumentError as msg:
        parser.error(msg)
def lidarFilesInRange(lidarDict, lidarFolder, startFrame, stopFrame):
    '''Return the set of lidar files needed for the given frame range.

    For each ortho frame in [startFrame, stopFrame], find the lidar file
    with the closest timestamp and collect it. Also add the two
    neighboring ones, to help with finding lidar pairs later.
    '''
    # Lidar files ordered by their frame number.
    lidarList = [lidarDict[frame] for frame in sorted(lidarDict.keys())]

    # If we requested all frames, also get all the lidar files.
    if ((startFrame == icebridge_common.getSmallestFrame()) and
        (stopFrame  == icebridge_common.getLargestFrame())):
        minLidarIndex = 0
        maxLidarIndex = len(lidarList) - 1
    else:
        # Start with an empty (inverted) index range; grow it below.
        minLidarIndex = len(lidarList)
        maxLidarIndex = 0

    # Build up a list of lidar files that match the requested input frames
    orthoFolder    = icebridge_common.getOrthoFolder(os.path.dirname(lidarFolder))
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)
    for frame in sorted(orthoFrameDict.keys()):
        if (frame < startFrame) or (frame > stopFrame):
            continue
        orthoFrame = orthoFrameDict[frame]
        try:
            matchingLidar = icebridge_common.findMatchingLidarFileFromList(
                orthoFrame, lidarList)
        except Exception:
            # Some image files don't have a matching lidar file, just keep going.
            # (Narrowed from a bare "except:" so KeyboardInterrupt etc. still escape.)
            continue

        try:
            # Locate the matching file; list.index replaces the manual scan loop.
            index = lidarList.index(matchingLidar)
        except ValueError:
            continue
        if minLidarIndex > index:
            minLidarIndex = index
        if maxLidarIndex < index:
            maxLidarIndex = index

    # We will fetch neighboring lidar files as well
    if minLidarIndex > 0:
        minLidarIndex -= 1
    if maxLidarIndex + 1 < len(lidarList):
        maxLidarIndex += 1

    lidarsToFetch = set()
    if lidarList:
        for index in range(minLidarIndex, maxLidarIndex + 1):
            # Fetch only the requested lidar files.
            lidarsToFetch.add(lidarList[index])

    return lidarsToFetch
def main(argsIn):
    '''Parse the arguments and label the images in the requested frame range.'''

    try:
        usage = '''label_image.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')
        parser.add_argument("--training", dest="trainingPath", required=True,
                            help="Path to the training file.")
        parser.add_argument('--num-processes', dest='numProcesses', default=8,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=1,
                            type=int, help='IGNORED.')

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    if not os.path.exists(options.trainingPath):
        # print() call replaces the Python-2-only print statement, which is a
        # syntax error under Python 3 (the except clause above already used
        # Python 3 style).
        print('Error: Input training file ' + options.trainingPath + ' does not exist!')
        return -1

    # TODO: Everything should use the RunHelper class for this!
    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    # Input is raw jpeg files.
    inputFolder = icebridge_common.getJpegFolder(options.outputFolder)

    # Write all tool output to this folder.
    outputFolder = icebridge_common.getLabelFolder(options.outputFolder)

    # Do the work
    label_images(inputFolder, outputFolder, options.startFrame, options.stopFrame,
                 options.trainingPath, options.numProcesses)
def main(argsIn):
    '''Parse the command-line arguments for the labeling tool.'''

    try:
        usage = '''label_image.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')
        parser.add_argument("--training", dest="trainingPath", required=True,
                            help="Path to the training file.")
        parser.add_argument('--num-processes', dest='numProcesses', default=8,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=1,
                            type=int, help='IGNORED.')

        options = parser.parse_args(argsIn)

    # "except X as msg" replaces the Python-2-only "except X, msg" form,
    # which is a syntax error under Python 3.
    except argparse.ArgumentError as msg:
        parser.error(msg)
def main(argsIn):
    '''Parse the command-line arguments for the orthoimage generation tool.'''

    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/gen_ortho.py --site GR \
        #   --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #   --num-threads 8 --num-processes 3.
        usage = '''gen_ortho.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int,
                            help="The number of images to bundle adjust and process " + \
                            "in a single batch.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')
        parser.add_argument('--camera-mounting', dest='cameraMounting', default=0,
                            type=int,
                            help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')
        parser.add_argument("--processing-subfolder", dest="processingSubfolder",
                            default=None,
                            help="Specify a subfolder name where the processing outputs will go. " + \
                            "The default is no additional folder.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int, help='The number of threads per process.')

        options = parser.parse_args(argsIn)

    # "except X as msg" replaces the Python-2-only "except X, msg" form,
    # which is a syntax error under Python 3.
    except argparse.ArgumentError as msg:
        parser.error(msg)
def main(argsIn):
    '''Parse the command-line arguments for the multi-process command runner.'''

    try:
        usage = '''usage: multi_process_command_runner.py ...'''

        parser = argparse.ArgumentParser(usage=usage)

        # Data selection options
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This last one will not be processed.')
        parser.add_argument('--num-processes', dest='numProcesses', type=int,
                            default=-1,
                            help='How many processes to start at the same time.')
        parser.add_argument("--command-file-path", dest="commandFilePath", default=None,
                            help="The file from where to read the commands to process.")
        parser.add_argument("--force-redo-these-frames", dest="redoFrameList", default="",
                            help="For each frame in this file (stored one per line) within the current frame range, delete the batch folder and redo the batch.")

        options = parser.parse_args(argsIn)

    # "except X as msg" replaces the Python-2-only "except X, msg" form,
    # which is a syntax error under Python 3.
    except argparse.ArgumentError as msg:
        parser.error(msg)
def main(argsIn):
    '''Parse arguments, then run the commands from the command file in a
    process pool, restricted to the requested frame range.'''

    try:
        usage = '''usage: multi_process_command_runner.py ...'''

        parser = argparse.ArgumentParser(usage=usage)

        # Data selection options
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames. Set both to None to blindly run all commands.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This last one will not be processed.')
        parser.add_argument('--num-processes', dest='numProcesses', type=int,
                            default=-1,
                            help='How many processes to start at the same time.')
        parser.add_argument("--command-file-path", dest="commandFilePath", default=None,
                            help="The file from where to read the commands to process.")
        parser.add_argument("--force-redo-these-frames", dest="redoFrameList", default="",
                            help="For each frame in this file (stored one per line) within the current frame range, delete the batch folder and redo the batch.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # The original called os.system("ulimit -c 0") and os.system("umask 022"),
    # which only affect the short-lived subshell they launch, not this process
    # or its worker pool. Apply the settings in-process instead.
    import resource
    try:
        resource.setrlimit(resource.RLIMIT_CORE, (0, 0))  # disable core dumps
    except (ValueError, OSError):
        pass  # best effort, matching the original's tolerance of failure
    os.umask(0o022)  # enforce files be readable by others

    if not os.path.exists(options.commandFilePath):
        print('Error: File ' + options.commandFilePath + ' does not exist!')
        return -1

    # TODO: Write to a log?

    print('Starting processing pool with ' + str(options.numProcesses) + ' processes.')
    pool = multiprocessing.Pool(options.numProcesses)
    taskHandles = []

    # Frames explicitly requested for a redo, read one per line.
    framesToDo = set()
    if options.redoFrameList != "" and os.path.exists(options.redoFrameList):
        with open(options.redoFrameList, 'r') as f:
            text = f.read()
        for line in text.split('\n'):
            line = line.strip()
            if line == "":
                continue
            framesToDo.add(int(line))

    # Open the file and loop through all the lines
    # - Count the lines as we go so we only process the desired lines
    print('Opening command file ' + options.commandFilePath)
    text = ''
    with open(options.commandFilePath, 'r') as f:
        text = f.read()
    for line in text.split('\n'):

        if line == "":
            continue

        # If the frame range is turned off, just run the commands as-is.
        if (options.startFrame is None and options.stopFrame is None):
            # Add the command to the task pool
            taskHandles.append(pool.apply_async(runCommand, (line,)))
            continue

        (begFrame, endFrame) = icebridge_common.getFrameRangeFromBatchFolder(line)

        # Check line indices (stop frame is exclusive here, see the help text).
        if begFrame >= options.startFrame and begFrame < options.stopFrame:

            if options.redoFrameList != "":
                if begFrame in framesToDo:
                    folderName = icebridge_common.getBatchFolderFromBatchLine(line)
                    if os.path.exists(folderName):
                        print("will wipe " + folderName)
                        cmd = "rm -rf " + folderName
                        print(cmd)
                        try:
                            os.system(cmd)
                        except Exception:
                            pass  # best effort; a failed wipe just redoes less
                    else:
                        print("Could not find " + folderName)
                else:
                    print("Will skip frame: " + str(begFrame))
                    continue

            # Add the command to the task pool
            taskHandles.append(pool.apply_async(runCommand, (line,)))

    # Wait for all the tasks to complete
    print('Finished adding ' + str(len(taskHandles)) + ' tasks to the pool.')
    icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, interactive=False)

    # All tasks should be finished, clean up the processing pool
    icebridge_common.stopTaskPool(pool)
    print('Jobs finished.')
def main(argsIn):
    '''Parse arguments and run label_images on every frame in the range,
    one pool task per frame.'''

    try:
        usage = '''label_images.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')
        parser.add_argument("--training", dest="trainingPath", required=True,
                            help="Path to the training file.")
        parser.add_argument('--num-processes', dest='numProcesses', default=8,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=1,
                            type=int, help='Used for mapproject.')

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    if not os.path.exists(options.trainingPath):
        # print() call replaces the Python-2-only print statement, which is a
        # syntax error under Python 3 (the rest of this function already uses
        # print() calls).
        print('Error: Input training file ' + options.trainingPath + ' does not exist!')
        return -1

    # TODO: Everything should use the RunHelper class for this!
    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    # Set up a processing tool to handle the frames, this will be more efficient
    # than using the built-in mulithreading support.
    pool = multiprocessing.Pool(options.numProcesses)
    taskHandles = []

    for i in range(options.startFrame, options.stopFrame + 1):
        # Run on a single frame with one thread.
        #label_images(options.outputFolder, i, options.trainingPath, options.site,
        #             options.yyyymmdd, options.numThreads)
        taskHandles.append(pool.apply_async(label_images,
                                            (options.outputFolder, i,
                                             options.trainingPath, options.site,
                                             options.yyyymmdd, options.numThreads)))

    # Wait for all the tasks to complete
    print('Finished adding ' + str(len(taskHandles)) + ' tasks to the pool.')
    icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, interactive=False)

    # All tasks should be finished, clean up the processing pool
    icebridge_common.stopTaskPool(pool)
    print('Jobs finished.')
def main(argsIn):
    '''Parse the arguments and label the images in the requested frame range.'''

    try:
        usage = '''label_image.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')
        parser.add_argument("--training", dest="trainingPath", required=True,
                            help="Path to the training file.")
        parser.add_argument('--num-processes', dest='numProcesses', default=8,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=1,
                            type=int, help='IGNORED.')

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    if not os.path.exists(options.trainingPath):
        # print() call replaces the Python-2-only print statement, which is a
        # syntax error under Python 3 (the except clause above already used
        # Python 3 style).
        print('Error: Input training file ' + options.trainingPath + ' does not exist!')
        return -1

    # TODO: Everything should use the RunHelper class for this!
    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    # Input is raw jpeg files.
    inputFolder = icebridge_common.getJpegFolder(options.outputFolder)

    # Write all tool output to this folder.
    outputFolder = icebridge_common.getLabelFolder(options.outputFolder)

    # Do the work
    label_images(inputFolder, outputFolder, options.startFrame, options.stopFrame,
                 options.trainingPath, options.numProcesses)
def doFetch(options, outputFolder):
    '''The main fetch function. Returns the number of failures.'''

    # Verify that required authentication files exist before doing any work.
    home = os.path.expanduser("~")
    if not (os.path.exists(home + '/.netrc') and os.path.exists(home + '/.urs_cookies')):
        logger.error('Missing a required authentication file! See instructions here:\n' +
                     ' https://nsidc.org/support/faq/what-options-are-available-bulk-' +
                     'downloading-data-https-earthdata-login-enabled')
        return -1

    # Build the base curl command with netrc auth and cookie handling.
    curlPath    = asp_system_utils.which("curl")
    curlOpts    = ' -n -L '
    cookiePaths = ' -b ~/.urs_cookies -c ~/.urs_cookies '
    baseCurlCmd = curlPath + curlOpts + cookiePaths

    logger.info('Creating output folder: ' + outputFolder)
    os.system('mkdir -p ' + outputFolder)

    isSouth = (options.site == 'AN')

    if options.type == 'nav':  # Nav fetching is much less complicated
        return fetchNavData(options, outputFolder)

    parsedIndexPath = fetchAndParseIndexFile(options, isSouth, baseCurlCmd, outputFolder)
    if not icebridge_common.fileNonEmpty(parsedIndexPath):
        # Some dirs are weird, both images, fireball dems, and ortho.
        # Just accept whatever there is, but with a warning.
        logger.info('Warning: Missing index file: ' + parsedIndexPath)

    # Store file information in a dictionary
    # - Keep track of the earliest and latest frame
    logger.info('Reading file list from ' + parsedIndexPath)
    try:
        (frameDict, urlDict) = icebridge_common.readIndexFile(parsedIndexPath)
    except Exception:
        # We probably ran into old format index file. Must refetch.
        # (Narrowed from a bare "except:" so KeyboardInterrupt etc. still escape.)
        logger.info('Could not read index file. Try again.')
        options.refetchIndex = True
        parsedIndexPath = fetchAndParseIndexFile(options, isSouth, baseCurlCmd, outputFolder)
        (frameDict, urlDict) = icebridge_common.readIndexFile(parsedIndexPath)

    if options.stopAfterIndexFetch:
        return 0

    isLidar = (options.type in LIDAR_TYPES)

    allFrames = sorted(frameDict.keys())

    if not isLidar:
        # The lidar frames use a totally different numbering than the image/ortho/dem frames
        firstFrame = icebridge_common.getLargestFrame()   # start big
        lastFrame  = icebridge_common.getSmallestFrame()  # start small
        for frameNumber in allFrames:
            if frameNumber < firstFrame:
                firstFrame = frameNumber
            if frameNumber > lastFrame:
                lastFrame = frameNumber

        if options.allFrames:
            options.startFrame = firstFrame
            options.stopFrame  = lastFrame

    if isLidar:
        # Based on image frames, determine which lidar frames to fetch.
        if options.ignoreMissingLidar and len(frameDict.keys()) == 0:
            # Nothing we can do if this run has no lidar and we are told to continue
            logger.info("Warning: missing lidar, but continuing.")
            lidarsToFetch = set()
        else:
            lidarsToFetch = lidarFilesInRange(frameDict, outputFolder,
                                              options.startFrame, options.stopFrame)

    # There is always a chance that not all requested frames are available.
    # That is particularly true for Fireball DEMs. Instead of failing,
    # just download what is present and give a warning.
    if options.startFrame not in frameDict and not isLidar:
        logger.info("Warning: Frame " + str(options.startFrame) +
                    " is not found in this flight.")

    if options.stopFrame and (options.stopFrame not in frameDict) and not isLidar:
        logger.info("Warning: Frame " + str(options.stopFrame) +
                    " is not found in this flight.")

    allFilesToFetch = []  # Files that we will fetch, relative to the current dir.
    allUrlsToFetch  = []  # Full url of each file.

    # Loop through all found frames within the provided range
    currentFileCount = 0
    lastFrame = ""
    if len(allFrames) > 0:
        lastFrame = allFrames[len(allFrames) - 1]

    hasTfw = (options.type == 'fireball')
    hasXml = (isLidar or (options.type == 'ortho') or hasTfw)
    numFetched = 0
    skipCount  = 0
    for frame in allFrames:

        # Skip frame outside of range
        if isLidar:
            if frameDict[frame] not in lidarsToFetch:
                continue
        else:
            if ((frame < options.startFrame) or (frame > options.stopFrame)):
                continue

        # Handle the frame skip option
        if options.frameSkip > 0:
            if skipCount < options.frameSkip:
                skipCount += 1
                continue
            skipCount = 0

        filename = frameDict[frame]

        # Some files have an associated xml file. Fireball DEMs also have a tfw file.
        currFilesToFetch = [filename]
        if hasXml:
            currFilesToFetch.append(icebridge_common.xmlFile(filename))
        if hasTfw:
            currFilesToFetch.append(icebridge_common.tfwFile(filename))

        for filename in currFilesToFetch:
            url        = os.path.join(urlDict[frame], filename)
            outputPath = os.path.join(outputFolder, filename)
            allFilesToFetch.append(outputPath)
            allUrlsToFetch.append(url)

    # Restrict lidar fetch amount according to the parameter
    if (isLidar and options.maxNumLidarToFetch > 0 and
            len(allFilesToFetch) > options.maxNumLidarToFetch):

        # Ensure an even number, to fetch both the lidar file and its xml
        if options.maxNumLidarToFetch % 2 == 1:
            options.maxNumLidarToFetch += 1

        allFilesToFetch = allFilesToFetch[0:options.maxNumLidarToFetch]
        allUrlsToFetch  = allUrlsToFetch[0:options.maxNumLidarToFetch]

    icebridge_common.fetchFilesInBatches(baseCurlCmd, MAX_IN_ONE_CALL, options.dryRun,
                                         outputFolder,
                                         allFilesToFetch, allUrlsToFetch, logger)

    # Fetch from disk the set of already validated files, if any
    validFilesList = icebridge_common.validFilesList(os.path.dirname(outputFolder),
                                                    options.startFrame, options.stopFrame)
    validFilesSet = set()
    validFilesSet = icebridge_common.updateValidFilesListFromDisk(validFilesList,
                                                                  validFilesSet)
    numInitialValidFiles = len(validFilesSet)

    # Verify that all files were fetched and are in good shape
    failedFiles = []
    for outputPath in allFilesToFetch:

        if options.skipValidate:
            continue

        if not icebridge_common.fileNonEmpty(outputPath):
            logger.info('Missing file: ' + outputPath)
            failedFiles.append(outputPath)
            continue

        if icebridge_common.hasImageExtension(outputPath):
            if False:
                # This check is just so slow. Turn it off for now.
                # This will impact only the validation of jpegs,
                # as the other files can be validated via the checksum.
                # Jpegs will be validated when converting them to 1 band images
                if outputPath in validFilesSet and os.path.exists(outputPath):
                    #logger.info('Previously validated: ' + outputPath)  # verbose
                    continue
                else:
                    if not icebridge_common.isValidImage(outputPath):
                        logger.info('Found an invalid image. Will wipe it: ' + outputPath)
                        if os.path.exists(outputPath):
                            os.remove(outputPath)
                        failedFiles.append(outputPath)
                        continue
                    else:
                        logger.info('Valid image: ' + outputPath)
                        validFilesSet.add(outputPath)  # mark it as validated

        # Sanity check: XML files must have the right latitude.
        if icebridge_common.fileExtension(outputPath) == '.xml':
            if outputPath in validFilesSet and os.path.exists(outputPath):
                #logger.info('Previously validated: ' + outputPath)  # verbose
                continue
            else:
                if os.path.exists(outputPath):
                    try:
                        latitude = icebridge_common.parseLatitude(outputPath)
                        logger.info('Valid file: ' + outputPath)
                        validFilesSet.add(outputPath)  # mark it as validated
                    except Exception:
                        # Corrupted file
                        # (Narrowed from a bare "except:".)
                        logger.info("Failed to parse latitude, will wipe: " + outputPath)
                        if os.path.exists(outputPath):
                            os.remove(outputPath)
                        failedFiles.append(outputPath)

                    # On a second thought, don't wipe files with wrong latitude, as
                    # next time we run fetch we will have to fetch them again.
                    # Hopefully they will be ignored.
                    #isGood = hasGoodLat(latitude, isSouth)
                    #if not isGood:
                    #    logger.info("Wiping XML file " + outputPath + " with bad latitude " + \
                    #                str(latitude))
                    #    os.remove(outputPath)
                    #    imageFile = icebridge_common.xmlToImage(outputPath)
                    #    if os.path.exists(imageFile):
                    #        logger.info("Wiping TIF file " + imageFile + " with bad latitude " + \
                    #                    str(latitude))
                    #        os.remove(imageFile)

        # Verify the checksum
        if hasXml and len(outputPath) >= 4 and outputPath[-4:] != '.xml' \
                and outputPath[-4:] != '.tfw':
            if outputPath in validFilesSet and os.path.exists(outputPath):
                #logger.info('Previously validated: ' + outputPath)  # verbose
                continue
            else:
                isGood = icebridge_common.hasValidChkSum(outputPath, logger)
                if not isGood:
                    # Wipe both the data file and its xml, so both get refetched.
                    xmlFile = icebridge_common.xmlFile(outputPath)
                    logger.info('Found invalid data. Will wipe: ' + outputPath + ' ' + xmlFile)
                    if os.path.exists(outputPath):
                        os.remove(outputPath)
                    if os.path.exists(xmlFile):
                        os.remove(xmlFile)
                    failedFiles.append(outputPath)
                    failedFiles.append(xmlFile)
                    continue
                else:
                    logger.info('Valid file: ' + outputPath)
                    validFilesSet.add(outputPath)

        if hasTfw and icebridge_common.fileExtension(outputPath) == '.tfw':
            if outputPath in validFilesSet and os.path.exists(outputPath):
                #logger.info('Previously validated: ' + outputPath)  # verbose
                continue
            else:
                isGood = icebridge_common.isValidTfw(outputPath, logger)
                if not isGood:
                    xmlFile = icebridge_common.xmlFile(outputPath)
                    logger.info('Found invalid tfw. Will wipe: ' + outputPath + ' ' + xmlFile)
                    if os.path.exists(outputPath):
                        os.remove(outputPath)
                    if os.path.exists(xmlFile):
                        os.remove(xmlFile)
                    failedFiles.append(outputPath)
                    failedFiles.append(xmlFile)
                    continue
                else:
                    logger.info('Valid tfw file: ' + outputPath)
                    validFilesSet.add(outputPath)

    # Write to disk the list of validated files, but only if new
    # validations happened. First re-read that list, in case a
    # different process modified it in the meantime, such as if two
    # managers are running at the same time.
    numFinalValidFiles = len(validFilesSet)
    if numInitialValidFiles != numFinalValidFiles:
        validFilesSet = \
            icebridge_common.updateValidFilesListFromDisk(validFilesList, validFilesSet)
        icebridge_common.writeValidFilesList(validFilesList, validFilesSet)

    numFailed = len(failedFiles)
    if numFailed > 0:
        logger.info("Number of files that could not be processed: " + str(numFailed))

    return numFailed
def main(argsIn):
    '''Parse arguments, then push the processed DEM products for one flight
    to NSIDC and wipe the local run folder.'''

    try:
        usage = '''usage: push_to_nsidc.py <options> '''
        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", default="",
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", default="",
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--site_yyyymmdd", dest="site_yyyymmdd", default="",
                            help="A value like GR_20150330, which will be split into site and yyyymmdd by this script.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')
        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder",
                            default=None,
                            help="The folder containing camera calibration.")
        parser.add_argument("--reference-dem-folder", dest="refDemFolder",
                            default=None,
                            help="The folder containing DEMs that created orthoimages.")
        parser.add_argument("--login-info", dest="loginInfo", default=None,
                            help="user,password destination.nsidc.org.")
        parser.add_argument("--done-file", dest="doneFile", default=None,
                            help="List of runs that were done by now.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    # parse --site_yyyymmdd. Sometimes it is easier to pass this than
    # to pass separately --site and --yyyymmdd.
    m = re.match(r'^(\w+)_(\w+)', options.site_yyyymmdd)
    if m:
        options.site     = m.group(1)
        options.yyyymmdd = m.group(2)
    else:
        options.site_yyyymmdd = options.site + "_" + options.yyyymmdd

    # Read the done file and exit if the current flight is done.
    # Guard against both None (the default) and "": the old check
    # (options.doneFile != "") tried to open None when the flag was absent.
    done = set()
    if options.doneFile:
        with open(options.doneFile, 'r') as f:
            for val in f:
                val = val.strip()
                done.add(val)
    if options.site_yyyymmdd in done:
        print("Skipping done flight: " + options.site_yyyymmdd)
        return 0

    run = run_helper.RunHelper(options.site, options.yyyymmdd, os.getcwd())

    # Set up logging in the run directory. Log outside of the run dir,
    # as that one we will wipe
    logFolder = os.path.abspath(os.path.join(run.getFolder(), '..', 'push_logs'))
    os.system('mkdir -p ' + logFolder)
    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(logFolder, logLevel, "push")
    logger.info("Logging in: " + logFolder)

    # Check the lftp version. On some machines it is too old.
    # (Raw string and escaped dot so "4.5" matches literally.)
    (out, err, status) = asp_system_utils.executeCommand(['lftp', '--version'],
                                                         suppressOutput=True)
    m = re.match(r'^.*?LFTP\s+\|\s+Version\s+4\.5', out)
    if not m:
        raise Exception('Expecting LFTP version 4.5.')
    else:
        logger.info("Found an acceptable version of LFTP.")

    pushByType(run, options, logger, 'DEM')
    #pushByType(run, options, logger, 'ORTHO') # need to wait for format decision

    # Wipe at the end
    cmd = "rm -rf " + run.getFolder()
    logger.info(cmd)
    os.system(cmd)
def main(argsIn):
    '''Parse the command-line arguments for the NSIDC push tool.'''

    try:
        usage = '''usage: push_to_nsidc.py <options> '''
        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", default="",
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", default="",
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--site_yyyymmdd", dest="site_yyyymmdd", default="",
                            help="A value like GR_20150330, which will be split into site and yyyymmdd by this script.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')
        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder",
                            default=None,
                            help="The folder containing camera calibration.")
        parser.add_argument("--reference-dem-folder", dest="refDemFolder",
                            default=None,
                            help="The folder containing DEMs that created orthoimages.")
        parser.add_argument("--login-info", dest="loginInfo", default=None,
                            help="user,password destination.nsidc.org.")
        parser.add_argument("--done-file", dest="doneFile", default=None,
                            help="List of runs that were done by now.")

        options = parser.parse_args(argsIn)

    # "except X as msg" replaces the Python-2-only "except X, msg" form,
    # which is a syntax error under Python 3.
    except argparse.ArgumentError as msg:
        parser.error(msg)
def main(argsIn):
    '''Fetch the jpeg files for a flight, then create an empty (fake) .tsai
    camera file matching each jpeg, named from the image's date/time/frame.'''
    try:
        usage = '''generate_fake_camera_models.py <options>'''
        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--work-folder", dest="workFolder", default=None,
                            help="Temporary download folder.")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')
        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    # Fetch the jpeg files for missing camera files
    # NOTE(review): fetch_options is an alias of options, not a copy, so the
    # assignments below also mutate options. Harmless for the fields used
    # here, but confirm before reusing options afterwards.
    fetch_options = options
    fetch_options.type = 'jpeg'
    fetch_options.year = int(options.yyyymmdd[0:4])
    fetch_options.month = int(options.yyyymmdd[4:6])
    fetch_options.day = int(options.yyyymmdd[6:8])
    fetch_options.skipValidate = True
    fetch_options.ignoreMissingLidar = True
    fetch_options.maxNumLidarToFetch = 0
    fetch_options.refetchIndex = False
    fetch_options.refetchNav = False
    fetch_options.stopAfterIndexFetch = False
    fetch_options.dryRun = False
    fetch_options.allFrames = False
    fetch_options.frameSkip = 0
    fetch_icebridge_data.doFetch(fetch_options, options.workFolder)

    if not os.path.exists(options.outputFolder):
        os.mkdir(options.outputFolder)

    # For each jpeg file, generate an empty file with the correct file name.
    inputFiles = os.listdir(options.workFolder)
    for f in inputFiles:
        if os.path.splitext(f)[1] != '.JPG':
            continue
        inputPath = os.path.join(options.workFolder, f)
        # Fixed: was the Python-2-only "print inputPath" statement; the
        # single-argument print(...) form behaves identically in Python 2
        # and 3 and matches the rest of the file.
        print(inputPath)

        # Get image info
        frame = icebridge_common.getFrameNumberFromFilename(inputPath)
        (datestr, timestr) = icebridge_common.getJpegDateTime(inputPath)

        # Pick output name
        outputName = icebridge_common.formFilePrefix(datestr, timestr, frame) + '.tsai'
        outputPath = os.path.join(options.outputFolder, outputName)

        cmd = 'touch ' + outputPath
        print(cmd)
        os.system(cmd)
def main(argsIn):
    '''Top-level fetch driver: parse options, optionally restore a prior
    archive from lou, wipe stale nav/lidar products, run
    full_processing_script.py in fetch/validate/convert-only mode, and
    optionally archive the result. Returns -1 on error, None otherwise.'''
    try:
        # Sample usage:
        # python fetcher_script.py \
        #  --yyyymmdd 20091016 --site AN --start-frame 350 --stop-frame 353 --skip-validate
        # An output folder will be crated automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''usage: fetcher_script.py <options>'''

        parser = optparse.OptionParser(usage=usage)

        # Run selection
        parser.add_option("--yyyymmdd", dest="yyyymmdd", default=None,
                          help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_option("--site", dest="site", default=None,
                          help="Name of the location of the images (AN, GR, or AL)")

        parser.add_option("--camera-calibration-folder", dest="inputCalFolder", default=None,
                          help="The folder containing camera calibration.")
        parser.add_option("--reference-dem-folder", dest="refDemFolder", default=None,
                          help="The folder containing DEMs that created orthoimages.")

        # Python treats numbers starting with 0 as being in octal rather than decimal.
        # Ridiculous. So read them as strings and convert to int.
        parser.add_option('--start-frame', dest='startFrameStr', default=None,
                          help="Frame to start with. Leave this and stop-frame blank to " + \
                          "process all frames.")
        parser.add_option('--stop-frame', dest='stopFrameStr', default=None,
                          help='Frame to stop on.')
        parser.add_option('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch',
                          default=100000000, type='int',
                          help='The maximum number of lidar files to fetch. ' + \
                          'This is used in debugging.')
        parser.add_option("--skip-validate", action="store_true", dest="skipValidate",
                          default=False,
                          help="Skip input data validation.")
        parser.add_option("--ignore-missing-lidar", action="store_true",
                          dest="ignoreMissingLidar", default=False,
                          help="Keep going if the lidar is missing.")
        parser.add_option("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                          default=False,
                          help="Skip lidar files in the conversion step.")
        parser.add_option("--no-nav", action="store_true", dest="noNav",
                          default=False,
                          help="Skip dealing with raw nav data.")
        parser.add_option("--skip-processing", action="store_true", dest="skipProcessing",
                          default=False,
                          help="Skip fetch, validate, and convert. Assume all data is ready.")
        parser.add_option("--refetch-index", action="store_true", dest="refetchIndex",
                          default=False,
                          help="Force refetch of the index file.")
        parser.add_option("--stop-after-index-fetch", action="store_true",
                          dest="stopAfterIndexFetch", default=False,
                          help="Stop after fetching the indices.")
        parser.add_option("--tar", action="store_true", dest="tar", default=False,
                          help="After fetching all data and performing all conversions and " + \
                          "validations, make a tarball on lou. Only valid on Pleiades!")
        parser.add_option("--wipe", action="store_true", dest="wipe", default=False,
                          help="After making a tarball with --tar, wipe the directory. ")
        parser.add_option("--start-with-lou-archive", action="store_true",
                          dest="startWithLouArchive", default=False,
                          help="Untar an existing archive from lou, then continue.")

        (options, args) = parser.parse_args(argsIn)

    except optparse.OptionError as msg:
        raise Usage(msg)

    if options.yyyymmdd is None or options.site is None:
        print("The flight date and site must be specified.")
        return -1

    options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)
    os.system('mkdir -p ' + options.outputFolder)

    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_fetcher_log')

    # Explicitely go from strings to integers, per earlier note.
    if options.startFrameStr is not None:
        startFrame = int(options.startFrameStr)
    else:
        startFrame = icebridge_common.getSmallestFrame()
    if options.stopFrameStr is not None:
        stopFrame = int(options.stopFrameStr)
    else:
        stopFrame = icebridge_common.getLargestFrame()

    # Unarchive, then continue with fetching
    if options.startWithLouArchive:
        start_time()
        startWithLouArchive(options, logger)
        stop_time("fetch_from_lou", logger)

    # Build the command line passed down to full_processing_script.py.
    # Note: --stop-after-convert / --no-ortho-convert / --refetch are always on.
    cmd = (('--yyyymmdd %s --site %s --start-frame %d --stop-frame %d ' +
            '--max-num-lidar-to-fetch %d --stop-after-convert --no-ortho-convert --refetch')
           % (options.yyyymmdd, options.site, startFrame, stopFrame,
              options.maxNumLidarToFetch))
    if options.refetchIndex:
        cmd += ' --refetch-index'  # this was not right in older fetched runs
    if options.stopAfterIndexFetch:
        cmd += ' --stop-after-index-fetch'
    if options.skipValidate:
        cmd += ' --skip-validate'
    if options.ignoreMissingLidar:
        cmd += ' --ignore-missing-lidar'
    if options.noLidarConvert:
        cmd += ' --no-lidar-convert'
    if options.noNav:
        cmd += ' --no-nav'
    if options.inputCalFolder is not None:
        cmd += ' --camera-calibration-folder ' + options.inputCalFolder
    if options.refDemFolder is not None:
        cmd += ' --reference-dem-folder ' + options.refDemFolder

    if not options.skipProcessing:

        # Refetch all nav stuff, as it was unreliable in the past
        navFolder = icebridge_common.getNavFolder(options.outputFolder)
        navCameraFolder = icebridge_common.getNavCameraFolder(options.outputFolder)
        if os.path.exists(navFolder):
            logger.info("Wiping: " + navFolder)
            os.system('rm -rfv ' + navFolder)
        if os.path.exists(navCameraFolder):
            logger.info("Wiping: " + navCameraFolder)
            os.system('rm -rfv ' + navCameraFolder)

        # Wipe processed lidar, as sometimes errors crept in.
        logger.info("Wiping processed lidar:")
        lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)
        if os.path.exists(lidarFolder):
            os.system('rm -fv ' + lidarFolder + '/*csv')
        pairedFolder = icebridge_common.getPairedLidarFolder(lidarFolder)
        if os.path.exists(pairedFolder):
            os.system('rm -rfv ' + pairedFolder)

        logger.info("full_processing_script.py " + cmd)
        start_time()
        if full_processing_script.main(cmd.split()) < 0:
            return -1
        stop_time("fetch_validate", logger)

    # Archive after fetching
    if options.tar:
        tarAndWipe(options, logger)
def main(argsIn):
    '''Generate ortho images for a flight: parse options, clamp the frame
    range to the available jpeg files, and run runOrtho per frame either
    sequentially or via a multiprocessing pool.'''
    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/gen_ortho.py --site GR \
        #    --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #    --num-threads 8 --num-processes 3.
        usage = '''gen_ortho.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int, help="The number of images to bundle adjust and process " + \
                            "in a single batch.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')

        parser.add_argument('--camera-mounting', dest='cameraMounting', default=0, type=int,
                            help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')

        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. "+\
                            "The default is no additional folder.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int, help='The number of threads per process.')
        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Make this an option??
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_ortho_log')

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput = True)
    logger.info("Running on machine: " + out)

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    jpegFolder = icebridge_common.getJpegFolder(options.outputFolder)
    jpegIndexPath = icebridge_common.csvIndexFile(jpegFolder)
    if not os.path.exists(jpegIndexPath):
        raise Exception("Error: Missing jpeg index file: " + jpegIndexPath + ".")
    (jpegFrameDict, jpegUrlDict) = icebridge_common.readIndexFile(jpegIndexPath,
                                                                  prependFolder = True)

    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)

    redo = False
    suppressOutput = True
    taskHandles = []
    # The pool is created only when more than one process was requested;
    # every later use is guarded by the same condition.
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Bound the frames. Note: this clamps options.startFrame/stopFrame
    # in place to the range of frames actually present in the jpeg index.
    sortedFrames = sorted(jpegFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No jpeg files, that means nothing to do
        options.startFrame = 0
        options.stopFrame = 0

    for frame in range(options.startFrame, options.stopFrame):

        if not frame in jpegFrameDict:
            logger.info("Error: Missing jpeg file for frame: " + str(frame) + ".")
            continue

        # Find the right image
        currImage = jpegFrameDict[frame]

        args = (frame, processFolder, currImage, options.bundleLength,
                options.cameraMounting, threadText, redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runOrtho, args))
        else:
            runOrtho(*args)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger, interactive = False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)
def doFetch(options, outputFolder):
    '''Fetch from NSIDC all the files of options.type for the requested frame
    range into outputFolder, then (unless options.skipValidate) validate each
    fetched file, wiping invalid ones. Returns 0 on full success, -1 if the
    required authentication files are missing, otherwise the number of files
    that failed to fetch or validate.'''

    # Verify that required files exist
    home = os.path.expanduser("~")
    if not (os.path.exists(home + '/.netrc') and os.path.exists(home + '/.urs_cookies')):
        logger.error(
            'Missing a required authentication file! See instructions here:\n' +
            ' https://nsidc.org/support/faq/what-options-are-available-bulk-downloading-data-https-earthdata-login-enabled'
        )
        return -1

    # All downloads go through curl with netrc auth and the URS cookie jar.
    curlPath = asp_system_utils.which("curl")
    curlOpts = ' -n -L '
    cookiePaths = ' -b ~/.urs_cookies -c ~/.urs_cookies '
    baseCurlCmd = curlPath + curlOpts + cookiePaths

    logger.info('Creating output folder: ' + outputFolder)
    os.system('mkdir -p ' + outputFolder)

    isSouth = (options.site == 'AN')

    parsedIndexPath = fetchAndParseIndexFile(options, isSouth, baseCurlCmd, outputFolder)
    if not icebridge_common.fileNonEmpty(parsedIndexPath):
        # Some dirs are weird, both images, dems, and ortho.
        # Just accept whatever there is, but with a warning.
        logger.info('Warning: Missing index file: ' + parsedIndexPath)

    # Store file information in a dictionary
    # - Keep track of the earliest and latest frame
    logger.info('Reading file list from ' + parsedIndexPath)
    try:
        (frameDict, urlDict) = readIndexFile(parsedIndexPath)
    except Exception:
        # Fixed: was a bare "except:", which also swallowed
        # KeyboardInterrupt/SystemExit.
        # We probably ran into old format index file. Must refetch.
        logger.info('Could not read index file. Try again.')
        options.refetchIndex = True
        parsedIndexPath = fetchAndParseIndexFile(options, isSouth, baseCurlCmd, outputFolder)
        (frameDict, urlDict) = readIndexFile(parsedIndexPath)

    allFrames = sorted(frameDict.keys())
    firstFrame = icebridge_common.getLargestFrame()   # start big
    lastFrame = icebridge_common.getSmallestFrame()   # start small
    for frameNumber in allFrames:
        if frameNumber < firstFrame:
            firstFrame = frameNumber
        if frameNumber > lastFrame:
            lastFrame = frameNumber

    if options.allFrames:
        options.startFrame = firstFrame
        options.stopFrame = lastFrame

    # There is always a chance that not all requested frames are available.
    # That is particularly true for Fireball DEMs. Instead of failing,
    # just download what is present and give a warning.
    if options.startFrame not in frameDict:
        logger.info("Warning: Frame " + str(options.startFrame) + \
                    " is not found in this flight.")
    if options.stopFrame and (options.stopFrame not in frameDict):
        logger.info("Warning: Frame " + str(options.stopFrame) + \
                    " is not found in this flight.")

    allFilesToFetch = []  # Files that we will fetch, relative to the current dir.
    allUrlsToFetch = []   # Full url of each file.

    # Loop through all found frames within the provided range
    currentFileCount = 0
    lastFrame = ""
    if len(allFrames) > 0:
        lastFrame = allFrames[len(allFrames) - 1]

    # Some types carry companion files: lidar/ortho/dem have .xml, dem also .tfw.
    hasTfw = (options.type == 'dem')
    hasXml = ((options.type in LIDAR_TYPES) or (options.type == 'ortho') or hasTfw)
    numFetched = 0
    for frame in allFrames:
        if (frame >= options.startFrame) and (frame <= options.stopFrame):
            filename = frameDict[frame]

            # Some files have an associated xml file. DEMs also have a tfw file.
            currFilesToFetch = [filename]
            if hasXml:
                currFilesToFetch.append(icebridge_common.xmlFile(filename))
            if hasTfw:
                currFilesToFetch.append(icebridge_common.tfwFile(filename))

            for filename in currFilesToFetch:
                url = os.path.join(urlDict[frame], filename)
                outputPath = os.path.join(outputFolder, filename)
                allFilesToFetch.append(outputPath)
                allUrlsToFetch.append(url)

    # Cap the number of fetched files if so requested (used in debugging).
    if options.maxNumToFetch > 0 and len(allFilesToFetch) > options.maxNumToFetch:
        allFilesToFetch = allFilesToFetch[0:options.maxNumToFetch]
        allUrlsToFetch = allUrlsToFetch[0:options.maxNumToFetch]

    icebridge_common.fetchFilesInBatches(baseCurlCmd, MAX_IN_ONE_CALL, options.dryRun,
                                         outputFolder,
                                         allFilesToFetch, allUrlsToFetch, logger)

    # Verify that all files were fetched and are in good shape
    failedFiles = []
    for outputPath in allFilesToFetch:

        if options.skipValidate:
            continue

        if not icebridge_common.fileNonEmpty(outputPath):
            logger.info('Missing file: ' + outputPath)
            failedFiles.append(outputPath)
            continue

        if icebridge_common.hasImageExtension(outputPath):
            if not icebridge_common.isValidImage(outputPath):
                logger.info('Found an invalid image. Will wipe it: ' + outputPath)
                if os.path.exists(outputPath):
                    os.remove(outputPath)
                failedFiles.append(outputPath)
                continue
            else:
                logger.info('Valid image: ' + outputPath)

        # Sanity check: XML files must have the right latitude.
        if icebridge_common.fileExtension(outputPath) == '.xml':
            if os.path.exists(outputPath):
                latitude = icebridge_common.parseLatitude(outputPath)
                isGood = hasGoodLat(latitude, isSouth)
                if not isGood:
                    logger.info("Wiping XML file " + outputPath + " with bad latitude " + \
                                str(latitude))
                    os.remove(outputPath)
                    imageFile = icebridge_common.xmlToImage(outputPath)
                    if os.path.exists(imageFile):
                        logger.info("Wiping TIF file " + imageFile + " with bad latitude " + \
                                    str(latitude))
                        os.remove(imageFile)

        # Verify the checksum
        if hasXml and len(outputPath) >= 4 and outputPath[-4:] != '.xml' \
                and outputPath[-4:] != '.tfw':
            isGood = icebridge_common.hasValidChkSum(outputPath)
            if not isGood:
                xmlFile = icebridge_common.xmlFile(outputPath)
                logger.info('Found invalid data. Will wipe it: ' + outputPath + ' ' + xmlFile)
                if os.path.exists(outputPath):
                    os.remove(outputPath)
                if os.path.exists(xmlFile):
                    os.remove(xmlFile)
                failedFiles.append(outputPath)
                failedFiles.append(xmlFile)
                continue
            else:
                logger.info('Valid chksum: ' + outputPath)

        if hasTfw and icebridge_common.fileExtension(outputPath) == '.tfw':
            isGood = icebridge_common.isValidTfw(outputPath)
            if not isGood:
                xmlFile = icebridge_common.xmlFile(outputPath)
                logger.info('Found invalid data. Will wipe it: ' + outputPath + ' ' + xmlFile)
                if os.path.exists(outputPath):
                    os.remove(outputPath)
                if os.path.exists(xmlFile):
                    os.remove(xmlFile)
                failedFiles.append(outputPath)
                failedFiles.append(xmlFile)
                continue
            else:
                logger.info('Valid tfw file: ' + outputPath)

    numFailed = len(failedFiles)
    if numFailed > 0:
        logger.info("Number of files that could not be processed: " + str(numFailed))

    return numFailed
def main(argsIn):
    '''Parse options for fetch_icebridge_data.py and run doFetch with up to
    ten retries, stopping early when a retry makes no progress. Returns 0 on
    success and -1 on failure.'''

    # Command line parsing
    try:
        usage = "usage: fetch_icebridge_data.py [options] output_folder"
        parser = optparse.OptionParser(usage=usage)

        # Fixed: the --year/--month/--day help strings were copy-pasted from
        # a process-count option and were wrong.
        parser.add_option("--year", dest="year", type='int', default=None,
                          help="Year of the flight date.")
        parser.add_option("--month", dest="month", type='int', default=None,
                          help="Month of the flight date.")
        parser.add_option("--day", dest="day", type='int', default=None,
                          help="Day of the flight date.")
        parser.add_option("--yyyymmdd", dest="yyyymmdd", default=None,
                          help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_option("--site", dest="site", default=None,
                          help="Name of the location of the images (AN or GR)")
        parser.add_option("--start-frame", dest="startFrame", type='int',
                          default=icebridge_common.getSmallestFrame(),
                          help="Frame number or start of frame sequence")
        parser.add_option("--stop-frame", dest="stopFrame", type='int',
                          default=icebridge_common.getLargestFrame(),
                          help="End of frame sequence to download.")
        parser.add_option("--all-frames", action="store_true", dest="allFrames",
                          default=False,
                          help="Fetch all frames for this flight.")
        parser.add_option("--skip-validate", action="store_true", dest="skipValidate",
                          default=False,
                          help="Skip input data validation.")
        parser.add_option("--ignore-missing-lidar", action="store_true",
                          dest="ignoreMissingLidar", default=False,
                          help="Keep going if the lidar is missing.")
        parser.add_option("--frame-skip", dest="frameSkip", type='int', default=0,
                          help="Skip this many frames between downloads.")
        parser.add_option("--dry-run", action="store_true", dest="dryRun",
                          default=False,
                          help="Just print the image/ortho/fireball download commands.")
        parser.add_option("--refetch-index", action="store_true", dest="refetchIndex",
                          default=False,
                          help="Force refetch of the index file.")
        parser.add_option("--refetch-nav", action="store_true", dest="refetchNav",
                          default=False,
                          help="Force refetch of the nav file.")
        parser.add_option("--stop-after-index-fetch", action="store_true",
                          dest="stopAfterIndexFetch", default=False,
                          help="Stop after fetching the indices.")
        parser.add_option('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch',
                          default=-1, type='int',
                          help='The maximum number of lidar files to fetch. ' + \
                          'This is used in debugging.')

        # This call handles all the parallel_mapproject specific options.
        (options, args) = parser.parse_args(argsIn)

        if len(args) != 1:
            logger.info('Error: Missing output folder.\n' + usage)
            return -1
        outputFolder = os.path.abspath(args[0])

        # TODO: Restore "type" input parameter so that outside users who do not use
        # our folder convention can use this tool.
        options.type = icebridge_common.folderToType(outputFolder)
        if options.type == 'lidar':
            options.type = LIDAR_TYPES[0]
        print ('Detected type: ' + options.type)

        # Handle unified date option
        if options.yyyymmdd:
            options.year = int(options.yyyymmdd[0:4])
            options.month = int(options.yyyymmdd[4:6])
            options.day = int(options.yyyymmdd[6:8])

        if not options.stopFrame:
            options.stopFrame = options.startFrame

        # Error checking
        if (not options.year) or (not options.month) or (not options.day):
            logger.error('Error: year, month, and day must be provided.\n' + usage)
            return -1

        # Ortho and Fireball DEM files don't need this information to find them.
        if (options.type == 'jpeg') and not (options.site == 'AN' or options.site == 'GR'):
            logger.error('Error, site must be AN or GR for images.\n' + usage)
            return -1

        KNOWN_TYPES = ['jpeg', 'ortho', 'fireball', 'nav'] + LIDAR_TYPES
        if not (options.type.lower() in KNOWN_TYPES):
            logger.error('Error, type must be image, ortho, fireball, or a lidar type.\n' + usage)
            return -1

    except optparse.OptionError as msg:
        raise Exception(msg)

    # Make several attempts. Stop if there is no progress.
    numPrevFailed = -1
    numFailed = -1
    for attempt in range(10):
        numFailed = doFetch(options, outputFolder)

        if numFailed == 0:
            return 0  # Success

        if numFailed == numPrevFailed:
            logger.info("No progress in attempt %d" % (attempt+1))
            return -1

        # Try again
        logger.info("Failed to fetch all in attempt %d, will try again.\n" % (attempt+1))
        numPrevFailed = numFailed

    return -1  # We should not come all the way to here
def main(argsIn):
    '''Push the DEM results of one flight to NSIDC via lftp, then wipe the
    run folder. Returns 0 immediately if the flight is listed in the
    optional done file.'''
    try:
        usage = '''usage: push_to_nsidc.py <options> '''
        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", default="",
                            help="Specify the year, month, and day in one YYYYMMDD string.")

        parser.add_argument("--site", dest="site", default="",
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--site_yyyymmdd", dest="site_yyyymmdd", default="",
                            help="A value like GR_20150330, which will be split into site and yyyymmdd by this script.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')

        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder", default=None,
                            help="The folder containing camera calibration.")

        parser.add_argument("--reference-dem-folder", dest="refDemFolder", default=None,
                            help="The folder containing DEMs that created orthoimages.")

        parser.add_argument("--login-info", dest="loginInfo", default=None,
                            help="user,password destination.nsidc.org.")

        parser.add_argument("--done-file", dest="doneFile", default=None,
                            help="List of runs that were done by now.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    # parse --site_yyyymmdd. Sometimes it is easier to pass this than
    # to pass separately --site and --yyyymmdd.
    m = re.match(r'^(\w+)_(\w+)', options.site_yyyymmdd)
    if m:
        options.site = m.group(1)
        options.yyyymmdd = m.group(2)
    else:
        options.site_yyyymmdd = options.site + "_" + options.yyyymmdd

    # Read the done file and exit if the current flight is done
    done = set()
    # BUGFIX: --done-file defaults to None, and the old guard
    # (options.doneFile != "") is True for None, so open(None) raised a
    # TypeError whenever the option was not passed. Test truthiness instead,
    # which also skips the empty string as before.
    if options.doneFile:
        with open(options.doneFile, 'r') as f:
            for val in f:
                val = val.strip()
                done.add(val)
    if options.site_yyyymmdd in done:
        print("Skipping done flight: " + options.site_yyyymmdd)
        return 0

    run = run_helper.RunHelper(options.site, options.yyyymmdd, os.getcwd())

    # Set up logging in the run directory. Log outside of the run dir,
    # as that one we will wipe
    logFolder = os.path.abspath(os.path.join(run.getFolder(), '..', 'push_logs'))
    os.system('mkdir -p ' + logFolder)
    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(logFolder, logLevel, "push")
    logger.info("Logging in: " + logFolder)

    # Check the lftp version. On some machines it is too old.
    (out, err, status) = asp_system_utils.executeCommand(['lftp', '--version'],
                                                         suppressOutput=True)
    m = re.match(r'^.*?LFTP\s+\|\s+Version\s+4.5', out)
    if not m:
        raise Exception('Expecting LFTP version 4.5.')
    else:
        logger.info("Found an acceptable version of LFTP.")

    pushByType(run, options, logger, 'DEM')
    #pushByType(run, options, logger, 'ORTHO') # need to wait for format decision

    # Wipe at the end
    cmd = "rm -rf " + run.getFolder()
    logger.info(cmd)
    os.system(cmd)
options.cameraLookupFile = P.join(basepath, 'camera_lookup.txt') if not os.path.isfile(options.cameraLookupFile): raise Exception("Missing camera file: " + options.cameraLookupFile) if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9: # Make an exception for 20100422a raise Exception("The --yyyymmdd field must have length 8 or 9.") if options.outputFolder is None: options.outputFolder = options.site + '_' + options.yyyymmdd # Explicitely go from strings to integers, per earlier note. if options.startFrameStr is not None: startFrame = int(options.startFrameStr) else: startFrame = icebridge_common.getSmallestFrame() if options.stopFrameStr is not None: stopFrame = int(options.stopFrameStr) else: stopFrame = icebridge_common.getLargestFrame() os.system('mkdir -p ' + options.outputFolder) logLevel = logging.INFO # Make this an option?? logger = setUpLogger(options.outputFolder, logLevel) # Perform some input checks if not os.path.exists(inputCalFolder): raise Exception("Missing camera calibration folder: " + inputCalFolder) if not os.path.exists(refDemFolder): raise Exception("Missing reference DEM folder: " + refDemFolder) if not options.yyyymmdd:
def main(argsIn):
    '''Blend DEMs for a flight: parse options, clamp the frame range to the
    available ortho files, and run runBlend per frame either sequentially or
    via a multiprocessing pool.'''
    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/blend_dems.py --site GR \
        #   --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #   --num-threads 8 --num-processes 10
        usage = '''blend_dems.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int, help="The number of images to bundle adjust and process " + \
                            "in a single batch.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')

        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. "+\
                            "The default is no additional folder.")

        parser.add_argument("--compute-diff-to-prev-dem", action="store_true",
                            dest="computeDiffToPrev", default=False,
                            help="Compute the absolute difference between the current DEM " +
                            "and the one before it.")

        parser.add_argument("--blend-to-fireball-footprint", action="store_true",
                            dest="blendToFireball", default=False,
                            help="Create additional blended DEMs having the same " + \
                            "footprint as Fireball DEMs.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int, help='The number of threads per process.')
        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # BUGFIX: the original ran os.system("ulimit -c 0") and os.system("umask 022"),
    # but each os.system() call executes in its own subshell, so neither had any
    # effect on this process. Use the proper APIs to implement the stated intent.
    import resource  # local import; only needed here
    try:
        resource.setrlimit(resource.RLIMIT_CORE, (0, 0))  # disable core dumps
    except Exception:
        pass  # best-effort, matching the (ineffective) original behavior
    os.system("rm -f core.*")  # these keep on popping up
    os.umask(0o022)            # enforce files be readable by others

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Make this an option??
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_blend_log')

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput = True)
    logger.info("Running on machine: " + out)
    logger.info(str(argsIn))

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    orthoFolder = icebridge_common.getOrthoFolder(options.outputFolder)
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)

    if options.blendToFireball:
        fireballFrameDict = icebridge_common.getCorrectedFireballDems(options.outputFolder)

    lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)

    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)

    redo = False
    suppressOutput = True
    taskHandles = []
    # The pool is created only when more than one process was requested;
    # every later use is guarded by the same condition.
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Bound the frames. Note: this clamps options.startFrame/stopFrame in
    # place to the range of frames actually present in the ortho index.
    sortedFrames = sorted(orthoFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No ortho files, that means nothing to do
        options.startFrame = 0
        options.stopFrame = 0

    for frame in range(options.startFrame, options.stopFrame):

        if frame not in orthoFrameDict:
            logger.info("Error: Missing ortho file for frame: " + str(frame) + ".")
            continue

        orthoFile = orthoFrameDict[frame]
        try:
            lidarFile = icebridge_common.findMatchingLidarFile(orthoFile, lidarFolder)
        except Exception:
            # Fixed: was a bare "except:". Skip if no lidar file matches this frame.
            continue

        fireballDEM = ""
        if options.blendToFireball:
            if frame in fireballFrameDict:
                fireballDEM = fireballFrameDict[frame]
            else:
                logger.info("No fireball DEM for frame: " + str(frame))

        args = (frame, processFolder, lidarFile, fireballDEM, options, threadText,
                redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runBlend, args))
        else:
            runBlend(*args)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger, interactive = False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)
def main(argsIn):
    '''Entry point for fetcher_script.py: parse options, optionally restore a
    run from a lou archive, then delegate the actual fetch/validate/convert
    work to full_processing_script.main with --stop-after-convert, and
    optionally archive the result with tarAndWipe.'''
    try:
        # Sample usage:
        # python fetcher_script.py \
        #  --yyyymmdd 20091016 --site AN --start-frame 350 --stop-frame 353 --skip-validate
        # An output folder will be created automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''usage: fetcher_script.py <options>'''

        parser = optparse.OptionParser(usage=usage)

        # Run selection
        parser.add_option("--yyyymmdd", dest="yyyymmdd", default=None,
                          help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_option("--site", dest="site", default=None,
                          help="Name of the location of the images (AN, GR, or AL)")

        parser.add_option("--camera-calibration-folder", dest="inputCalFolder", default=None,
                          help="The folder containing camera calibration.")

        parser.add_option("--reference-dem-folder", dest="refDemFolder", default=None,
                          help="The folder containing DEMs that created orthoimages.")

        # Python treats numbers starting with 0 as being in octal rather than decimal.
        # Ridiculous. So read them as strings and convert to int.
        parser.add_option('--start-frame', dest='startFrameStr', default=None,
                          help="Frame to start with. Leave this and stop-frame blank to " + \
                          "process all frames.")
        parser.add_option('--stop-frame', dest='stopFrameStr', default=None,
                          help='Frame to stop on.')
        parser.add_option('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch',
                          default=100000000, type='int',
                          help='The maximum number of lidar files to fetch. ' + \
                          'This is used in debugging.')
        parser.add_option("--skip-validate", action="store_true", dest="skipValidate",
                          default=False,
                          help="Skip input data validation.")
        parser.add_option("--ignore-missing-lidar", action="store_true", dest="ignoreMissingLidar",
                          default=False,
                          help="Keep going if the lidar is missing.")
        parser.add_option("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                          default=False,
                          help="Skip lidar files in the conversion step.")
        parser.add_option("--no-nav", action="store_true", dest="noNav",
                          default=False,
                          help="Skip dealing with raw nav data.")
        parser.add_option("--skip-processing", action="store_true", dest="skipProcessing",
                          default=False,
                          help="Skip fetch, validate, and convert. Assume all data is ready.")
        parser.add_option("--refetch-index", action="store_true", dest="refetchIndex",
                          default=False,
                          help="Force refetch of the index file.")
        parser.add_option("--stop-after-index-fetch", action="store_true",
                          dest="stopAfterIndexFetch", default=False,
                          help="Stop after fetching the indices.")
        parser.add_option("--tar", action="store_true", dest="tar", default=False,
                          help="After fetching all data and performing all conversions and " + \
                          "validations, make a tarball on lou. Only valid on Pleiades!")
        parser.add_option("--wipe", action="store_true", dest="wipe", default=False,
                          help="After making a tarball with --tar, wipe the directory. ")
        parser.add_option("--start-with-lou-archive", action="store_true",
                          dest="startWithLouArchive", default=False,
                          help="Untar an existing archive from lou, then continue.")

        (options, args) = parser.parse_args(argsIn)

    except optparse.OptionError as msg:
        raise Usage(msg)

    if options.yyyymmdd is None or options.site is None:
        print("The flight date and site must be specified.")
        return -1

    options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)
    os.system('mkdir -p ' + options.outputFolder)

    logLevel = logging.INFO
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_fetcher_log')

    # Explicitely go from strings to integers, per earlier note.
    if options.startFrameStr is not None:
        startFrame = int(options.startFrameStr)
    else:
        startFrame = icebridge_common.getSmallestFrame()
    if options.stopFrameStr is not None:
        stopFrame = int(options.stopFrameStr)
    else:
        stopFrame = icebridge_common.getLargestFrame()

    # Unarchive, then continue with fetching
    if options.startWithLouArchive:
        start_time()
        startWithLouArchive(options, logger)
        stop_time("fetch_from_lou", logger)

    # Build the argument string to forward to full_processing_script.
    # NOTE: order and exact spelling of these flags must match what
    # full_processing_script.main expects.
    cmd = (('--yyyymmdd %s --site %s --start-frame %d --stop-frame %d ' +
            '--max-num-lidar-to-fetch %d --stop-after-convert --no-ortho-convert --refetch')
           % (options.yyyymmdd, options.site, startFrame, stopFrame,
              options.maxNumLidarToFetch))
    if options.refetchIndex:
        cmd += ' --refetch-index' # this was not right in older fetched runs
    if options.stopAfterIndexFetch:
        cmd += ' --stop-after-index-fetch'
    if options.skipValidate:
        cmd += ' --skip-validate'
    if options.ignoreMissingLidar:
        cmd += ' --ignore-missing-lidar'
    if options.noLidarConvert:
        cmd += ' --no-lidar-convert'
    if options.noNav:
        cmd += ' --no-nav'
    if options.inputCalFolder is not None:
        cmd += ' --camera-calibration-folder ' + options.inputCalFolder
    if options.refDemFolder is not None:
        cmd += ' --reference-dem-folder ' + options.refDemFolder

    if not options.skipProcessing:

        # Refetch all nav stuff, as it was unreliable in the past
        navFolder = icebridge_common.getNavFolder(options.outputFolder)
        navCameraFolder = icebridge_common.getNavCameraFolder(options.outputFolder)
        if os.path.exists(navFolder):
            logger.info("Wiping: " + navFolder)
            os.system('rm -rfv ' + navFolder)
        if os.path.exists(navCameraFolder):
            logger.info("Wiping: " + navCameraFolder)
            os.system('rm -rfv ' + navCameraFolder)

        # Wipe processed lidar, as sometimes errors crept in.
        logger.info("Wiping processed lidar:")
        lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)
        if os.path.exists(lidarFolder):
            os.system('rm -fv ' + lidarFolder + '/*csv')
        pairedFolder = icebridge_common.getPairedLidarFolder(lidarFolder)
        if os.path.exists(pairedFolder):
            os.system('rm -rfv ' + pairedFolder)

        logger.info("full_processing_script.py " + cmd)
        start_time()
        # A negative return signals failure in the downstream script.
        if full_processing_script.main(cmd.split()) < 0:
            return -1
        stop_time("fetch_validate", logger)

    # Archive after fetching
    if options.tar:
        tarAndWipe(options, logger)
def main(argsIn):
    '''Entry point for gen_ortho.py: parse options, read the jpeg index for the
    run, clamp the requested frame range to the available frames, then launch
    runOrtho for each frame — either sequentially or through a
    multiprocessing pool.'''
    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/gen_ortho.py --site GR \
        #   --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #   --num-threads 8 --num-processes 3.
        usage = '''gen_ortho.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2, type=int,
                            help="The number of images to bundle adjust and process " + \
                            "in a single batch.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')

        parser.add_argument('--camera-mounting', dest='cameraMounting', default=0, type=int,
                            help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')

        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. "+\
                            "The default is no additional folder.")

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1, type=int,
                            help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8, type=int,
                            help='The number of threads per process.')
        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site,
                                                             options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Make this an option??
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_ortho_log')

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    jpegFolder    = icebridge_common.getJpegFolder(options.outputFolder)
    jpegIndexPath = icebridge_common.csvIndexFile(jpegFolder)
    if not os.path.exists(jpegIndexPath):
        raise Exception("Error: Missing jpeg index file: " + jpegIndexPath + ".")
    (jpegFrameDict, jpegUrlDict) = icebridge_common.readIndexFile(jpegIndexPath,
                                                                  prependFolder=True)

    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)

    redo = False
    suppressOutput = True
    taskHandles = []
    # The pool only exists in parallel mode; the sequential path never touches it.
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Bound the frames
    sortedFrames = sorted(jpegFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No jpeg files, that means nothing to do
        options.startFrame = 0
        options.stopFrame  = 0

    for frame in range(options.startFrame, options.stopFrame):

        if not frame in jpegFrameDict:
            logger.info("Error: Missing jpeg file for frame: " + str(frame) + ".")
            continue

        # Find the right image
        currImage = jpegFrameDict[frame]

        args = (frame, processFolder, currImage, options.bundleLength,
                options.cameraMounting, threadText, redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runOrtho, args))
        else:
            runOrtho(*args)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger,
                                                         interactive=False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)
def solveIntrinsics_Part1(options, jpegFolder, cameraFolder, navCameraFolder, processedFolder,
                          logger):
    '''Some preliminary work before solving for intrinsics. Here we look up the
    default calibration file, and generate an RPC approximation of its
    distortion model with polynomials of degree 4. We will then create cameras
    and stereo DEMs using this initial camera file with RPC distortion.

    Returns the (possibly modified) options together with the suffixed
    cameraFolder, navCameraFolder and processedFolder.'''

    # Sanity checks
    if options.startFrame == icebridge_common.getSmallestFrame() or \
       options.stopFrame == icebridge_common.getLargestFrame():
        raise Exception("When solving for intrinsics, must specify a frame range.")
    if options.bundleLength != 2:
        raise Exception("When solving for intrinsics, we assume bundle length of 2.")
    if (options.stopFrame - options.startFrame) % 2 == 0:
        raise Exception("When solving for intrinsics, must have an even number of frames, " +
                        " so stopFrame - startFrame must be odd.")
    if options.processingSubfolder:
        raise Exception("Processing subfolder not supported when solving for intrinsics.")

    # Generate extra data we will use later to float intrinsics
    options.stereoArgs += " --num-matches-from-disp-triplets 10000 --unalign-disparity " # --enable-fill-holes "

    # Create separate directories for cameras and processed data,
    # as these will be distinct than what we will finally be
    # using to do the full run.
    suff = "_camgen"
    cameraFolder    += suff
    navCameraFolder += suff
    processedFolder += suff

    # Get the input calibration file. It must be the same one for every frame
    # in the range.
    # NOTE(review): a near-duplicate of this function elsewhere in this file
    # passes `logger` as an extra argument to getCalibrationFileForFrame —
    # confirm which signature is current.
    defaultCalibFile = ""
    for frame in range(options.startFrame, options.stopFrame + 1):
        currCalibFile = input_conversions.getCalibrationFileForFrame(
            options.cameraLookupFile, options.inputCalFolder, frame,
            options.yyyymmdd, options.site)
        if defaultCalibFile == "":
            defaultCalibFile = currCalibFile
        if defaultCalibFile != currCalibFile:
            # This is important, the calibration file must be unique
            raise Exception("Found two distinct calibration files: " + defaultCalibFile + \
                            " and " + currCalibFile)

    logger.info("Default calibration file: " + defaultCalibFile)
    if options.inputCalCamera != "":
        defaultCalibFile = options.inputCalCamera
        logger.info("Using instead the user-provided: " + defaultCalibFile)

    # Find the first image in the range
    jpegIndex = icebridge_common.csvIndexFile(jpegFolder)
    (jpegFrameDict, jpegUrlDict) = icebridge_common.readIndexFile(jpegIndex,
                                                                  prependFolder=True)
    if options.startFrame not in jpegFrameDict:
        # Bug fix: startFrame is an int; it must be stringified before
        # concatenation, otherwise this line itself raises a TypeError.
        raise Exception("Could not find jpeg image for frame: " + str(options.startFrame))
    firstImage = jpegFrameDict[options.startFrame]

    # Create the RPC file before optimization
    rpcCalibFile = os.path.join(processedFolder, os.path.basename(defaultCalibFile))
    rpcCalibFile = rpcCalibFile.replace(".tsai", "_INIT_RPC.tsai")
    logger.info("Will approximate camera model " + defaultCalibFile + " with " + \
                options.outputModelType + " model " + rpcCalibFile)
    os.system("mkdir -p " + os.path.dirname(rpcCalibFile))
    cmd = "convert_pinhole_model --input-file " + firstImage + ' --camera-file ' + \
          defaultCalibFile + ' --output-type ' + options.outputModelType + \
          ' --sample-spacing 50 -o ' + rpcCalibFile
    logger.info(cmd)
    os.system(cmd)

    # Use this one from now on
    options.inputCalCamera = rpcCalibFile

    # Return the modified values
    return (options, cameraFolder, navCameraFolder, processedFolder)
def main(argsIn):
    '''Entry point for fetch_icebridge_data.py: parse and validate the command
    line, detect the data type from the output folder name, and normalize the
    date/frame options. (Python-3 compatibility fixes: print statement ->
    print function; "except E, msg" -> "except E as msg". The --year/--month/
    --day help strings were copy-paste errors and have been corrected.)'''

    # Command line parsing
    try:
        usage = "usage: fetch_icebridge_data.py [options] output_folder"
        parser = optparse.OptionParser(usage=usage)

        parser.add_option("--year", dest="year", type='int', default=None,
                          help="Year portion of the flight date.")
        parser.add_option("--month", dest="month", type='int', default=None,
                          help="Month portion of the flight date.")
        parser.add_option("--day", dest="day", type='int', default=None,
                          help="Day portion of the flight date.")
        parser.add_option("--yyyymmdd", dest="yyyymmdd", default=None,
                          help="Specify the year, month, and day in one YYYYMMDD string.")

        parser.add_option("--site", dest="site", default=None,
                          help="Name of the location of the images (AN or GR)")

        parser.add_option("--start-frame", dest="startFrame", type='int',
                          default=icebridge_common.getSmallestFrame(),
                          help="Frame number or start of frame sequence")
        parser.add_option("--stop-frame", dest="stopFrame", type='int',
                          default=icebridge_common.getLargestFrame(),
                          help="End of frame sequence to download.")
        parser.add_option("--all-frames", action="store_true", dest="allFrames",
                          default=False,
                          help="Fetch all frames for this flight.")

        parser.add_option("--skip-validate", action="store_true", dest="skipValidate",
                          default=False,
                          help="Skip input data validation.")
        parser.add_option("--ignore-missing-lidar", action="store_true",
                          dest="ignoreMissingLidar", default=False,
                          help="Keep going if the lidar is missing.")

        parser.add_option("--frame-skip", dest="frameSkip", type='int', default=0,
                          help="Skip this many frames between downloads.")

        parser.add_option("--dry-run", action="store_true", dest="dryRun",
                          default=False,
                          help="Just print the image/ortho/fireball download commands.")

        parser.add_option("--refetch-index", action="store_true", dest="refetchIndex",
                          default=False,
                          help="Force refetch of the index file.")
        parser.add_option("--stop-after-index-fetch", action="store_true",
                          dest="stopAfterIndexFetch", default=False,
                          help="Stop after fetching the indices.")

        parser.add_option('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch',
                          default=-1, type='int',
                          help='The maximum number of lidar files to fetch. ' + \
                          'This is used in debugging.')

        # This call handles all the parallel_mapproject specific options.
        (options, args) = parser.parse_args(argsIn)

        if len(args) != 1:
            logger.info('Error: Missing output folder.\n' + usage)
            return -1
        outputFolder = os.path.abspath(args[0])

        # TODO: Restore "type" input parameter so that outside users who do not use
        # our folder convention can use this tool.
        options.type = icebridge_common.folderToType(outputFolder)
        if options.type == 'lidar':
            options.type = LIDAR_TYPES[0]
        # Py2/py3-compatible: a single parenthesized argument.
        print('Detected type: ' + options.type)

        # Handle unified date option
        if options.yyyymmdd:
            options.year  = int(options.yyyymmdd[0:4])
            options.month = int(options.yyyymmdd[4:6])
            options.day   = int(options.yyyymmdd[6:8])

        if not options.stopFrame:
            options.stopFrame = options.startFrame

        # Error checking
        if (not options.year) or (not options.month) or (not options.day):
            logger.error('Error: year, month, and day must be provided.\n' + usage)
            return -1

        # Ortho and Fireball DEM files don't need this information to find them.
        if (options.type == 'jpeg') and not (options.site == 'AN' or options.site == 'GR'):
            logger.error('Error, site must be AN or GR for images.\n' + usage)
            return -1

        KNOWN_TYPES = ['jpeg', 'ortho', 'fireball', 'nav'] + LIDAR_TYPES
        if not (options.type.lower() in KNOWN_TYPES):
            logger.error('Error, type must be image, ortho, fireball, or a lidar type.\n' + usage)
            return -1

    except optparse.OptionError as msg:
        raise Exception(msg)
def solveIntrinsics_Part1(options, jpegFolder, cameraFolder, navCameraFolder, processedFolder,
                          logger):
    '''Some preliminary work before solving for intrinsics. Here we look up the
    default calibration file, and generate an RPC approximation of its
    distortion model with polynomials of degree 4. We will then create cameras
    and stereo DEMs using this initial camera file with RPC distortion.

    Returns the (possibly modified) options together with the suffixed
    cameraFolder, navCameraFolder and processedFolder.'''

    # Sanity checks
    if options.startFrame == icebridge_common.getSmallestFrame() or \
       options.stopFrame == icebridge_common.getLargestFrame():
        raise Exception("When solving for intrinsics, must specify a frame range.")
    if options.bundleLength != 2:
        raise Exception("When solving for intrinsics, we assume bundle length of 2.")
    if (options.stopFrame - options.startFrame) % 2 == 0:
        raise Exception("When solving for intrinsics, must have an even number of frames, " +
                        " so stopFrame - startFrame must be odd.")
    if options.processingSubfolder:
        raise Exception("Processing subfolder not supported when solving for intrinsics.")

    # Generate extra data we will use later to float intrinsics
    options.stereoArgs += " --num-matches-from-disp-triplets 10000 --unalign-disparity " # --enable-fill-holes "

    # Create separate directories for cameras and processed data,
    # as these will be distinct than what we will finally be
    # using to do the full run.
    suff = "_camgen"
    cameraFolder    += suff
    navCameraFolder += suff
    processedFolder += suff

    # Get the input calibration file. It must be the same one for every frame
    # in the range.
    defaultCalibFile = ""
    for frame in range(options.startFrame, options.stopFrame + 1):
        currCalibFile = input_conversions.getCalibrationFileForFrame(
            options.cameraLookupFile, options.inputCalFolder, frame,
            options.yyyymmdd, options.site, logger)
        if defaultCalibFile == "":
            defaultCalibFile = currCalibFile
        if defaultCalibFile != currCalibFile:
            # This is important, the calibration file must be unique
            raise Exception("Found two distinct calibration files: " + defaultCalibFile + \
                            " and " + currCalibFile)

    logger.info("Default calibration file: " + defaultCalibFile)
    if options.inputCalCamera != "":
        defaultCalibFile = options.inputCalCamera
        logger.info("Using instead the user-provided: " + defaultCalibFile)

    # Find the first image in the range
    jpegIndex = icebridge_common.csvIndexFile(jpegFolder)
    (jpegFrameDict, jpegUrlDict) = icebridge_common.readIndexFile(jpegIndex,
                                                                  prependFolder=True)
    if options.startFrame not in jpegFrameDict:
        # Bug fix: startFrame is an int; it must be stringified before
        # concatenation, otherwise this line itself raises a TypeError.
        raise Exception("Could not find jpeg image for frame: " + str(options.startFrame))
    firstImage = jpegFrameDict[options.startFrame]

    # Create the RPC file before optimization
    rpcCalibFile = os.path.join(processedFolder, os.path.basename(defaultCalibFile))
    rpcCalibFile = rpcCalibFile.replace(".tsai", "_INIT_RPC.tsai")
    logger.info("Will approximate camera model " + defaultCalibFile + " with " + \
                options.outputModelType + " model " + rpcCalibFile)
    os.system("mkdir -p " + os.path.dirname(rpcCalibFile))
    cmd = "convert_pinhole_model --input-file " + firstImage + ' --camera-file ' + \
          defaultCalibFile + ' --output-type ' + options.outputModelType + \
          ' --sample-spacing 50 -o ' + rpcCalibFile
    logger.info(cmd)
    os.system(cmd)

    # Use this one from now on
    options.inputCalCamera = rpcCalibFile

    # Return the modified values
    return (options, cameraFolder, navCameraFolder, processedFolder)
def main(argsIn):
    '''Entry point for full_processing_script.py: define and parse all run
    options. (Fixes: Python-2-only "except E, msg" replaced by the "as"
    form used by the sibling scripts in this file, and the truncated
    --processing-subfolder help text restored.)'''
    try:
        # Sample usage:
        # python full_processing_script.py \
        #  --yyyymmdd 20091016 --site AN --num-processes 1 --num-threads 12 --bundle-length 12 \
        #  --start-frame 350 --stop-frame 353 --skip-validate \
        #  --camera-calibration-folder camera_calib \
        #  --reference-dem-folder ref_dem_folder
        # An output folder will be created automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''full_processing_script.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        parser.add_argument("--camera-lookup-file", dest="cameraLookupFile", default=None,
                            help="The file to use to find which camera was used for which " + \
                            "flight. By default it is in the same directory as this script " + \
                            "and named camera_lookup.txt.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2, type=int,
                            help="The number of images to bundle adjust and process " + \
                            "in a single batch.")
        # TODO: Compute this automatically??
        parser.add_argument('--overlap-limit', dest='overlapLimit', default=2, type=int,
                            help="The number of images to treat as overlapping for " + \
                            "bundle adjustment.")

        parser.add_argument('--stereo-arguments', dest='stereoArgs',
                            # set --min-xcorr-level 0 to do the left-to-right
                            # and right-to-left consistency check at the lowest level.
                            default='--stereo-algorithm 2 --min-xcorr-level 0',
                            help='Extra arguments to pass to stereo.')

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on.')
        parser.add_argument('--frames-file', dest='framesFile', default="",
                            help='Specific frames to run ortho2pinhole on within this frame range.')
        parser.add_argument('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch',
                            default=None, type=int,
                            help="The maximum number of lidar files to fetch. " + \
                            "This is used in debugging.")

        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder", default=None,
                            help="The folder containing camera calibration.")

        parser.add_argument("--input-calibration-camera", dest="inputCalCamera", default="",
                            help="Instead of looking up the calibrated camera in the calibration folder, use this one.")

        parser.add_argument("--output-calibration-camera", dest="outputCalCamera", default="",
                            help="If specified, float the intrinsics and write the optimized model here.")

        parser.add_argument("--output-model-type", dest="outputModelType", default="RPC",
                            help="Generate a distortion model of type RPC, RPC5, or RPC6.")

        parser.add_argument("--reference-dem-folder", dest="refDemFolder", default=None,
                            help="The folder containing DEMs that created orthoimages.")

        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. " + \
                            "The default is no additional folder.")

        parser.add_argument("--simple-cameras", action="store_true", dest="simpleCameras",
                            default=False,
                            help="Don't use orthoimages to refine the camera models.")

        # This option is only needed when generating camera models from the nav files.
        parser.add_argument('--camera-mounting', default=0, dest='cameraMounting', type=int,
                            help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1, type=int,
                            help='The number of simultaneous processes to run.')
        parser.add_argument('--num-ortho-processes', dest='numOrthoProcesses', default=-1,
                            type=int,
                            help='The number of simultaneous ortho processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8, type=int,
                            help='The number of threads per process.')
        parser.add_argument('--num-processes-per-batch', dest='numProcessesPerBatch',
                            default=1, type=int,
                            help='The number of simultaneous processes to run ' + \
                            'for each batch. This better be kept at 1 if running more than one batch.')

        # Action control
        parser.add_argument("--skip-fetch", action="store_true", dest="noFetch",
                            default=False,
                            help="Skip data fetching.")
        parser.add_argument("--skip-convert", action="store_true", dest="noConvert",
                            default=False,
                            help="Skip data conversion.")
        parser.add_argument("--stop-after-fetch", action="store_true", dest="stopAfterFetch",
                            default=False,
                            help="Stop program after data fetching.")
        parser.add_argument("--stop-after-convert", action="store_true", dest="stopAfterConvert",
                            default=False,
                            help="Stop program after data conversion.")
        parser.add_argument("--skip-validate", action="store_true", dest="skipValidate",
                            default=False,
                            help="Skip input data validation.")
        parser.add_argument("--ignore-missing-lidar", action="store_true",
                            dest="ignoreMissingLidar", default=False,
                            help="Keep going if the lidar is missing.")
        parser.add_argument("--log-batches", action="store_true", dest="logBatches",
                            default=False,
                            help="Log the required batch commands without running them.")
        parser.add_argument('--cleanup', action='store_true', default=False, dest='cleanup',
                            help='If the final result is produced delete intermediate files.')
        parser.add_argument('--many-ip', action='store_true', default=False, dest='manyip',
                            help='If to use a lot of IP in bundle adjustment from the beginning.')
        parser.add_argument("--dry-run", action="store_true", dest="dryRun",
                            default=False,
                            help="Set up the input directories but do not fetch/process any imagery.")

        parser.add_argument("--refetch", action="store_true", dest="reFetch",
                            default=False,
                            help="Try fetching again if some files turned out invalid " + \
                            "during conversions.")
        parser.add_argument("--refetch-index", action="store_true", dest="refetchIndex",
                            default=False,
                            help="Force refetch of the index file.")
        parser.add_argument("--refetch-nav", action="store_true", dest="refetchNav",
                            default=False,
                            help="Force refetch of the nav file.")
        parser.add_argument("--stop-after-index-fetch", action="store_true",
                            dest="stopAfterIndexFetch", default=False,
                            help="Stop after fetching the indices.")

        parser.add_argument("--no-nav", action="store_true", dest="noNavFetch",
                            default=False,
                            help="Don't fetch or convert the nav data.")

        parser.add_argument("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                            default=False,
                            help="Skip lidar files in the conversion step.")
        parser.add_argument("--no-ortho-convert", action="store_true", dest="noOrthoConvert",
                            default=False,
                            help="Skip generating camera models in the conversion step.")
        parser.add_argument("--skip-fast-conversions", action="store_true",
                            dest="skipFastConvert", default=False,
                            help="Skips all non-ortho conversions.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)
def main(argsIn):
    '''Entry point for multi_process_command_runner.py: read batch commands
    from a file and run those in the requested frame range through a
    multiprocessing pool, optionally wiping and redoing listed frames.
    (Fix: --command-file-path defaults to None; os.path.exists(None) and the
    string concatenation in the error message both raise TypeError on
    Python 3, so the missing-flag case is now guarded explicitly.)'''
    try:
        usage = '''usage: multi_process_command_runner.py ...'''

        parser = argparse.ArgumentParser(usage=usage)

        # Data selection optios
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This last one will not be processed.')

        parser.add_argument('--num-processes', dest='numProcesses', type=int,
                            default=-1,
                            help='How many processes to start at the same time.')

        parser.add_argument("--command-file-path", dest="commandFilePath", default=None,
                            help="The file from where to read the commands to process.")

        parser.add_argument("--force-redo-these-frames", dest="redoFrameList", default="",
                            help="For each frame in this file (stored one per line) " + \
                            "within the current frame range, delete the batch folder and redo the batch.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    os.system("ulimit -c 0")  # disable core dumps
    os.system("umask 022")    # enforce files be readable by others

    # Guard against both an unset flag (None) and a nonexistent path.
    if (options.commandFilePath is None) or (not os.path.exists(options.commandFilePath)):
        print('Error: File ' + str(options.commandFilePath) + ' does not exist!')
        return -1

    # TODO: Write to a log?

    print('Starting processing pool with ' + str(options.numProcesses) + ' processes.')
    pool = multiprocessing.Pool(options.numProcesses)
    taskHandles = []

    # Parse the list of frames to force-redo, one frame number per line.
    framesToDo = set()
    if options.redoFrameList != "" and os.path.exists(options.redoFrameList):
        with open(options.redoFrameList, 'r') as f:
            text = f.read()
        for line in text.split('\n'):
            line = line.strip()
            if line == "":
                continue
            framesToDo.add(int(line))

    # Open the file and loop through all the lines
    # - Count the lines as we go so we only process the desired lines
    print('Opening command file ' + options.commandFilePath)
    text = ''
    with open(options.commandFilePath, 'r') as f:
        text = f.read()
    for line in text.split('\n'):

        if line == "":
            continue

        (begFrame, endFrame) = icebridge_common.getFrameRangeFromBatchFolder(line)

        # Check line indices
        if begFrame >= options.startFrame and begFrame < options.stopFrame:

            if options.redoFrameList != "":
                if begFrame in framesToDo:
                    folderName = icebridge_common.getBatchFolderFromBatchLine(line)
                    if os.path.exists(folderName):
                        print("will wipe " + folderName)
                        cmd = "rm -rf " + folderName
                        print(cmd)
                        # Best-effort wipe; a failure here should not stop the run.
                        try:
                            os.system(cmd)
                        except Exception:
                            pass
                    else:
                        print("Could not find " + folderName)
                else:
                    print("Will skip frame: " + str(begFrame))
                    continue

            # Add the command to the task pool
            taskHandles.append(pool.apply_async(runCommand, (line,)))

    # Wait for all the tasks to complete
    print('Finished adding ' + str(len(taskHandles)) + ' tasks to the pool.')
    icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, interactive=False)

    # All tasks should be finished, clean up the processing pool
    icebridge_common.stopTaskPool(pool)
    print('Jobs finished.')
def main(argsIn):
    '''Parse arguments and run the full IceBridge processing pipeline:
    fetch and convert the input data (with an optional retry), then process
    the run, and optionally solve for camera intrinsics. Returns 0 when
    stopping early after fetch/convert; raises Exception on bad inputs.'''
    try:
        # Sample usage:
        # python full_processing_script.py \
        #  --yyyymmdd 20091016 --site AN --num-processes 1 --num-threads 12 --bundle-length 12 \
        #  --start-frame 350 --stop-frame 353 --skip-validate \
        # --camera-calibration-folder camera_calib \
        #  --reference-dem-folder ref_dem_folder
        # An output folder will be created automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''full_processing_script.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument("--yyyymmdd",  dest="yyyymmdd", required=True,
                          help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site",  dest="site", required=True,
                          help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder",  dest="outputFolder", default=None,
                          help="Name of the output folder. If not specified, " + \
                          "use something like AN_YYYYMMDD.")

        parser.add_argument("--camera-lookup-file",  dest="cameraLookupFile", default=None,
                          help="The file to use to find which camera was used for which "  + \
                          "flight. By default it is in the same directory as this script " + \
                          "and named camera_lookup.txt.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                          type=int, help="The number of images to bundle adjust and process " + \
                          "in a single batch.")
        # TODO: Compute this automatically??
        parser.add_argument('--overlap-limit', dest='overlapLimit', default=2,
                          type=int, help="The number of images to treat as overlapping for " + \
                          "bundle adjustment.")

        parser.add_argument('--stereo-arguments', dest='stereoArgs',
                            # set --min-xcorr-level 0 to do the left-to-right
                            # and right-to-left consistency check at the lowest level.
                            default='--stereo-algorithm 2 --min-xcorr-level 0',
                            help='Extra arguments to pass to stereo.')

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                          default=icebridge_common.getSmallestFrame(),
                          help="Frame to start with. Leave this and stop-frame blank to " + \
                          "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                          default=icebridge_common.getLargestFrame(),
                          help='Frame to stop on.')
        parser.add_argument('--frames-file', dest='framesFile', default="",
                          help='Specific frames to run ortho2pinhole on within this frame range.')
        parser.add_argument('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch', default=None,
                          type=int, help="The maximum number of lidar files to fetch. " + \
                          "This is used in debugging.")

        parser.add_argument("--camera-calibration-folder",  dest="inputCalFolder", default=None,
                          help="The folder containing camera calibration.")

        parser.add_argument("--input-calibration-camera",  dest="inputCalCamera", default="",
                          help="Instead of looking up the calibrated camera in the calibration folder, use this one.")

        parser.add_argument("--output-calibration-camera",  dest="outputCalCamera", default="",
                          help="If specified, float the intrinsics and write the optimized model here.")

        parser.add_argument("--output-model-type",  dest="outputModelType", default="RPC",
                          help="Generate a distortion model of type RPC, RPC5, or RPC6.")
        parser.add_argument("--reference-dem-folder",  dest="refDemFolder", default=None,
                          help="The folder containing DEMs that created orthoimages.")

        # Fixed truncated help text ("fault is no additional folder"); full
        # sentence matches the same option's help in blend_dems.py.
        parser.add_argument("--processing-subfolder",  dest="processingSubfolder", default=None,
                          help="Specify a subfolder name where the processing outputs will go. " + \
                          "The default is no additional folder.")

        parser.add_argument("--simple-cameras", action="store_true", dest="simpleCameras", default=False,
                          help="Don't use orthoimages to refine the camera models.")

        # This option is only needed when generating camera models from the nav files.
        parser.add_argument('--camera-mounting', default=0, dest='cameraMounting', type=int,
              help='0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.')

        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                          type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-ortho-processes', dest='numOrthoProcesses', default=-1,
                          type=int, help='The number of simultaneous ortho processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                          type=int, help='The number of threads per process.')

        # Action control
        parser.add_argument("--skip-fetch", action="store_true", dest="noFetch", default=False,
                          help="Skip data fetching.")
        parser.add_argument("--skip-convert", action="store_true", dest="noConvert", default=False,
                          help="Skip data conversion.")
        parser.add_argument("--stop-after-fetch", action="store_true", dest="stopAfterFetch",
                          default=False,
                          help="Stop program after data fetching.")
        parser.add_argument("--stop-after-convert", action="store_true", dest="stopAfterConvert",
                          default=False,
                          help="Stop program after data conversion.")
        parser.add_argument("--skip-validate", action="store_true", dest="skipValidate",
                          default=False,
                          help="Skip input data validation.")
        parser.add_argument("--ignore-missing-lidar", action="store_true", dest="ignoreMissingLidar",
                          default=False,
                          help="Keep going if the lidar is missing.")
        parser.add_argument("--log-batches", action="store_true", dest="logBatches", default=False,
                          help="Log the required batch commands without running them.")
        parser.add_argument('--cleanup', action='store_true', default=False, dest='cleanup',
                          help='If the final result is produced delete intermediate files.')
        parser.add_argument('--many-ip', action='store_true', default=False, dest='manyip',
                          help='If to use a lot of IP in bundle adjustment from the beginning.')
        parser.add_argument("--dry-run", action="store_true", dest="dryRun", default=False,
                          help="Set up the input directories but do not fetch/process any imagery.")

        parser.add_argument("--refetch", action="store_true", dest="reFetch", default=False,
                          help="Try fetching again if some files turned out invalid " + \
                          "during conversions.")
        parser.add_argument("--refetch-index", action="store_true", dest="refetchIndex",
                          default=False,
                          help="Force refetch of the index file.")
        parser.add_argument("--refetch-nav", action="store_true", dest="refetchNav",
                          default=False,
                          help="Force refetch of the nav file.")
        parser.add_argument("--stop-after-index-fetch", action="store_true",
                          dest="stopAfterIndexFetch", default=False,
                          help="Stop after fetching the indices.")

        parser.add_argument("--no-nav", action="store_true", dest="noNavFetch",
                          default=False, help="Don't fetch or convert the nav data.")

        parser.add_argument("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                          default=False,
                          help="Skip lidar files in the conversion step.")
        parser.add_argument("--no-ortho-convert", action="store_true", dest="noOrthoConvert",
                          default=False,
                          help="Skip generating camera models in the conversion step.")
        parser.add_argument("--skip-fast-conversions", action="store_true", dest="skipFastConvert",
                          default=False,
                          help="Skips all non-ortho conversions.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # A negative ortho process count means "inherit the main process count".
    if options.numOrthoProcesses < 0:
        options.numOrthoProcesses = options.numProcesses

    isSouth = icebridge_common.checkSite(options.site)

    # Turned off elevation limits here since they are being set from LIDAR data.
    ## Add the site based elevation limits to the stereoArgs option
    #altLimits = icebridge_common.getElevationLimits(options.site)
    #options.stereoArgs = (' %s --elevation-limit %f %f '
    #                      % (options.stereoArgs, altLimits[0], altLimits[1]))
    options.stereoArgs = (' %s ' % (options.stereoArgs))

    # Default camera lookup file lives next to this script.
    if options.cameraLookupFile is None:
        options.cameraLookupFile = P.join(basepath, 'camera_lookup.txt')
    if not os.path.isfile(options.cameraLookupFile):
        raise Exception("Can't find camera file: " + options.cameraLookupFile)

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)

    # Stopping after the index fetch implies stopping after the fetch.
    if options.stopAfterIndexFetch:
        options.stopAfterFetch = True

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Record everything
    logger   = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                            'icebridge_processing_log_frames_' + \
                                            str(options.startFrame) + "_" + str(options.stopFrame))

    # Make sure we later know what we were doing
    logger.info("full_processing_script.py " + " ".join(argsIn))

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput = True)
    logger.info("Running on machine: " + out)
    logger.info("Work dir is " + os.getcwd())

    os.system("ulimit -c 0") # disable core dumps
    os.system("umask 022")   # enforce files be readable by others

    # Perform some input checks and initializations
    # These are not needed unless cameras are initialized
    if options.inputCalFolder is None or not os.path.exists(options.inputCalFolder):
        raise Exception("Missing camera calibration folder.")
    if options.refDemFolder is None or not os.path.exists(options.refDemFolder):
        raise Exception("Missing reference DEM folder.")

    refDemName = icebridge_common.getReferenceDemName(options.site)
    refDemPath = os.path.join(options.refDemFolder, refDemName)
    if not os.path.exists(refDemPath):
        raise Exception("Missing reference DEM: " + refDemPath)

    # TODO: CLEAN UP!!!
    # Set up the output folders
    cameraFolder       = icebridge_common.getCameraFolder(options.outputFolder)
    imageFolder        = icebridge_common.getImageFolder(options.outputFolder)
    jpegFolder         = icebridge_common.getJpegFolder(options.outputFolder)
    orthoFolder        = icebridge_common.getOrthoFolder(options.outputFolder)
    fireballFolder     = icebridge_common.getFireballFolder(options.outputFolder)
    corrFireballFolder = icebridge_common.getCorrFireballFolder(options.outputFolder)
    lidarFolder        = icebridge_common.getLidarFolder(options.outputFolder)
    navFolder          = icebridge_common.getNavFolder(options.outputFolder)
    navCameraFolder    = icebridge_common.getNavCameraFolder(options.outputFolder)
    processedFolder    = icebridge_common.getProcessedFolder(options.outputFolder)

    if options.outputCalCamera != "":
        # Prepare to solve for intrinsics. Note that this modifies some things along the way.
        (options, cameraFolder, navCameraFolder, processedFolder) = \
                  solveIntrinsics_Part1(options, jpegFolder, cameraFolder, navCameraFolder,
                                        processedFolder, logger)

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processedFolder = os.path.join(processedFolder, options.processingSubfolder)
        logger.info('Will write to processing subfolder: ' + options.processingSubfolder)

    # If something failed in the first attempt either in fetch or in
    # convert, we will wipe bad files, and try to refetch/re-convert.
    numAttempts = 1
    if options.reFetch and (not options.noFetch):
        numAttempts = 2

    for attempt in range(numAttempts):
        if numAttempts > 1:
            logger.info("Fetch/convert attempt: " + str(attempt+1))
        ans = runFetchConvert(options, isSouth, cameraFolder, imageFolder, jpegFolder, orthoFolder,
                              fireballFolder, corrFireballFolder, lidarFolder, processedFolder,
                              navFolder, navCameraFolder, refDemPath, logger)
        if ans == 0:
            break

    if options.stopAfterFetch or options.dryRun or options.stopAfterConvert:
        logger.info('Fetch/convert finished!')
        return 0

    # Call the processing routine
    processTheRun(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                  corrFireballFolder, processedFolder,
                  isSouth, refDemPath)

    if options.outputCalCamera != "":
        # Finish solving for intrinsics.
        solveIntrinsics_Part2(options, imageFolder, cameraFolder, lidarFolder, orthoFolder,
                              processedFolder, isSouth, logger)
def main(argsIn):
    '''Parse arguments and run the full IceBridge processing pipeline:
    fetch and convert the input data (with an optional retry), then process
    the run, and optionally solve for camera intrinsics. Returns 0 when
    stopping early after fetch/convert; raises Exception on bad inputs.'''
    try:
        # Sample usage:
        # python full_processing_script.py \
        #  --yyyymmdd 20091016 --site AN --num-processes 1 --num-threads 12 --bundle-length 12 \
        #  --start-frame 350 --stop-frame 353 --skip-validate \
        # --camera-calibration-folder camera_calib \
        #  --reference-dem-folder ref_dem_folder
        # An output folder will be created automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''full_processing_script.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument(
            "--yyyymmdd",
            dest="yyyymmdd",
            required=True,
            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument(
            "--site",
            dest="site",
            required=True,
            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                          help="Name of the output folder. If not specified, " + \
                          "use something like AN_YYYYMMDD.")

        parser.add_argument("--camera-lookup-file", dest="cameraLookupFile", default=None,
                          help="The file to use to find which camera was used for which " + \
                          "flight. By default it is in the same directory as this script " + \
                          "and named camera_lookup.txt.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                          type=int, help="The number of images to bundle adjust and process " + \
                          "in a single batch.")
        # TODO: Compute this automatically??
        parser.add_argument('--overlap-limit', dest='overlapLimit', default=2,
                          type=int, help="The number of images to treat as overlapping for " + \
                          "bundle adjustment.")

        parser.add_argument(
            '--max-overlap-ratio',
            dest='maxOverlapRatio',
            default=0.85,
            type=float,
            help=
            'The maximum ratio of overlap between images to be accepted as part of a stereo pair. When floating intrinsics, this will be set to 1, to not upset some bookkeeping.'
        )

        parser.add_argument(
            '--stereo-arguments',
            dest='stereoArgs',
            # set --min-xcorr-level 0 to do the left-to-right
            # and right-to-left consistency check at the lowest level.
            default='--stereo-algorithm 2 --min-xcorr-level 0',
            help='Extra arguments to pass to stereo.')

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                          default=icebridge_common.getSmallestFrame(),
                          help="Frame to start with. Leave this and stop-frame blank to " + \
                          "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                          default=icebridge_common.getLargestFrame(),
                          help='Frame to stop on.')
        parser.add_argument(
            '--frames-file',
            dest='framesFile',
            default="",
            help=
            'Specific frames to run ortho2pinhole on within this frame range.')
        parser.add_argument('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch',
                          default=None, type=int,
                          help="The maximum number of lidar files to fetch. " + \
                          "This is used in debugging.")

        parser.add_argument("--camera-calibration-folder", dest="inputCalFolder", default=None,
                          help="The folder containing camera calibration.")

        parser.add_argument(
            "--input-calibration-camera",
            dest="inputCalCamera",
            default="",
            help=
            "Instead of looking up the calibrated camera in the calibration folder, use this one."
        )

        parser.add_argument(
            "--output-calibration-camera",
            dest="outputCalCamera",
            default="",
            help=
            "If specified, float the intrinsics and write the optimized model here."
        )

        parser.add_argument(
            "--output-model-type",
            dest="outputModelType",
            default="RPC",
            help="Generate a distortion model of type RPC, RPC5, or RPC6.")
        parser.add_argument(
            "--reference-dem-folder",
            dest="refDemFolder",
            default=None,
            help="The folder containing DEMs that created orthoimages.")

        # Fixed truncated help text ("fault is no additional folder"); full
        # sentence matches the same option's help in blend_dems.py.
        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                          help="Specify a subfolder name where the processing outputs will go. " + \
                          "The default is no additional folder.")

        parser.add_argument(
            "--simple-cameras",
            action="store_true",
            dest="simpleCameras",
            default=False,
            help="Don't use orthoimages to refine the camera models.")

        # This option is only needed when generating camera models from the nav files.
        parser.add_argument(
            '--camera-mounting',
            default=0,
            dest='cameraMounting',
            type=int,
            help=
            '0=right-forwards, 1=left-forwards, 2=top-forwards, 3=bottom-forwards.'
        )

        # Performance options
        parser.add_argument(
            '--num-processes',
            dest='numProcesses',
            default=1,
            type=int,
            help='The number of simultaneous processes to run.')
        parser.add_argument(
            '--num-ortho-processes',
            dest='numOrthoProcesses',
            default=-1,
            type=int,
            help='The number of simultaneous ortho processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                          type=int, help='The number of threads per process.')

        # Action control
        parser.add_argument("--skip-fetch", action="store_true", dest="noFetch", default=False,
                          help="Skip data fetching.")
        parser.add_argument("--skip-convert", action="store_true", dest="noConvert",
                          default=False,
                          help="Skip data conversion.")
        parser.add_argument("--stop-after-fetch", action="store_true", dest="stopAfterFetch",
                          default=False,
                          help="Stop program after data fetching.")
        parser.add_argument("--stop-after-convert", action="store_true", dest="stopAfterConvert",
                          default=False,
                          help="Stop program after data conversion.")
        parser.add_argument("--skip-validate", action="store_true", dest="skipValidate",
                          default=False,
                          help="Skip input data validation.")
        parser.add_argument("--ignore-missing-lidar", action="store_true", dest="ignoreMissingLidar",
                          default=False,
                          help="Keep going if the lidar is missing.")
        parser.add_argument(
            "--log-batches",
            action="store_true",
            dest="logBatches",
            default=False,
            help="Log the required batch commands without running them.")
        parser.add_argument(
            '--cleanup',
            action='store_true',
            default=False,
            dest='cleanup',
            help='If the final result is produced delete intermediate files.')
        parser.add_argument(
            '--many-ip',
            action='store_true',
            default=False,
            dest='manyip',
            help=
            'If to use a lot of IP in bundle adjustment from the beginning.')
        parser.add_argument(
            "--dry-run",
            action="store_true",
            dest="dryRun",
            default=False,
            help=
            "Set up the input directories but do not fetch/process any imagery."
        )

        parser.add_argument("--refetch", action="store_true", dest="reFetch", default=False,
                          help="Try fetching again if some files turned out invalid " + \
                          "during conversions.")
        parser.add_argument("--refetch-index", action="store_true", dest="refetchIndex",
                          default=False,
                          help="Force refetch of the index file.")
        parser.add_argument("--refetch-nav", action="store_true", dest="refetchNav",
                          default=False,
                          help="Force refetch of the nav file.")
        parser.add_argument("--stop-after-index-fetch", action="store_true",
                          dest="stopAfterIndexFetch", default=False,
                          help="Stop after fetching the indices.")

        parser.add_argument("--no-nav", action="store_true", dest="noNavFetch",
                          default=False, help="Don't fetch or convert the nav data.")

        parser.add_argument("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                          default=False,
                          help="Skip lidar files in the conversion step.")
        parser.add_argument(
            "--no-ortho-convert",
            action="store_true",
            dest="noOrthoConvert",
            default=False,
            help="Skip generating camera models in the conversion step.")
        parser.add_argument("--skip-fast-conversions", action="store_true", dest="skipFastConvert",
                          default=False,
                          help="Skips all non-ortho conversions.")

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # A negative ortho process count means "inherit the main process count".
    if options.numOrthoProcesses < 0:
        options.numOrthoProcesses = options.numProcesses

    isSouth = icebridge_common.checkSite(options.site)

    # Turned off elevation limits here since they are being set from LIDAR data.
    ## Add the site based elevation limits to the stereoArgs option
    #altLimits = icebridge_common.getElevationLimits(options.site)
    #options.stereoArgs = (' %s --elevation-limit %f %f '
    #                      % (options.stereoArgs, altLimits[0], altLimits[1]))
    options.stereoArgs = (' %s ' % (options.stereoArgs))

    # Default camera lookup file lives next to this script.
    if options.cameraLookupFile is None:
        options.cameraLookupFile = P.join(basepath, 'camera_lookup.txt')
    if not os.path.isfile(options.cameraLookupFile):
        raise Exception("Can't find camera file: " + options.cameraLookupFile)

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(
            options.site, options.yyyymmdd)

    # Stopping after the index fetch implies stopping after the fetch.
    if options.stopAfterIndexFetch:
        options.stopAfterFetch = True

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO  # Record everything
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_processing_log_frames_' + \
                                          str(options.startFrame) + "_" + str(options.stopFrame))

    # Make sure we later know what we were doing
    logger.info("full_processing_script.py " + " ".join(argsIn))

    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)
    logger.info("Work dir is " + os.getcwd())

    os.system("ulimit -c 0")  # disable core dumps
    os.system("umask 022")  # enforce files be readable by others

    # Perform some input checks and initializations
    # These are not needed unless cameras are initialized
    if options.inputCalFolder is None or not os.path.exists(
            options.inputCalFolder):
        raise Exception("Missing camera calibration folder.")
    if options.refDemFolder is None or not os.path.exists(
            options.refDemFolder):
        raise Exception("Missing reference DEM folder.")

    refDemName = icebridge_common.getReferenceDemName(options.site)
    refDemPath = os.path.join(options.refDemFolder, refDemName)
    if not os.path.exists(refDemPath):
        raise Exception("Missing reference DEM: " + refDemPath)

    # TODO: CLEAN UP!!!
    # Set up the output folders
    cameraFolder = icebridge_common.getCameraFolder(options.outputFolder)
    imageFolder = icebridge_common.getImageFolder(options.outputFolder)
    jpegFolder = icebridge_common.getJpegFolder(options.outputFolder)
    orthoFolder = icebridge_common.getOrthoFolder(options.outputFolder)
    fireballFolder = icebridge_common.getFireballFolder(options.outputFolder)
    corrFireballFolder = icebridge_common.getCorrFireballFolder(
        options.outputFolder)
    lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)
    navFolder = icebridge_common.getNavFolder(options.outputFolder)
    navCameraFolder = icebridge_common.getNavCameraFolder(options.outputFolder)
    processedFolder = icebridge_common.getProcessedFolder(options.outputFolder)

    if options.outputCalCamera != "":
        # Intrinsics optimization requires consecutive-frame stereo pairs only.
        if options.maxOverlapRatio < 1:
            raise Exception ("For optimizing intrinsics, must set --max-overlap-ratio to 1, " + \
                             "to always use consecutive frames.")

        # Prepare to solve for intrinsics. Note that this modifies some things along the way.
        (options, cameraFolder, navCameraFolder, processedFolder) = \
                  solveIntrinsics_Part1(options, jpegFolder, cameraFolder, navCameraFolder,
                                        processedFolder, logger)

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processedFolder = os.path.join(processedFolder,
                                       options.processingSubfolder)
        logger.info('Will write to processing subfolder: ' +
                    options.processingSubfolder)

    # If something failed in the first attempt either in fetch or in
    # convert, we will wipe bad files, and try to refetch/re-convert.
    numAttempts = 1
    if options.reFetch and (not options.noFetch):
        numAttempts = 2

    for attempt in range(numAttempts):
        if numAttempts > 1:
            logger.info("Fetch/convert attempt: " + str(attempt + 1))
        ans = runFetchConvert(options, isSouth, cameraFolder, imageFolder,
                              jpegFolder, orthoFolder, fireballFolder,
                              corrFireballFolder, lidarFolder,
                              processedFolder, navFolder, navCameraFolder,
                              refDemPath, logger)
        if ans == 0:
            break

    if options.stopAfterFetch or options.dryRun or options.stopAfterConvert:
        logger.info('Fetch/convert finished!')
        return 0

    # Call the processing routine
    processTheRun(options, imageFolder, cameraFolder, lidarFolder,
                  orthoFolder, corrFireballFolder, processedFolder, isSouth,
                  refDemPath)

    if options.outputCalCamera != "":
        # Finish solving for intrinsics.
        solveIntrinsics_Part2(options, imageFolder, cameraFolder, lidarFolder,
                              orthoFolder, processedFolder, isSouth, logger)
def main(argsIn):
    '''Parse the blend_dems.py command-line options.

    Fix: the original used the Python 2-only form
    "except argparse.ArgumentError, msg:", which is a SyntaxError under
    Python 3; it now uses the "as" form, consistent with the other
    argument-parsing functions in this file.'''
    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/blend_dems.py --site GR \
        #   --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #   --num-threads 8 --num-processes 10
        usage = '''blend_dems.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument(
            "--yyyymmdd",
            dest="yyyymmdd",
            required=True,
            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument(
            "--site",
            dest="site",
            required=True,
            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                          help="Name of the output folder. If not specified, " + \
                          "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                          type=int, help="The number of images to bundle adjust and process " + \
                          "in a single batch.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                          default=icebridge_common.getSmallestFrame(),
                          help="Frame to start with. Leave this and stop-frame blank to " + \
                          "process all frames.")
        parser.add_argument(
            '--stop-frame',
            dest='stopFrame',
            type=int,
            default=icebridge_common.getLargestFrame(),
            help='Frame to stop on. This frame will also be processed.')

        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                          help="Specify a subfolder name where the processing outputs will go. "+\
                          "The default is no additional folder.")

        parser.add_argument(
            "--compute-diff-to-prev-dem",
            action="store_true",
            dest="computeDiffToPrev",
            default=False,
            help="Compute the absolute difference between the current DEM " +
            "and the one before it.")

        parser.add_argument("--blend-to-fireball-footprint", action="store_true",
                          dest="blendToFireball", default=False,
                          help="Create additional blended DEMs having the same " + \
                          "footprint as Fireball DEMs.")

        # Performance options
        parser.add_argument(
            '--num-processes',
            dest='numProcesses',
            default=1,
            type=int,
            help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                          type=int, help='The number of threads per process.')

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)
def doFetch(options, outputFolder):
    '''The main fetch function. Returns the number of failures.

    Builds a curl command using NSIDC Earthdata cookies, fetches/parses the
    index file, selects the files for the requested frame range (with special
    handling for lidar), downloads them in batches, then validates the
    downloads (checksum / latitude / tfw checks), wiping bad files.
    NOTE(review): uses module-level globals (logger, LIDAR_TYPES,
    MAX_IN_ONE_CALL, fetchNavData, fetchAndParseIndexFile, lidarFilesInRange)
    defined elsewhere in this file.'''

    # Verify that required files exist
    home = os.path.expanduser("~")
    if not (os.path.exists(home+'/.netrc') and os.path.exists(home+'/.urs_cookies')):
        logger.error('Missing a required authentication file!  See instructions here:\n' +
                     '    https://nsidc.org/support/faq/what-options-are-available-bulk-' +
                     'downloading-data-https-earthdata-login-enabled')
        return -1

    # Base curl command: -n reads ~/.netrc, -L follows redirects, and the
    # cookie jar is both read (-b) and written (-c) for Earthdata login.
    curlPath = asp_system_utils.which("curl")
    curlOpts    = ' -n -L '
    cookiePaths = ' -b ~/.urs_cookies -c ~/.urs_cookies '
    baseCurlCmd = curlPath + curlOpts + cookiePaths

    logger.info('Creating output folder: ' + outputFolder)
    os.system('mkdir -p ' + outputFolder)

    isSouth = (options.site == 'AN')

    if options.type == 'nav': # Nav fetching is much less complicated
        return fetchNavData(options, outputFolder)

    parsedIndexPath = fetchAndParseIndexFile(options, isSouth, baseCurlCmd, outputFolder)
    if not icebridge_common.fileNonEmpty(parsedIndexPath):
        # Some dirs are weird, both images, fireball dems, and ortho.
        # Just accept whatever there is, but with a warning.
        logger.info('Warning: Missing index file: ' + parsedIndexPath)

    # Store file information in a dictionary
    # - Keep track of the earliest and latest frame
    logger.info('Reading file list from ' + parsedIndexPath)
    try:
        (frameDict, urlDict) = icebridge_common.readIndexFile(parsedIndexPath)
    except:
        # We probably ran into old format index file. Must refetch.
        logger.info('Could not read index file. Try again.')
        options.refetchIndex = True
        parsedIndexPath = fetchAndParseIndexFile(options, isSouth, baseCurlCmd, outputFolder)
        (frameDict, urlDict) = icebridge_common.readIndexFile(parsedIndexPath)

    if options.stopAfterIndexFetch:
        return 0

    isLidar = (options.type in LIDAR_TYPES)

    allFrames  = sorted(frameDict.keys())

    if not isLidar:
        # The lidar frames use a totally different numbering than the image/ortho/dem frames
        firstFrame = icebridge_common.getLargestFrame()    # start big
        lastFrame  = icebridge_common.getSmallestFrame()   # start small
        for frameNumber in allFrames:
            if frameNumber < firstFrame:
                firstFrame = frameNumber
            if frameNumber > lastFrame:
                lastFrame = frameNumber

        if options.allFrames:
            options.startFrame = firstFrame
            options.stopFrame  = lastFrame

    if isLidar:
        # Based on image frames, determine which lidar frames to fetch.
        if options.ignoreMissingLidar and len(frameDict.keys()) == 0:
            # Nothing we can do if this run has no lidar and we are told to continue
            logger.info("Warning: missing lidar, but continuing.")
            lidarsToFetch = set()
        else:
            lidarsToFetch = lidarFilesInRange(frameDict, outputFolder,
                                              options.startFrame, options.stopFrame)

    # There is always a chance that not all requested frames are available.
    # That is particularly true for Fireball DEMs. Instead of failing,
    # just download what is present and give a warning.
    if options.startFrame not in frameDict and not isLidar:
        logger.info("Warning: Frame " + str(options.startFrame) +
                    " is not found in this flight.")

    if options.stopFrame and (options.stopFrame not in frameDict) and not isLidar:
        logger.info("Warning: Frame " + str(options.stopFrame) +
                    " is not found in this flight.")

    allFilesToFetch = []   # Files that we will fetch, relative to the current dir.
    allUrlsToFetch  = []   # Full url of each file.

    # Loop through all found frames within the provided range
    currentFileCount = 0
    lastFrame = ""
    if len(allFrames) > 0:
        lastFrame = allFrames[len(allFrames)-1]

    # Which auxiliary files accompany each data file depends on the data type.
    hasTfw = (options.type == 'fireball')
    hasXml = ( isLidar or (options.type == 'ortho') or hasTfw )
    numFetched = 0
    skipCount  = 0
    for frame in allFrames:

        # Skip frame outside of range
        if isLidar:
            if frameDict[frame] not in lidarsToFetch:
                continue
        else:
            if ((frame < options.startFrame) or (frame > options.stopFrame) ):
                continue

        # Handle the frame skip option
        if options.frameSkip > 0:
            if skipCount < options.frameSkip:
                skipCount += 1
                continue
            skipCount = 0

        filename = frameDict[frame]

        # Some files have an associated xml file. Fireball DEMs also have a tfw file.
        currFilesToFetch = [filename]
        if hasXml:
            currFilesToFetch.append(icebridge_common.xmlFile(filename))
        if hasTfw:
            currFilesToFetch.append(icebridge_common.tfwFile(filename))

        for filename in currFilesToFetch:
            url        = os.path.join(urlDict[frame], filename)
            outputPath = os.path.join(outputFolder, filename)
            allFilesToFetch.append(outputPath)
            allUrlsToFetch.append(url)

    # Restrict lidar fetch amount according to the parameter
    # NOTE(review): if maxNumLidarToFetch can be None here (it defaults to
    # None in full_processing_script.py), "None > 0" raises TypeError on
    # Python 3 - confirm the fetcher always sets an integer.
    if (isLidar and options.maxNumLidarToFetch > 0 and
           len(allFilesToFetch) > options.maxNumLidarToFetch):

        # Ensure an even number, to fetch both the lidar file and its xml
        if options.maxNumLidarToFetch % 2 == 1:
            options.maxNumLidarToFetch += 1

        allFilesToFetch = allFilesToFetch[0:options.maxNumLidarToFetch]
        allUrlsToFetch  = allUrlsToFetch [0:options.maxNumLidarToFetch]

    icebridge_common.fetchFilesInBatches(baseCurlCmd, MAX_IN_ONE_CALL, options.dryRun,
                                         outputFolder,
                                         allFilesToFetch, allUrlsToFetch, logger)

    # Fetch from disk the set of already validated files, if any
    validFilesList = icebridge_common.validFilesList(os.path.dirname(outputFolder),
                                                     options.startFrame, options.stopFrame)
    validFilesSet = set()
    validFilesSet = icebridge_common.updateValidFilesListFromDisk(validFilesList, validFilesSet)
    numInitialValidFiles = len(validFilesSet)

    # Verify that all files were fetched and are in good shape
    failedFiles = []
    for outputPath in allFilesToFetch:

        if options.skipValidate:
            continue

        if not icebridge_common.fileNonEmpty(outputPath):
            logger.info('Missing file: ' + outputPath)
            failedFiles.append(outputPath)
            continue

        if icebridge_common.hasImageExtension(outputPath):
            # Image validation is deliberately disabled (guarded by "if False")
            # because it was too slow; see the comment below.
            if False:
                # This check is just so slow. Turn it off for now.
                # This will impact only the validation of jpegs,
                # as the other files can be validated via the checksum.
                # Jpegs will be validated when converting them to 1 band images
                if outputPath in validFilesSet and os.path.exists(outputPath):
                    #logger.info('Previously validated: ' + outputPath)   # verbose
                    continue
                else:
                    if not icebridge_common.isValidImage(outputPath):
                        logger.info('Found an invalid image. Will wipe it: ' + outputPath)
                        if os.path.exists(outputPath):
                            os.remove(outputPath)
                        failedFiles.append(outputPath)
                        continue
                    else:
                        logger.info('Valid image: ' + outputPath)
                        validFilesSet.add(outputPath) # mark it as validated

        # Sanity check: XML files must have the right latitude.
        if icebridge_common.fileExtension(outputPath) == '.xml':
            if outputPath in validFilesSet and os.path.exists(outputPath):
                #logger.info('Previously validated: ' + outputPath) #verbose
                continue
            else:
                if os.path.exists(outputPath):
                    try:
                        latitude = icebridge_common.parseLatitude(outputPath)
                        logger.info('Valid file: ' + outputPath)
                        validFilesSet.add(outputPath) # mark it as validated
                    except:
                        # Corrupted file
                        logger.info("Failed to parse latitude, will wipe: " + outputPath)
                        if os.path.exists(outputPath):
                            os.remove(outputPath)
                        failedFiles.append(outputPath)

                    # On a second thought, don't wipe files with wrong latitude, as
                    # next time we run fetch we will have to fetch them again.
                    # Hopefully they will be ignored.
                    #isGood = hasGoodLat(latitude, isSouth)
                    #if not isGood:
                    #    logger.info("Wiping XML file " + outputPath + " with bad latitude " + \
                    #                str(latitude))
                    #    os.remove(outputPath)
                    #    imageFile = icebridge_common.xmlToImage(outputPath)
                    #    if os.path.exists(imageFile):
                    #        logger.info("Wiping TIF file " + imageFile + " with bad latitude " + \
                    #                    str(latitude))
                    #        os.remove(imageFile)

        # Verify the chcksum (for data files only, not their .xml/.tfw sidecars)
        if hasXml and len(outputPath) >= 4 and outputPath[-4:] != '.xml' \
               and outputPath[-4:] != '.tfw':
            if outputPath in validFilesSet and os.path.exists(outputPath):
                #logger.info('Previously validated: ' + outputPath) # verbose
                continue
            else:
                isGood = icebridge_common.hasValidChkSum(outputPath, logger)
                if not isGood:
                    # Wipe both the data file and its xml so both get refetched.
                    xmlFile = icebridge_common.xmlFile(outputPath)
                    logger.info('Found invalid data. Will wipe: ' + outputPath + ' ' + xmlFile)
                    if os.path.exists(outputPath):
                        os.remove(outputPath)
                    if os.path.exists(xmlFile):
                        os.remove(xmlFile)
                    failedFiles.append(outputPath)
                    failedFiles.append(xmlFile)
                    continue
                else:
                    logger.info('Valid file: ' + outputPath)
                    validFilesSet.add(outputPath)

        if hasTfw and icebridge_common.fileExtension(outputPath) == '.tfw':
            if outputPath in validFilesSet and os.path.exists(outputPath):
                #logger.info('Previously validated: ' + outputPath)
                continue
            else:
                isGood = icebridge_common.isValidTfw(outputPath, logger)
                if not isGood:
                    xmlFile = icebridge_common.xmlFile(outputPath)
                    logger.info('Found invalid tfw. Will wipe: ' + outputPath + ' ' + xmlFile)
                    if os.path.exists(outputPath):
                        os.remove(outputPath)
                    if os.path.exists(xmlFile):
                        os.remove(xmlFile)
                    failedFiles.append(outputPath)
                    failedFiles.append(xmlFile)
                    continue
                else:
                    logger.info('Valid tfw file: ' + outputPath)
                    validFilesSet.add(outputPath)

    # Write to disk the list of validated files, but only if new
    # validations happened.  First re-read that list, in case a
    # different process modified it in the meantime, such as if two
    # managers are running at the same time.
    numFinalValidFiles = len(validFilesSet)
    if numInitialValidFiles != numFinalValidFiles:
        validFilesSet = \
                      icebridge_common.updateValidFilesListFromDisk(validFilesList, validFilesSet)
        icebridge_common.writeValidFilesList(validFilesList, validFilesSet)

    numFailed = len(failedFiles)
    if numFailed > 0:
        logger.info("Number of files that could not be processed: " + str(numFailed))

    return numFailed
def main(argsIn):
    '''Parse command-line arguments and launch one label_images task per
    frame in [startFrame, stopFrame] using a multiprocessing pool.
    Returns -1 if the required training file is missing.'''

    try:
        usage = '''label_images.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")

        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")

        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')

        parser.add_argument("--training", dest="trainingPath", required=True,
                            help="Path to the training file.")

        parser.add_argument('--num-processes', dest='numProcesses', default=8,
                            type=int, help='The number of simultaneous processes to run.')

        parser.add_argument('--num-threads', dest='numThreads', default=1,
                            type=int, help='Used for mapproject.')

        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    # Fail early if the training file is absent. Use print() (parenthesized,
    # single-argument form works identically on Python 2 and 3) instead of the
    # original Python 2 print statement, for consistency with the print(...)
    # calls below.
    if not os.path.exists(options.trainingPath):
        print('Error: Input training file ' + options.trainingPath + ' does not exist!')
        return -1

    # TODO: Everything should use the RunHelper class for this!
    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site,
                                                             options.yyyymmdd)

    # Set up a processing pool to handle the frames; this will be more efficient
    # than using the built-in multithreading support.
    pool = multiprocessing.Pool(options.numProcesses)
    taskHandles = []

    # Note the inclusive upper bound: the stop frame is also processed,
    # matching the --stop-frame help text.
    for i in range(options.startFrame, options.stopFrame + 1):
        # Run on a single frame with one thread.
        #label_images(options.outputFolder, i, options.trainingPath, options.site, options.yyyymmdd, options.numThreads)
        taskHandles.append(pool.apply_async(label_images,
                                            (options.outputFolder, i,
                                             options.trainingPath, options.site,
                                             options.yyyymmdd, options.numThreads)))

    # Wait for all the tasks to complete
    print('Finished adding ' + str(len(taskHandles)) + ' tasks to the pool.')
    icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, interactive=False)

    # All tasks should be finished, clean up the processing pool
    icebridge_common.stopTaskPool(pool)
    print('Jobs finished.')
def main(argsIn):
    '''Parse command-line arguments, set up logging, and blend DEMs for every
    frame in the requested range, optionally in parallel via a process pool.
    Each frame is handed to runBlend() together with its matching lidar file
    and (optionally) a Fireball DEM.'''

    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/blend_dems.py --site GR \
        #   --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        #   --num-threads 8 --num-processes 10
        usage = '''blend_dems.py <options>'''

        parser = argparse.ArgumentParser(usage=usage)

        # Run selection
        parser.add_argument(
            "--yyyymmdd", dest="yyyymmdd", required=True,
            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument(
            "--site", dest="site", required=True,
            help="Name of the location of the images (AN, GR, or AL)")

        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")

        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int, help="The number of images to bundle adjust and process " + \
                            "in a single batch.")

        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument(
            '--stop-frame', dest='stopFrame', type=int,
            default=icebridge_common.getLargestFrame(),
            help='Frame to stop on. This frame will also be processed.')

        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. "+\
                            "The default is no additional folder.")

        parser.add_argument(
            "--compute-diff-to-prev-dem", action="store_true",
            dest="computeDiffToPrev", default=False,
            help="Compute the absolute difference between the current DEM "
            "and the one before it.")

        parser.add_argument("--blend-to-fireball-footprint", action="store_true",
                            dest="blendToFireball", default=False,
                            help="Create additional blended DEMs having the same " + \
                            "footprint as Fireball DEMs.")

        # Performance options
        parser.add_argument(
            '--num-processes', dest='numProcesses', default=1,
            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int, help='The number of threads per process.')
        options = parser.parse_args(argsIn)

    except argparse.ArgumentError as msg:
        parser.error(msg)

    icebridge_common.switchWorkDir()

    # NOTE(review): these run in a child shell, so ulimit/umask may not affect
    # this Python process itself -- confirm the intended effect.
    os.system("ulimit -c 0")  # disable core dumps
    os.system("rm -f core.*") # these keep on popping up
    os.system("umask 022")    # enforce files be readable by others

    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")

    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site,
                                                             options.yyyymmdd)

    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Make this an option??
    logger   = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                            'icebridge_blend_log')

    # Record the machine and the invocation arguments in the log, for
    # reproducibility.
    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput=True)
    logger.info("Running on machine: " + out)
    logger.info(str(argsIn))

    processFolder = os.path.join(options.outputFolder, 'processed')

    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)

    # The ortho index maps frame numbers to ortho image files; it drives which
    # frames can be processed at all.
    orthoFolder    = icebridge_common.getOrthoFolder(options.outputFolder)
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)

    if options.blendToFireball:
        fireballFrameDict = icebridge_common.getCorrectedFireballDems(
            options.outputFolder)

    lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)

    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)

    redo           = False
    suppressOutput = True
    taskHandles    = []
    # The pool is created only in the multi-process case; the single-process
    # path below runs jobs inline for easier debugging.
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)

    # Bound the frames: clamp the requested range to the frames actually
    # present in the ortho index.
    sortedFrames = sorted(orthoFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No ortho files, that means nothing to do
        options.startFrame = 0
        options.stopFrame  = 0

    for frame in range(options.startFrame, options.stopFrame):

        if not frame in orthoFrameDict:
            logger.info("Error: Missing ortho file for frame: " + str(frame) + ".")
            continue

        orthoFile = orthoFrameDict[frame]
        try:
            lidarFile = icebridge_common.findMatchingLidarFile(orthoFile,
                                                               lidarFolder)
        except: # Skip if no lidar file matches this frame
            continue

        # A missing Fireball DEM is not fatal; the blend just proceeds
        # without a footprint to match.
        fireballDEM = ""
        if options.blendToFireball:
            if frame in fireballFrameDict:
                fireballDEM = fireballFrameDict[frame]
            else:
                logger.info("No fireball DEM for frame: " + str(frame))

        args = (frame, processFolder, lidarFile, fireballDEM, options,
                threadText, redo, suppressOutput)

        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runBlend, args))
        else:
            runBlend(*args)

    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger,
                                                         interactive=False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)