    def conversionIsFinished(self, startFrame, stopFrame, verbose=False):
        '''Return true if this run is present and conversion has finished running on it'''

        logger = logging.getLogger(__name__)

        # Make sure that there is a camera file for input image file.
        # - This could be a more expansive check.
        cameraFolder = self.getCameraFolder()
        imageList = self.getImageList()
        for imageFile in imageList:
            camFile = os.path.join(
                cameraFolder, icebridge_common.getCameraFileName(imageFile))

            # Check only within range
            # TODO: Actually we need the cameras to go a bit beyond
            frame = icebridge_common.getFrameNumberFromFilename(camFile)
            if frame < startFrame or frame >= stopFrame:
                continue

            if not os.path.exists(camFile):
                if verbose:
                    logger.error('Missing file ' + camFile)
                return False

        # Do a simple check of the converted lidar files

        prependFolder = True
        lidarFolder = self.getLidarFolder()
        convLidarFile = icebridge_common.getConvertedLidarIndexFile(
            lidarFolder)
        (lidarDict, dummyUrlDict) = icebridge_common.readIndexFile(
            convLidarFile, prependFolder)

        pairedLidarFolder = icebridge_common.getPairedLidarFolder(lidarFolder)
        pairedLidarFile = icebridge_common.getPairedIndexFile(
            pairedLidarFolder)
        (pairedLidarDict, dummyUrlDict) = icebridge_common.readIndexFile(
            pairedLidarFile, prependFolder)

        numLidar = len(lidarDict.values())
        numPairedLidar = len(pairedLidarDict.values())

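        # N consecutive converted lidar files yield N-1 paired files (see
        # pairLidarFiles below), hence the "+ 1" in this comparison.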
        if numLidar != (numPairedLidar + 1):
            logger.error('Not enough paired lidar files found')
            return False

        # Make sure the lidar files are not empty
        success = True
        for f in list(lidarDict.values()) + list(pairedLidarDict.values()):
            if not asp_file_utils.fileIsNonZero(f):
                logger.error('lidar file ' + f + ' is empty!')
                os.system('rm -f ' + f)  # Remove bad files
                success = False

        return success
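
# A minimal standalone sketch (an assumption, not part of the original module)
# of the empty-file cleanup used above, done with os.remove() instead of
# shelling out to 'rm -f', which is fragile for paths containing spaces.
# Assumes 'os' is imported and a logging-style 'logger' is available.
def removeIfEmpty(path, logger):
    '''Return True if the file exists and is non-empty; otherwise remove it
       (if present) and return False.'''
    try:
        if os.path.exists(path) and os.path.getsize(path) > 0:
            return True
        if os.path.exists(path):
            logger.error('File ' + path + ' is empty, removing it.')
            os.remove(path)
    except OSError as e:
        logger.error('Could not check/remove ' + path + ': ' + str(e))
    return False
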
    def getLidarList(self, paired=False, prependFolder=False):
        '''Return a list containing all the currently stored lidar files.
           This does not return converted csv files.'''
        lidarFolder = self.getLidarFolder()
        if paired:
            lidarFolder = icebridge_common.getPairedLidarFolder(lidarFolder)
        files = icebridge_common.getLidar(lidarFolder)
        if prependFolder:
            files = [os.path.join(lidarFolder, x) for x in files]
        files.sort()
        return files
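
    # Hedged usage sketch (the 'run' object below is hypothetical, standing in
    # for an instance of the class these methods belong to):
    #   lidarFiles  = run.getLidarList(prependFolder=True)               # converted lidar files
    #   pairedFiles = run.getLidarList(paired=True, prependFolder=True)  # paired files, full paths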
def pairLidarFiles(lidarFolder, skipValidate, logger):
    '''For each pair of lidar files generate a double size point cloud.
       We can use these later since they do not have any gaps between adjacent files.'''

    logger.info('Generating lidar pairs...')

    # Create the output folder
    pairedFolder = icebridge_common.getPairedLidarFolder(lidarFolder)
    os.system('mkdir -p ' + pairedFolder)

    convLidarFile = icebridge_common.getConvertedLidarIndexFile(lidarFolder)
    if not os.path.exists(convLidarFile):
        raise Exception("Missing file: " + convLidarFile)

    (lidarDict, dummyUrlDict) = icebridge_common.readIndexFile(convLidarFile)
    lidarExt = ''
    for frame in lidarDict:
        lidarExt = icebridge_common.fileExtension(lidarDict[frame])

    numLidarFiles = len(lidarDict.keys())

    pairedDict = {}

    # Loop through all pairs of csv files in the folder
    badFiles = False
    lidarKeys = sorted(lidarDict.keys())
    for i in range(len(lidarKeys) - 1):

        thisFile = lidarDict[lidarKeys[i]]
        nextFile = lidarDict[lidarKeys[i + 1]]

        date2, time2 = icebridge_common.parseTimeStamps(nextFile)

        # Record the name with the second file
        # - More useful because the time for the second file represents the middle of the file.
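        # For example (hypothetical names), pairing files stamped
        # ..._20091016_112233 and ..._20091016_112800 would be recorded under
        # the second timestamp, e.g. lidar_pair_20091016_112800<ext>,
        # assuming lidar_pair_prefix() returns 'lidar_pair_'.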
        outputName = icebridge_common.lidar_pair_prefix() + date2 + '_' + time2 + lidarExt

        pairedDict[lidarKeys[i]] = outputName

        # Handle paths
        path1 = os.path.join(lidarFolder, thisFile)
        path2 = os.path.join(lidarFolder, nextFile)
        outputPath = os.path.join(pairedFolder, outputName)

        if not os.path.exists(path1) or not os.path.exists(path2):
            logger.info("Cannot create " + outputPath +
                        " as we are missing its inputs")
            # If the inputs are missing, but the output is there, most likely it is corrupt.
            # Wipe it. Hopefully a subsequent fetch and convert step will bring it back.
            if os.path.exists(outputPath):
                logger.info("Wiping: " + outputPath)
                os.system('rm -f ' + outputPath)  # will not throw
                badFiles = True
            continue

        # Skip existing valid files
        if skipValidate:
            if os.path.exists(outputPath):
                logger.info("File exists, skipping: " + outputPath)
                continue
        else:
            if icebridge_common.isValidLidarCSV(outputPath):
                #logger.info("File exists and is valid, skipping: " + outputPath)
                continue

        # Concatenate the two files
        cmd1 = 'cat ' + path1 + ' > ' + outputPath
        cmd2 = 'tail -n +2 -q ' + path2 + ' >> ' + outputPath
        logger.info(cmd1)
        p = subprocess.Popen(cmd1, stdout=subprocess.PIPE, shell=True)
        out, err = p.communicate()
        logger.info(cmd2)
        p = subprocess.Popen(cmd2, stdout=subprocess.PIPE, shell=True)
        out, err = p.communicate()

        if not icebridge_common.isValidLidarCSV(outputPath):
            logger.error('Failed to generate merged LIDAR file, will wipe: ' +
                         outputPath)
            os.system('rm -f ' + outputPath)  # will not throw
            badFiles = True

    pairedLidarFile = icebridge_common.getPairedIndexFile(pairedFolder)

    willWritePairedFile = False
    if not os.path.exists(pairedLidarFile):
        willWritePairedFile = True
    else:
        # Bugfix: Sometimes the written converted file has the wrong size, maybe
        # something got interrupted earlier.
        (lidarDictIn,
         dummyUrlDict) = icebridge_common.readIndexFile(pairedLidarFile)
        if lidarDictIn != pairedDict:
            willWritePairedFile = True

    if willWritePairedFile:
        logger.info("Writing: " + pairedLidarFile)
        icebridge_common.writeIndexFile(pairedLidarFile, pairedDict, {})

    return (not badFiles)
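
# A minimal sketch (an assumption, not the original implementation) of the same
# two-file merge done with plain Python file I/O instead of shelling out to
# 'cat' and 'tail -n +2', which avoids any shell quoting issues. It copies the
# first file whole, then the second file minus its first (header) line.
def concatenateLidarPair(path1, path2, outputPath):
    '''Write path1, then path2 without its header line, to outputPath.'''
    with open(outputPath, 'w') as out:
        with open(path1, 'r') as f1:
            for line in f1:
                out.write(line)
        with open(path2, 'r') as f2:
            for count, line in enumerate(f2):
                if count == 0:
                    continue  # skip the header line, mirroring 'tail -n +2'
                out.write(line)
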
    def getLidarPairFolder(self):
        return icebridge_common.getPairedLidarFolder(self.getLidarFolder())
def main(argsIn):

    try:
        # Sample usage:
        # python fetcher_script.py \
        #  --yyyymmdd 20091016 --site AN --start-frame 350 --stop-frame 353 --skip-validate
        # An output folder will be created automatically (with a name like
        # AN_20091016), or its name can be specified via the --output-folder
        # option.
        usage = '''usage: fetcher_script.py <options>'''
                      
        parser = optparse.OptionParser(usage=usage)

        # Run selection
        parser.add_option("--yyyymmdd",  dest="yyyymmdd", default=None,
                          help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_option("--site",  dest="site", default=None,
                          help="Name of the location of the images (AN, GR, or AL)")

        parser.add_option("--camera-calibration-folder",  dest="inputCalFolder", default=None,
                          help="The folder containing camera calibration.")

        parser.add_option("--reference-dem-folder",  dest="refDemFolder", default=None,
                          help="The folder containing DEMs that created orthoimages.")
        
        # Python treats numbers starting with 0 as being in octal rather than decimal.
        # Ridiculous. So read them as strings and convert to int. 
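        # (For instance, int('0350') == 350 in both Python 2 and 3, whereas the
        #  literal 0350 is octal in Python 2 and a syntax error in Python 3.)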
        parser.add_option('--start-frame', dest='startFrameStr', default=None,
                          help="Frame to start with.  Leave this and stop-frame blank to " + \
                          "process all frames.")
        parser.add_option('--stop-frame', dest='stopFrameStr', default=None,
                          help='Frame to stop on.')
        parser.add_option('--max-num-lidar-to-fetch', dest='maxNumLidarToFetch', default=100000000,
                          type='int', help='The maximum number of lidar files to fetch. ' + \
                          'This is used in debugging.')
        parser.add_option("--skip-validate", action="store_true", dest="skipValidate",
                          default=False,
                          help="Skip input data validation.")
        parser.add_option("--ignore-missing-lidar", action="store_true", dest="ignoreMissingLidar",
                          default=False,
                          help="Keep going if the lidar is missing.")
        parser.add_option("--no-lidar-convert", action="store_true", dest="noLidarConvert",
                          default=False,
                          help="Skip lidar files in the conversion step.")

        parser.add_option("--no-nav", action="store_true", dest="noNav",
                          default=False,
                          help="Skip dealing with raw nav data.")
        parser.add_option("--skip-processing", action="store_true", dest="skipProcessing",
                          default=False,
                          help="Skip fetch, validate, and convert. Assume all data is ready.")

        parser.add_option("--refetch-index", action="store_true", dest="refetchIndex",
                          default=False,
                          help="Force refetch of the index file.")
        parser.add_option("--stop-after-index-fetch", action="store_true",
                          dest="stopAfterIndexFetch", default=False,
                          help="Stop after fetching the indices.")

        parser.add_option("--tar", action="store_true", dest="tar", default=False,
                          help="After fetching all data and performing all conversions and " + \
                          "validations, make a tarball on lou.  Only valid on Pleiades!")
        parser.add_option("--wipe", action="store_true", dest="wipe", default=False,
                          help="After making a tarball with --tar, wipe the directory. ")
        parser.add_option("--start-with-lou-archive", action="store_true",
                          dest="startWithLouArchive", default=False,
                          help="Untar an existing archive from lou, then continue.")
                          
        (options, args) = parser.parse_args(argsIn)

    except optparse.OptionError as msg:
        raise Usage(msg)

    if options.yyyymmdd is None or options.site is None:
        print("The flight date and site must be specified.")
        return -1

    options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)
    os.system('mkdir -p ' + options.outputFolder)
    
    logLevel = logging.INFO
    logger   = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                            'icebridge_fetcher_log')

    # Explicitly go from strings to integers, per earlier note.
    if options.startFrameStr is not None:
        startFrame = int(options.startFrameStr)
    else:
        startFrame = icebridge_common.getSmallestFrame()
    if options.stopFrameStr is not None:
        stopFrame  = int(options.stopFrameStr)
    else:
        stopFrame = icebridge_common.getLargestFrame()

    # Unarchive, then continue with fetching
    if options.startWithLouArchive:
        start_time()
        startWithLouArchive(options, logger)
        stop_time("fetch_from_lou", logger)

    cmd = (('--yyyymmdd %s --site %s --start-frame %d --stop-frame %d ' +
            '--max-num-lidar-to-fetch %d --stop-after-convert --no-ortho-convert --refetch')
           % (options.yyyymmdd, options.site, startFrame, stopFrame,
              options.maxNumLidarToFetch))
    if options.refetchIndex:
        cmd += ' --refetch-index' # this was not right in older fetched runs
    if options.stopAfterIndexFetch:
        cmd += ' --stop-after-index-fetch' 
    if options.skipValidate:
        cmd += ' --skip-validate'
    if options.ignoreMissingLidar:
        cmd += ' --ignore-missing-lidar'
    if options.noLidarConvert:
        cmd += ' --no-lidar-convert'
    if options.noNav:
        cmd += ' --no-nav'
    if options.inputCalFolder is not None:
        cmd += ' --camera-calibration-folder ' + options.inputCalFolder

    if options.refDemFolder is not None:
        cmd += ' --reference-dem-folder ' + options.refDemFolder

    if not options.skipProcessing:

        # Refetch all nav stuff, as it was unreliable in the past
        navFolder = icebridge_common.getNavFolder(options.outputFolder)
        navCameraFolder = icebridge_common.getNavCameraFolder(options.outputFolder)
        if os.path.exists(navFolder):
            logger.info("Wiping: " + navFolder)
            os.system('rm -rfv ' + navFolder)
        if os.path.exists(navCameraFolder):
            logger.info("Wiping: " + navCameraFolder)
            os.system('rm -rfv ' + navCameraFolder)

        # Wipe processed lidar, as sometimes errors crept in.
        logger.info("Wiping processed lidar:")
        lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)
        if os.path.exists(lidarFolder):
            os.system('rm -fv ' + lidarFolder + '/*csv')
        pairedFolder = icebridge_common.getPairedLidarFolder(lidarFolder)
        if os.path.exists(pairedFolder):
            os.system('rm -rfv ' + pairedFolder)
        
        logger.info("full_processing_script.py " + cmd)
        start_time()
        if full_processing_script.main(cmd.split()) < 0:
            return -1
        stop_time("fetch_validate", logger)

    # Archive after fetching
    if options.tar:
        tarAndWipe(options, logger)


# Run main function if file used from shell
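# A reasonable completion given the comment above (an assumption; it requires
# 'import sys' at the top of the file):
if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))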