    def conversionIsFinished(self, startFrame, stopFrame, verbose=False):
        '''Return true if this run is present and conversion has finished running on it'''

        logger = logging.getLogger(__name__)

        # Make sure that there is a camera file for each input image file.
        # - This could be a more expansive check.
        cameraFolder = self.getCameraFolder()
        imageList = self.getImageList()
        for imageFile in imageList:
            camFile = os.path.join(
                cameraFolder, icebridge_common.getCameraFileName(imageFile))

            # Check only within range
            # TODO: Actually we need the cameras to go a bit beyond
            frame = icebridge_common.getFrameNumberFromFilename(camFile)
            if frame < startFrame or frame >= stopFrame:
                continue

            if not os.path.exists(camFile):
                if verbose:
                    logger.error('Missing file ' + camFile)
                return False

        # Do a simple check of the converted lidar files

        prependFolder = True
        lidarFolder = self.getLidarFolder()
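        # readIndexFile() returns a dict mapping frame number to file name; with
        # prependFolder set, the names come back with the folder prepended, so
        # they can be checked on disk directly below.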
        convLidarFile = icebridge_common.getConvertedLidarIndexFile(
            lidarFolder)
        (lidarDict, dummyUrlDict) = icebridge_common.readIndexFile(
            convLidarFile, prependFolder)

        pairedLidarFolder = icebridge_common.getPairedLidarFolder(lidarFolder)
        pairedLidarFile = icebridge_common.getPairedIndexFile(
            pairedLidarFolder)
        (pairedLidarDict, dummyUrlDict) = icebridge_common.readIndexFile(
            pairedLidarFile, prependFolder)

        numLidar = len(lidarDict.values())
        numPairedLidar = len(pairedLidarDict.values())

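        # Each pair of consecutive lidar files yields one paired file, so there
        # should be exactly one fewer paired file than converted files.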
        if numLidar != (numPairedLidar + 1):
            logger.error('Not enough paired lidar files found')
            return False

        # Make sure the lidar files are not empty
        success = True
        for f in list(lidarDict.values()) + list(pairedLidarDict.values()):
            if not asp_file_utils.fileIsNonZero(f):
                logger.error('lidar file ' + f + ' is empty!')
                os.system('rm -f ' + f)  # Remove bad files
                success = False

        return success
def pairLidarFiles(lidarFolder, skipValidate, logger):
    '''For each pair of consecutive lidar files generate a double-size point cloud.
       We can use these later since they do not have gaps between adjacent files.'''

    logger.info('Generating lidar pairs...')

    # Create the output folder
    pairedFolder = icebridge_common.getPairedLidarFolder(lidarFolder)
    os.system('mkdir -p ' + pairedFolder)

    convLidarFile = icebridge_common.getConvertedLidarIndexFile(lidarFolder)
    if not os.path.exists(convLidarFile):
        raise Exception("Missing file: " + convLidarFile)

    (lidarDict, dummyUrlDict) = icebridge_common.readIndexFile(convLidarFile)
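    # All converted lidar files are expected to share the same extension;
    # pick it up from the index entries.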
    lidarExt = ''
    for frame in lidarDict:
        lidarExt = icebridge_common.fileExtension(lidarDict[frame])

    numLidarFiles = len(lidarDict.keys())

    pairedDict = {}

    # Loop through all pairs of csv files in the folder
    badFiles = False
    lidarKeys = sorted(lidarDict.keys())
    for i in range(len(lidarKeys) - 1):

        thisFile = lidarDict[lidarKeys[i]]
        nextFile = lidarDict[lidarKeys[i + 1]]

        date2, time2 = icebridge_common.parseTimeStamps(nextFile)

        # Record the pair under the second file's timestamp
        # - More useful because the second file's time falls in the middle
        #   of the merged output.
        outputName = (icebridge_common.lidar_pair_prefix() +
                      date2 + '_' + time2 + lidarExt)

        pairedDict[lidarKeys[i]] = outputName

        # Handle paths
        path1 = os.path.join(lidarFolder, thisFile)
        path2 = os.path.join(lidarFolder, nextFile)
        outputPath = os.path.join(pairedFolder, outputName)

        if not os.path.exists(path1) or not os.path.exists(path2):
            logger.info("Cannot create " + outputPath +
                        " as we are missing its inputs")
            # If the inputs are missing, but the output is there, most likely it is corrupt.
            # Wipe it. Hopefully a subsequent fetch and convert step will bring it back.
            if os.path.exists(outputPath):
                logger.info("Wiping: " + outputPath)
                os.system('rm -f ' + outputPath)  # will not throw
                badFiles = True
            continue

        # Skip existing valid files
        if skipValidate:
            if os.path.exists(outputPath):
                logger.info("File exists, skipping: " + outputPath)
                continue
        else:
            if icebridge_common.isValidLidarCSV(outputPath):
                #logger.info("File exists and is valid, skipping: " + outputPath)
                continue

        # Concatenate the two files
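        # - 'tail -n +2' starts at the second line, dropping the header row of
        #   the second file so the merged csv keeps a single header.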
        cmd1 = 'cat ' + path1 + ' > ' + outputPath
        cmd2 = 'tail -n +2 -q ' + path2 + ' >> ' + outputPath
        logger.info(cmd1)
        p = subprocess.Popen(cmd1, stdout=subprocess.PIPE, shell=True,
                             universal_newlines=True)
        out, err = p.communicate()
        logger.info(cmd2)
        p = subprocess.Popen(cmd2, stdout=subprocess.PIPE, shell=True,
                             universal_newlines=True)
        out, err = p.communicate()

        if not icebridge_common.isValidLidarCSV(outputPath):
            logger.error('Failed to generate merged LIDAR file, will wipe: ' +
                         outputPath)
            os.system('rm -f ' + outputPath)  # will not throw
            badFiles = True

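    # Update the paired lidar index file, but only rewrite it if its contents
    # actually changed.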
    pairedLidarFile = icebridge_common.getPairedIndexFile(pairedFolder)

    willWritePairedFile = False
    if not os.path.exists(pairedLidarFile):
        willWritePairedFile = True
    else:
        # Bugfix: Sometimes the written converted file has the wrong size, maybe
        # something got interrupted earlier.
        (lidarDictIn,
         dummyUrlDict) = icebridge_common.readIndexFile(pairedLidarFile)
        if lidarDictIn != pairedDict:
            willWritePairedFile = True

    if willWritePairedFile:
        logger.info("Writing: " + pairedLidarFile)
        icebridge_common.writeIndexFile(pairedLidarFile, pairedDict, {})

    return (not badFiles)
def convertLidarDataToCsv(lidarFolder, startFrame, stopFrame, skipValidate,
                          logger):
    '''Make sure all lidar data is available in a readable text format.
       Returns false if any files failed to convert.'''

    logger.info('Converting LIDAR files...')

    lidarIndexPath = icebridge_common.csvIndexFile(lidarFolder)
    (frameDict, urlDict) = icebridge_common.readIndexFile(lidarIndexPath)

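    # Keep a cache of files that already passed validation so that later runs
    # can skip re-checking them.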
    if not skipValidate:
        validFilesList = icebridge_common.validFilesList(
            os.path.dirname(lidarFolder), startFrame, stopFrame)
        validFilesSet = set()
        validFilesSet = icebridge_common.updateValidFilesListFromDisk(
            validFilesList, validFilesSet)
        numInitialValidFiles = len(validFilesSet)

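    # Map each frame to the name of its plain-text (csv) lidar file.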
    convDict = {}

    # Loop through all files in the folder
    badFiles = False
    for frame in sorted(frameDict.keys()):

        f = frameDict[frame]
        extension = icebridge_common.fileExtension(f)

        # Only interested in a few file types
        if extension not in ('.qi', '.hdf5', '.h5'):
            convDict[frame] = f  # these are already in plain text
            continue

        convDict[frame] = os.path.splitext(f)[0] + '.csv'
        outputPath = os.path.join(lidarFolder, convDict[frame])

        # Handle paths
        fullPath = os.path.join(lidarFolder, f)
        if not os.path.exists(fullPath):
            logger.info("Cannot convert missing file: " + fullPath)
            continue

        # If the input is invalid, wipe it, its xml file, and the output.
        # Hopefully a subsequent fetch step will bring it back.
        if not icebridge_common.hasValidChkSum(fullPath, logger):
            logger.info("Will wipe invalid file: " + fullPath)
            xmlFile = icebridge_common.xmlFile(fullPath)
            os.system('rm -f ' + fullPath)  # will not throw
            os.system('rm -f ' + xmlFile)  # will not throw
            os.system('rm -f ' + outputPath)  # will not throw
            badFiles = True
            continue

        # Skip existing valid files
        if skipValidate:
            if os.path.exists(outputPath):
                logger.info("File exists, skipping: " + outputPath)
                continue
        else:
            if outputPath in validFilesSet and os.path.exists(outputPath):
                #logger.info('Previously validated: ' + outputPath) # verbose
                continue
            if icebridge_common.isValidLidarCSV(outputPath):
                #logger.info("File exists and is valid, skipping: " + outputPath)
                continue

        # Call the conversion
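        # - extract_icebridge_ATM_points is expected to write the .csv next to
        #   the input file, which is the outputPath computed above.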
        logger.info("Process " + fullPath)
        extract_icebridge_ATM_points.main([fullPath])

        # Check the result
        if not icebridge_common.isValidLidarCSV(outputPath):
            logger.error('Failed to parse LIDAR file, will wipe: ' +
                         outputPath)
            os.system('rm -f ' + outputPath)  # will not throw
            badFiles = True
        else:
            if not skipValidate:
                validFilesSet.add(outputPath)  # mark it as validated

    convLidarFile = icebridge_common.getConvertedLidarIndexFile(lidarFolder)

    willWriteConvFile = False
    if not os.path.exists(convLidarFile):
        willWriteConvFile = True
    else:
        # Bugfix: Sometimes the written converted file has the wrong size, maybe
        # something got interrupted earlier.
        (lidarDictIn,
         dummyUrlDict) = icebridge_common.readIndexFile(convLidarFile)
        if lidarDictIn != convDict:
            willWriteConvFile = True

    if willWriteConvFile:
        logger.info("Writing: " + convLidarFile)
        icebridge_common.writeIndexFile(convLidarFile, convDict, {})

    if not skipValidate:
        # Write to disk the list of validated files, but only if new
        # validations happened.  First re-read that list, in case a
        # different process modified it in the meantime, such as if two
        # managers are running at the same time.
        numFinalValidFiles = len(validFilesSet)
        if numInitialValidFiles != numFinalValidFiles:
            validFilesSet = icebridge_common.updateValidFilesListFromDisk(
                validFilesList, validFilesSet)
            icebridge_common.writeValidFilesList(validFilesList, validFilesSet)

    return (not badFiles)
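
# The sketch below is illustrative only and not part of the original source.
# It shows the assumed calling order for the helpers above: pairLidarFiles()
# reads the converted-lidar index written by convertLidarDataToCsv(), so
# conversion must run first. The function name and default frame range are
# hypothetical.
def _exampleLidarWorkflow(lidarFolder, startFrame=0, stopFrame=100000):
    '''Illustrative sketch: convert the raw lidar granules to csv, then pair
       consecutive files. Returns False if either step reported bad files.'''
    import logging
    logger = logging.getLogger(__name__)
    if not convertLidarDataToCsv(lidarFolder, startFrame, stopFrame,
                                 skipValidate=False, logger=logger):
        return False
    return pairLidarFiles(lidarFolder, skipValidate=False, logger=logger)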