Example #1
import os
import logging

# Project modules from the IceBridge processing tools; assumed to be
# importable from the working environment.
import icebridge_common
import extract_icebridge_ATM_points


def convertLidarDataToCsv(lidarFolder):
    '''Make sure all lidar data is available in a readable text format'''

    logger = logging.getLogger(__name__)
    logger.info('Converting LIDAR files...')

    # Loop through all lidar files in the folder
    lidarFiles = os.listdir(lidarFolder)
    for f in lidarFiles:
        extension = icebridge_common.fileExtension(f)

        # Only interested in a few file types
        if extension not in ('.qi', '.hdf5', '.h5'):
            continue

        # Handle paths
        fullPath = os.path.join(lidarFolder, f)
        outputPath = os.path.join(lidarFolder, os.path.splitext(f)[0] + '.csv')
        if os.path.exists(outputPath):
            continue

        # Call the conversion
        extract_icebridge_ATM_points.main([fullPath])
        if not os.path.exists(outputPath):
            raise Exception('Failed to parse LIDAR file: ' + fullPath)
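
# Usage sketch (illustrative only, not part of the original tool). The folder
# path below is a hypothetical example, and the call assumes the
# single-argument version defined above is the one in scope.
def demoConvertSimple(lidarFolder='run_output/lidar'):
    logging.basicConfig(level=logging.INFO)
    try:
        convertLidarDataToCsv(lidarFolder)
    except Exception as e:
        # The simple version raises if a .qi/.hdf5/.h5 file fails to parse.
        print('Conversion failed: ' + str(e))
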
def convertLidarDataToCsv(lidarFolder, startFrame, stopFrame, skipValidate,
                          logger):
    '''Make sure all lidar data is available in a readable text format.
       Returns false if any files failed to convert.'''

    logger.info('Converting LIDAR files...')

    lidarIndexPath = icebridge_common.csvIndexFile(lidarFolder)
    (frameDict, urlDict) = icebridge_common.readIndexFile(lidarIndexPath)

    if not skipValidate:
        validFilesList = icebridge_common.validFilesList(
            os.path.dirname(lidarFolder), startFrame, stopFrame)
        validFilesSet = set()
        validFilesSet = icebridge_common.updateValidFilesListFromDisk(
            validFilesList, validFilesSet)
        numInitialValidFiles = len(validFilesSet)

    convDict = {}

    # Loop through all files in the folder
    badFiles = False
    for frame in sorted(frameDict.keys()):

        f = frameDict[frame]
        extension = icebridge_common.fileExtension(f)

        # Only interested in a few file types
        if extension not in ('.qi', '.hdf5', '.h5'):
            convDict[frame] = f  # these are already in plain text
            continue

        convDict[frame] = os.path.splitext(f)[0] + '.csv'
        outputPath = os.path.join(lidarFolder, convDict[frame])

        # Handle paths
        fullPath = os.path.join(lidarFolder, f)
        if not os.path.exists(fullPath):
            logger.info("Cannot convert missing file: " + fullPath)
            continue

        # If the input is invalid, wipe it, its xml file, and the output.
        # Hopefully a subsequent fetch step will re-fetch it.
        if not icebridge_common.hasValidChkSum(fullPath, logger):
            logger.info("Will wipe invalid file: " + fullPath)
            xmlFile = icebridge_common.xmlFile(fullPath)
            os.system('rm -f ' + fullPath)  # will not throw
            os.system('rm -f ' + xmlFile)  # will not throw
            os.system('rm -f ' + outputPath)  # will not throw
            badFiles = True
            continue

        # Skip existing valid files
        if skipValidate:
            if os.path.exists(outputPath):
                logger.info("File exists, skipping: " + outputPath)
                continue
        else:
            if outputPath in validFilesSet and os.path.exists(outputPath):
                #logger.info('Previously validated: ' + outputPath) # verbose
                continue
            if icebridge_common.isValidLidarCSV(outputPath):
                #logger.info("File exists and is valid, skipping: " + outputPath)
                continue

        # Call the conversion
        logger.info("Process " + fullPath)
        extract_icebridge_ATM_points.main([fullPath])

        # Check the result
        if not icebridge_common.isValidLidarCSV(outputPath):
            logger.error('Failed to parse LIDAR file, will wipe: ' +
                         outputPath)
            os.system('rm -f ' + outputPath)  # will not throw
            badFiles = True
        else:
            if not skipValidate:
                validFilesSet.add(outputPath)  # mark it as validated

    convLidarFile = icebridge_common.getConvertedLidarIndexFile(lidarFolder)

    willWriteConvFile = False
    if not os.path.exists(convLidarFile):
        willWriteConvFile = True
    else:
        # Bugfix: Sometimes the written converted file has the wrong size, maybe
        # something got interrupted earlier.
        (lidarDictIn,
         dummyUrlDict) = icebridge_common.readIndexFile(convLidarFile)
        if lidarDictIn != convDict:
            willWriteConvFile = True

    if willWriteConvFile:
        logger.info("Writing: " + convLidarFile)
        icebridge_common.writeIndexFile(convLidarFile, convDict, {})

    if not skipValidate:
        # Write to disk the list of validated files, but only if new
        # validations happened.  First re-read that list, in case a
        # different process modified it in the meantime, such as if two
        # managers are running at the same time.
        numFinalValidFiles = len(validFilesSet)
        if numInitialValidFiles != numFinalValidFiles:
            validFilesSet = icebridge_common.updateValidFilesListFromDisk(
                validFilesList, validFilesSet)
            icebridge_common.writeValidFilesList(validFilesList, validFilesSet)

    return (not badFiles)
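
# Usage sketch (illustrative only, not part of the original tool). The folder
# path, frame range, and logger name are assumptions for demonstration; the
# frame-aware version defined above is assumed to be the one in scope.
def demoConvertWithValidation(lidarFolder='run_output/lidar'):
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('lidar_convert_demo')
    ok = convertLidarDataToCsv(lidarFolder, startFrame=0, stopFrame=99999,
                               skipValidate=False, logger=logger)
    if not ok:
        # A False return means at least one file was invalid or failed to
        # convert; a later fetch/convert pass can retry those frames.
        logger.warning('Some LIDAR files failed to convert.')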