예제 #1
0
def translate(info, infile, outfile, spatial=False, extent=None):
    """
    Translate an SPD V4 file into a .las format file.

    * info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files.
    * spatial selects spatial processing; if True a spatial index is
      created on the output file on the fly (requires the input to
      already carry a spatial index).
    * extent, if given, is a tuple (xmin, ymin, xmax, ymax) restricting
      the area processed.

    Currently does not take any command line scaling options so LAS
    scaling will be the same as the SPDV4 input file scaling. Not sure
    if this is a problem or not...
    """
    # spatial mode only works when the input carries a spatial index
    if spatial and not info.has_Spatial_Index:
        raise generic.LiDARInvalidSetting(
            "Spatial processing requested but file does not have spatial index")

    # gather the waveform descriptions; None when the file has none
    print('Getting waveform description')
    try:
        wavePacketDescr = las.getWavePacketDescriptions(infile)
    except generic.LiDARInvalidData:
        wavePacketDescr = None

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)
    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('LAS')
    if wavePacketDescr is not None:
        dataFiles.output1.setLiDARDriverOption('WAVEFORM_DESCR',
                                               wavePacketDescr)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(spatial)

    if extent is not None:
        xMin, yMin, xMax, yMax = [float(val) for val in extent]
        binSize = info.header['BIN_SIZE']
        pixgrid = pixelgrid.PixelGridDefn(xMin=xMin, yMin=yMin,
                                          xMax=xMax, yMax=yMax,
                                          xRes=binSize, yRes=binSize)
        controls.setReferencePixgrid(pixgrid)
        controls.setFootprint(lidarprocessor.BOUNDS_FROM_REFERENCE)

    lidarprocessor.doProcessing(transFunc, dataFiles, controls=controls)
예제 #2
0
def prepareInputFiles(infiles, otherargs, index=None):
    """
    Prepare input files for calculation of canopy metrics.

    * infiles is a list of input file names.
    * otherargs is a lidarprocessor.OtherArgs instance; its lidardriver
      and proj lists are (re)populated here, one entry per input file.
    * index, if not None, selects a single file from infiles.

    Returns a lidarprocessor.DataFiles instance with inFiles set.
    Raises generic.LiDARInvalidData when a riegl file has no rotation
    matrix available (neither an external transform file nor one in its
    header).
    """
    dataFiles = lidarprocessor.DataFiles()
    if index is not None:
        dataFiles.inFiles = [
            lidarprocessor.LidarFile(infiles[index], lidarprocessor.READ)
        ]
    else:
        dataFiles.inFiles = [
            lidarprocessor.LidarFile(fname, lidarprocessor.READ)
            for fname in infiles
        ]

    otherargs.lidardriver = []
    otherargs.proj = []

    for i, inFile in enumerate(dataFiles.inFiles):
        info = generic.getLidarFileInfo(inFile.fname)
        if info.getDriverName() == 'riegl':
            if otherargs.externaltransformfn is not None:
                # pick the transform file matching this input: when a
                # single file was selected via index, use that index into
                # the transform list too, otherwise the loop position
                fnIdx = index if index is not None else i
                externaltransform = numpy.loadtxt(
                    otherargs.externaltransformfn[fnIdx],
                    ndmin=2,
                    delimiter=" ",
                    dtype=numpy.float32)
                inFile.setLiDARDriverOption("ROTATION_MATRIX",
                                            externaltransform)
            elif "ROTATION_MATRIX" in info.header:
                inFile.setLiDARDriverOption(
                    "ROTATION_MATRIX", info.header["ROTATION_MATRIX"])
            else:
                msg = ('Input file %s has no valid pitch/roll/yaw data'
                       % inFile.fname)
                raise generic.LiDARInvalidData(msg)

        otherargs.lidardriver.append(info.getDriverName())

        # record the projection when present and non-empty, else None
        if ("SPATIAL_REFERENCE" in info.header
                and len(info.header["SPATIAL_REFERENCE"]) > 0):
            otherargs.proj.append(info.header["SPATIAL_REFERENCE"])
        else:
            otherargs.proj.append(None)

    return dataFiles
예제 #3
0
def translate(info,
              infile,
              outfile,
              expectRange=None,
              scalings=None,
              nullVals=None,
              constCols=None,
              epsg=None,
              wkt=None):
    """
    Main function which does the work.

    * info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files.
    * expectRange is a list of (type, varname, min, max) tuples.
    * scalings is a list of (type, varname, gain, offset) tuples.
    * nullVals is a list of (type, varname, value) tuples.
    * constCols is a list of (type, varname, dtype, value) tuples.
    """
    # apply any user supplied scaling over the defaults
    scalingsDict = translatecommon.overRideDefaultScalings(scalings)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)
    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                                           False)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(False)

    # everything the processing function needs travels via otherArgs
    otherArgs = lidarprocessor.OtherArgs()
    # the header, so we don't collect it again
    otherArgs.rieglInfo = info.header
    # the default/overridden scaling
    otherArgs.scaling = scalingsDict
    # expected range of the data
    otherArgs.expectRange = expectRange
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols
    otherArgs.epsg = epsg
    otherArgs.wkt = wkt

    lidarprocessor.doProcessing(transFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
예제 #4
0
파일: las.py 프로젝트: rcjetpilot/LIDAR-1
def getWavePacketDescriptions(fname):
    """
    Gather the unique waveform packet information for fname.

    When writing a LAS file, a table describing the waveforms must go
    into the header even though the information really belongs to the
    waveforms themselves. This reads the waveform info from the input
    file (in any format) and collects the unique entries so they can be
    passed as the WAVEFORM_DESCR LAS driver option.

    Note: LAS only supports received waveforms.
    """
    # imported here rather than at module level — presumably to avoid a
    # circular import; TODO confirm
    from pylidar import lidarprocessor

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input = lidarprocessor.LidarFile(fname, lidarprocessor.READ)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.uniqueInfo = None

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    lidarprocessor.doProcessing(gatherWavePackets, dataFiles, otherArgs,
                                controls=controls)

    return otherArgs.uniqueInfo
예제 #5
0
def testWrite(infile, outfile, binSize):
    """
    Interpolate the input LiDAR file to an image written to outfile.

    NOTE(review): binSize is currently unused — setReferenceResolution
    is commented out below.
    """
    outNull = 0

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)
    dataFiles.imageOut1 = lidarprocessor.ImageFile(outfile,
                                                   lidarprocessor.CREATE)
    dataFiles.imageOut1.setRasterIgnore(outNull)

    controls = lidarprocessor.Controls()
    #controls.setOverlap(5)
    #controls.setWindowSize(30)
    #controls.setReferenceResolution(binSize)
    controls.setWindowSize(16)
    controls.setProgress(cuiprogress.GDALProgressBar())

    otherargs = lidarprocessor.OtherArgs()
    otherargs.ignore = outNull
    otherargs.interp = "pynn"
    otherargs.minVal = None
    otherargs.outNull = outNull

    lidarprocessor.doProcessing(writeImageFunc, dataFiles,
                                otherArgs=otherargs, controls=controls)
예제 #6
0
def calculateCheckSum(infile, windowSize=None):
    """
    Returns a Checksum instance for the given file
    """
    print('Calculating LiDAR Checksum...')
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setMessageHandler(lidarprocessor.silentMessageFn)
    if windowSize is not None:
        controls.setWindowSize(windowSize)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.checksum = Checksum()

    lidarprocessor.doProcessing(pylidarChecksum, dataFiles,
                                otherArgs=otherArgs, controls=controls)

    # as a last step, fold the accumulated data into digests
    otherArgs.checksum.convertToDigests()
    return otherArgs.checksum
예제 #7
0
def run(oldpath, newpath):
    """
    Runs the 15th basic test suite. Tests:

    creating a raster using the 'new' non-spatial mode
    """
    inputSPD = os.path.join(oldpath, INPUT_SPD)
    outputDEM = os.path.join(newpath, OUTPUT_DEM)

    header = generic.getLidarFileInfo(inputSPD).header
    xMin, yMax, ncols, nrows = spatial.getGridInfoFromHeader(header, BINSIZE)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(inputSPD, lidarprocessor.READ)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.outImage = numpy.zeros((nrows, ncols))
    otherArgs.xMin = xMin
    otherArgs.yMax = yMax

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    lidarprocessor.doProcessing(processChunk, dataFiles,
                                otherArgs=otherArgs, controls=controls)

    # write the accumulated image out as a raster and compare with
    # the reference copy
    writer = spatial.ImageWriter(outputDEM, tlx=xMin, tly=yMax,
                                 binSize=BINSIZE)
    writer.setLayer(otherArgs.outImage)
    writer.close()

    utils.compareImageFiles(os.path.join(oldpath, OUTPUT_DEM), outputDEM)
    
예제 #8
0
def run(oldpath, newpath):
    """
    Runs the 7th basic test suite. Tests:

    setting pixel grid different from the spatial index
    """
    inFile = os.path.join(oldpath, IN_FILE)
    interp = os.path.join(newpath, OUT_FILE)
    origInterp = os.path.join(oldpath, OUT_FILE)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input = lidarprocessor.LidarFile(inFile, lidarprocessor.READ)
    dataFiles.imageOut = lidarprocessor.ImageFile(interp,
                                                  lidarprocessor.CREATE)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(True)

    # can't use origInterp as projection source as this might not
    # be created yet (eg called from testing_cmds.sh)
    wkt = getProjection(os.path.join(oldpath, PROJECTION_SOURCE))
    pixGrid = pixelgrid.PixelGridDefn(xMin=509199.0, yMax=6944830,
                                      xMax=509857, yMin=6944130,
                                      xRes=2.0, yRes=2.0, projection=wkt)
    controls.setFootprint(lidarprocessor.BOUNDS_FROM_REFERENCE)
    controls.setReferencePixgrid(pixGrid)

    lidarprocessor.doProcessing(writeImageFunc, dataFiles, controls=controls)

    utils.compareImageFiles(origInterp, interp)
예제 #9
0
def run(oldpath, newpath):
    """
    Runs the 17th basic test suite. Tests:

    update a spd file using an image using the non spatial mode
    """
    inputSPD = os.path.join(oldpath, INPUT_SPD)
    updateSPD = os.path.join(newpath, UPDATE_SPD)
    # update a copy so the original input is left untouched
    shutil.copyfile(inputSPD, updateSPD)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(updateSPD,
                                                lidarprocessor.UPDATE)

    # read the DEM layer and pass it through to the processing function
    otherArgs = lidarprocessor.OtherArgs()
    inputDEM = os.path.join(oldpath, INPUT_DEM)
    (otherArgs.inImage, otherArgs.xMin, otherArgs.yMax,
     otherArgs.binSize) = spatial.readImageLayer(inputDEM)

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    lidarprocessor.doProcessing(processChunk,
                                dataFiles,
                                otherArgs=otherArgs,
                                controls=controls)

    utils.compareLiDARFiles(os.path.join(oldpath, UPDATE_SPD), updateSPD)
예제 #10
0
def translate(info,
              infile,
              outfile,
              expectRange=None,
              scaling=None,
              nullVals=None,
              constCols=None):
    """
    Main function which does the work.

    * info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files.
    * expectRange is a list of (type, varname, min, max) tuples.
    * scaling is a list of (type, varname, dtype, gain, offset) tuples.
    * nullVals is a list of (type, varname, value) tuples.
    * constCols is a list of (type, varname, dtype, value) tuples.
    """
    # merge any user supplied scaling over the defaults
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)
    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                                           False)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.expectRange = expectRange
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols

    lidarprocessor.doProcessing(transFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
예제 #11
0
def run(oldpath, newpath):
    """
    Runs the 8th basic test suite. Tests:

    Importing Riegl
    Creating spatial index
    Create an image file
    updating resulting file
    """
    inputRiegl = os.path.join(oldpath, INPUT_RIEGL)
    info = generic.getLidarFileInfo(inputRiegl)

    # import the riegl file
    importedSPD = os.path.join(newpath, IMPORTED_SPD)
    translate(info, inputRiegl, importedSPD, scalings=SCALINGS,
              internalrotation=True)
    utils.compareLiDARFiles(os.path.join(oldpath, IMPORTED_SPD), importedSPD,
                            windowSize=WINDOWSIZE)

    # build a spatial index on the imported file
    indexedSPD = os.path.join(newpath, INDEXED_SPD)
    createGridSpatialIndex(importedSPD, indexedSPD, binSize=1.0,
                           tempDir=newpath)
    utils.compareLiDARFiles(os.path.join(oldpath, INDEXED_SPD), indexedSPD,
                            windowSize=WINDOWSIZE)

    # rasterize the minimum Z of the points
    outputRaster = os.path.join(newpath, OUTPUT_RASTER)
    rasterize([indexedSPD], outputRaster, ['Z'], function="numpy.ma.min",
              atype='POINT', windowSize=WINDOWSIZE)
    utils.compareImageFiles(os.path.join(oldpath, OUTPUT_RASTER),
                            outputRaster)

    # update the points on a copy of the indexed file
    outputUpdate = os.path.join(newpath, UPDATED_SPD)
    shutil.copyfile(indexedSPD, outputUpdate)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(outputUpdate,
                                                lidarprocessor.UPDATE)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setWindowSize(WINDOWSIZE)
    controls.setSpatialProcessing(True)

    lidarprocessor.doProcessing(updatePointFunc, dataFiles, controls=controls)

    utils.compareLiDARFiles(os.path.join(oldpath, UPDATED_SPD), outputUpdate,
                            windowSize=WINDOWSIZE)
예제 #12
0
def main(inFile, shpfile, outFile):
    """
    Main function
    """
    # open the OGR layer so we can pass it in; keep a reference to the
    # datasource for the duration of the processing
    otherArgs = lidarprocessor.OtherArgs()
    shapeDS = ogr.Open(shpfile)
    otherArgs.layer = shapeDS.GetLayer(0)

    # input without building pulses, output written as LAS
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input = lidarprocessor.LidarFile(inFile, lidarprocessor.READ)
    dataFiles.input.setLiDARDriverOption('BUILD_PULSES', False)
    dataFiles.output = lidarprocessor.LidarFile(outFile, lidarprocessor.CREATE)
    dataFiles.output.setLiDARDriver('LAS')

    # run the processor
    lidarprocessor.doProcessing(chop, dataFiles, otherArgs=otherArgs)
예제 #13
0
def readLidarPoints(filename,
                    classification=None,
                    boundingbox=None,
                    colNames=None):
    """
    Read the requested columns for the points in the given file (or files if
    filename is a list), in a memory-efficient manner.

    Uses pylidar to read only a block of points at a time, and select out just
    the desired columns. When the input file is a .las file, this saves quite
    a lot of memory, in comparison to reading in all points at once, since all
    columns for all points have to be read in at the same time.

    Optionally filter by CLASSIFICATION column with a value from the
    generic.CLASSIFICATION_* constants.

    If boundingbox is given, it is a tuple of
    (xmin, xmax, ymin, ymax)
    and only points within this box are included.

    colNames defaults to ['X', 'Y', 'Z'].

    Return a single recarray with only the selected columns, and only the
    selected points.
    """
    # avoid a mutable default argument; default column set is X/Y/Z
    if colNames is None:
        colNames = ['X', 'Y', 'Z']

    datafiles = lidarprocessor.DataFiles()
    # could be a list
    datafiles.infile = lidarprocessor.LidarFile(filename, lidarprocessor.READ)

    otherargs = lidarprocessor.OtherArgs()
    otherargs.classification = classification
    otherargs.colNames = colNames
    otherargs.dataArrList = []
    otherargs.boundingbox = boundingbox

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    lidarprocessor.doProcessing(selectColumns,
                                datafiles,
                                otherArgs=otherargs,
                                controls=controls)

    # Put all the separate rec-arrays together
    nPts = sum(len(a) for a in otherargs.dataArrList)
    if nPts > 0:
        fullArr = numpy.zeros(nPts, dtype=otherargs.dataArrList[0].dtype)
        i = 0
        for dataArr in otherargs.dataArrList:
            numPts = len(dataArr)
            fullArr[i:i + numPts] = dataArr
            i += numPts
    else:
        # no points selected at all
        fullArr = numpy.array([])

    return fullArr
예제 #14
0
def translate(info, infile, outfile):
    """
    Does the translation between SPD V4 and PulseWaves format files.

    * info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files.
    """
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)
    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('PulseWaves')

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())

    lidarprocessor.doProcessing(transFunc, dataFiles, controls=controls)
예제 #15
0
def run(oldpath, newpath):
    """
    Runs the 23rd basic test suite. Tests:

    toolbox.interpolation.interpPoints
    """
    inputSPD = os.path.join(oldpath, INPUT_SPD)
    outputDAT = os.path.join(newpath, OUTPUT_DATA)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(inputSPD, lidarprocessor.READ)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.output = None

    lidarprocessor.doProcessing(processChunk, dataFiles, otherArgs=otherArgs)

    # save the result and compare against the reference copy
    numpy.save(outputDAT, otherArgs.output)
    utils.compareNumpyFiles(os.path.join(oldpath, OUTPUT_DATA), outputDAT)
예제 #16
0
def run(oldpath, newpath):
    """
    Runs the 11th basic test suite. Tests:

    Updating a column in an SPDV3 file
    """
    origFile = os.path.join(oldpath, ORIG_FILE)
    update = os.path.join(newpath, UPDATE_FILE)
    # update a copy so the original stays untouched
    shutil.copyfile(origFile, update)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(update, lidarprocessor.UPDATE)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(False)

    lidarprocessor.doProcessing(updatePointFunc, dataFiles, controls=controls)

    utils.compareLiDARFiles(os.path.join(oldpath, UPDATE_FILE), update)
예제 #17
0
def run(oldpath, newpath):
    """
    Runs the 12th basic test suite. Tests:

    Updating a spd file with information in a raster
    """
    updateFile = os.path.join(newpath, UPDATE_FILE)
    # work on a copy of the original file
    shutil.copyfile(os.path.join(oldpath, ORIG_FILE), updateFile)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input = lidarprocessor.LidarFile(updateFile,
                                               lidarprocessor.UPDATE)
    dataFiles.imageIn = lidarprocessor.ImageFile(
        os.path.join(oldpath, IMAGE_FILE), lidarprocessor.READ)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(True)

    lidarprocessor.doProcessing(updatePointFunc, dataFiles, controls=controls)
    utils.compareLiDARFiles(os.path.join(oldpath, UPDATE_FILE), updateFile)
예제 #18
0
def rasterize(infiles,
              outfile,
              attributes,
              function=DEFAULT_FUNCTION,
              atype=DEFAULT_ATTRIBUTE,
              background=0,
              binSize=None,
              extraModule=None,
              quiet=False,
              footprint=None,
              windowSize=None,
              driverName=None,
              driverOptions=None):
    """
    Apply the given function to the list of input files and create
    an output raster file.

    attributes is a list of attributes to run the function on. The
    function name must contain a module name and the specified function
    must take a masked array, plus the 'axis' parameter. atype should be
    a string containing either POINT|PULSE.
    background is the background raster value to use. binSize is the bin
    size to use which defaults to that of the spatial indices used.
    extraModule should be a string with an extra module to import - use
    this for modules other than numpy that are needed by your function.
    quiet means no progress etc
    footprint specifies the footprint type
    """
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.inList = [
        lidarprocessor.LidarFile(fname, lidarprocessor.READ)
        for fname in infiles
    ]

    dataFiles.imageOut = lidarprocessor.ImageFile(outfile,
                                                  lidarprocessor.CREATE)
    dataFiles.imageOut.setRasterIgnore(background)
    if driverName is not None:
        dataFiles.imageOut.setRasterDriver(driverName)
    if driverOptions is not None:
        dataFiles.imageOut.setRasterDriverOptions(driverOptions)

    # import any other modules required
    globalsDict = globals()
    if extraModule is not None:
        globalsDict[extraModule] = importlib.import_module(extraModule)

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(True)
    if not quiet:
        controls.setProgress(cuiprogress.GDALProgressBar())
    if binSize is not None:
        controls.setReferenceResolution(binSize)
    if footprint is not None:
        controls.setFootprint(footprint)
    if windowSize is not None:
        controls.setWindowSize(windowSize)

    otherArgs = lidarprocessor.OtherArgs()
    # resolve the named function to a callable.
    # NOTE(review): eval() on a caller-supplied string — never pass
    # untrusted input as 'function'
    otherArgs.func = eval(function, globalsDict)
    otherArgs.attributes = attributes
    otherArgs.background = background

    atype = atype.upper()
    if atype == 'POINT':
        otherArgs.atype = POINT
    elif atype == 'PULSE':
        otherArgs.atype = PULSE
    else:
        msg = 'Unsupported type %s' % atype
        raise RasterizationError(msg)

    lidarprocessor.doProcessing(writeImageFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
예제 #19
0
def translate(info,
              infile,
              outfile,
              expectRange=None,
              spatial=None,
              extent=None,
              scaling=None,
              epsg=None,
              binSize=None,
              buildPulses=False,
              pulseIndex=None,
              nullVals=None,
              constCols=None,
              useLASScaling=False):
    """
    Main function which does the work.

    * info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files.
    * expectRange is a list of (type, varname, min, max) tuples.
    * spatial is True or False - dictates whether we are processing
      spatially or not. If True then a spatial index will be created on
      the output file on the fly.
    * extent is a tuple of values specifying the extent to work with:
        xmin ymin xmax ymax
    * scaling is a list of (type, varname, dtype, gain, offset) tuples.
    * if epsg is not None should be a EPSG number to use as the coord system
    * binSize is the used by the LAS spatial index
    * buildPulses dictates whether to attempt to build the pulse structure
    * pulseIndex should be 'FIRST_RETURN' or 'LAST_RETURN' and determines
      how the pulses are indexed.
    * nullVals is a list of (type, varname, value) tuples.
    * constCols is a list of (type, varname, dtype, value) tuples.
    * if useLASScaling is True, then the scaling used in the LAS file
      is used for columns. Overrides anything given in 'scaling'
    """
    # merge any user supplied scaling over the defaults
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    # validate the option combination before any work is done
    if epsg is None and (info.wkt is None or len(info.wkt) == 0):
        raise generic.LiDARInvalidSetting(
            'No projection set in las file. Must set EPSG on command line')

    if spatial and not info.hasSpatialIndex:
        raise generic.LiDARInvalidSetting(
            'Spatial processing requested but file does not have spatial index')

    if spatial and binSize is None:
        raise generic.LiDARInvalidSetting(
            "For spatial processing, the bin size must be set")

    if extent is not None and not spatial:
        raise generic.LiDARInvalidSetting(
            'Extent can only be set when processing spatially')

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    # map the pulse index name onto the las driver constant
    pulseIndexValues = {'FIRST_RETURN': las.FIRST_RETURN,
                        'LAST_RETURN': las.LAST_RETURN}
    if pulseIndex not in pulseIndexValues:
        msg = "Pulse index argument not recognised."
        raise generic.LiDARInvalidSetting(msg)
    dataFiles.input1.setLiDARDriverOption('PULSE_INDEX',
                                          pulseIndexValues[pulseIndex])
    dataFiles.input1.setLiDARDriverOption('BUILD_PULSES', buildPulses)
    if spatial:
        dataFiles.input1.setLiDARDriverOption('BIN_SIZE', float(binSize))

    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                                           False)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(spatial)

    if extent is not None:
        xMin, yMin, xMax, yMax = [float(val) for val in extent]
        pixgrid = pixelgrid.PixelGridDefn(xMin=xMin, yMin=yMin,
                                          xMax=xMax, yMax=yMax,
                                          xRes=binSize, yRes=binSize)
        controls.setReferencePixgrid(pixgrid)
        controls.setFootprint(lidarprocessor.BOUNDS_FROM_REFERENCE)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.epsg = epsg
    otherArgs.expectRange = expectRange
    otherArgs.lasInfo = info
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols
    otherArgs.useLASScaling = useLASScaling

    lidarprocessor.doProcessing(transFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
예제 #20
0
def splitFileIntoTiles(infiles, binSize=1.0, blockSize=None, 
        tempDir='.', extent=None, indexType=INDEX_CARTESIAN,
        pulseIndexMethod=PULSE_INDEX_FIRST_RETURN, 
        footprint=lidarprocessor.UNION, outputFormat='SPDV4',
        buildPulses=False):
    """
    Takes a filename (or list of filenames) and creates a tempfile for every 
    block (using blockSize).
    If blockSize isn't set then it is picked using BLOCKSIZE_N_BLOCKS.
    binSize is the size of the bins to create the spatial index.
    indexType is one of the INDEX_* constants.
    pulseIndexMethod is one of the PULSE_INDEX_* constants.
    footprint is one of lidarprocessor.UNION or lidarprocessor.INTERSECTION
    and is how to combine extents if there is more than one file.
    outputFormat is either 'SPDV4' or 'LAS'. 'LAS' outputs only supported
    when input is 'LAS'.
    buildPulses relevant for 'LAS' and determines whether to build the 
    pulse structure or not. 

    returns the header of the first input file, the extent used and a list
    of (fname, extent) tuples that contain the information for 
    each tempfile.
    """

    # accept a single filename as well as a list of them.
    # BUGFIX: this previously tested against 'basestring', which does not
    # exist on Python 3 and raised NameError there.
    if isinstance(infiles, str):
        infiles = [infiles]

    # use the first file for header. Not
    # clear how to combine headers from multiple inputs
    # or if one should.
    # leave setting this in case we grab it when working out the extent.
    firstHeader = None
    
    if extent is None:
        # work out the overall extent from the file headers
        pixGrid = None
        for infile in infiles:
            info = generic.getLidarFileInfo(infile)
            header = info.header

            if firstHeader is None:
                firstHeader = header

            # pick the min/max header fields that match the requested
            # indexing method (cartesian XY, spherical az/zen, or scanline)
            try:
                if indexType == INDEX_CARTESIAN:
                    xMax = header['X_MAX']
                    xMin = header['X_MIN']
                    yMax = header['Y_MAX']
                    yMin = header['Y_MIN']
                elif indexType == INDEX_SPHERICAL:
                    xMax = header['AZIMUTH_MAX']
                    xMin = header['AZIMUTH_MIN']
                    yMax = header['ZENITH_MAX']
                    yMin = header['ZENITH_MIN']
                elif indexType == INDEX_SCAN:
                    xMax = header['SCANLINE_IDX_MAX']
                    xMin = header['SCANLINE_IDX_MIN']
                    yMax = header['SCANLINE_MAX']
                    yMin = header['SCANLINE_MIN']
                else:
                    msg = 'unsupported indexing method'
                    raise generic.LiDARSpatialIndexNotAvailable(msg)
            except KeyError:
                msg = 'info for creating bounding box not available'
                raise generic.LiDARFunctionUnsupported(msg)

            # combine the per-file grids using the requested footprint rule
            newPixGrid = pixelgrid.PixelGridDefn(xMin=xMin, xMax=xMax, 
                            yMin=yMin, yMax=yMax, xRes=binSize, yRes=binSize)
            if pixGrid is None:
                pixGrid = newPixGrid
            elif footprint == lidarprocessor.UNION:
                pixGrid = pixGrid.union(newPixGrid)
            elif footprint == lidarprocessor.INTERSECTION:
                pixGrid = pixGrid.intersection(newPixGrid)
            else:
                msg = 'Unsupported footprint option'
                raise generic.LiDARFunctionUnsupported(msg)

        # TODO: we treat points as being in the block when they are >=
        # the min coords and < the max coords. What happens on the bottom
        # and right margins?? We could possibly miss points that are there.

        # round the coords outwards to the nearest multiple of binSize
        xMin = numpy.floor(pixGrid.xMin / binSize) * binSize
        yMin = numpy.floor(pixGrid.yMin / binSize) * binSize
        xMax = numpy.ceil(pixGrid.xMax / binSize) * binSize
        yMax = numpy.ceil(pixGrid.yMax / binSize) * binSize
            
        extent = Extent(xMin, xMax, yMin, yMax, binSize)
        
    else:
        # ensure that our binSize comes from their extent
        binSize = extent.binSize

        # get the first header since we aren't doing the above
        info = generic.getLidarFileInfo(infiles[0])
        firstHeader = info.header
    
    if blockSize is None:
        # pick a block size from the smaller axis, capped at 200 units
        minAxis = min(extent.xMax - extent.xMin, extent.yMax - extent.yMin)
        blockSize = min(minAxis / BLOCKSIZE_N_BLOCKS, 200.0)
        # make it a multiple of binSize
        blockSize = int(numpy.ceil(blockSize / binSize)) * binSize
    else:
        # ensure that their given block size can be evenly divided by 
        # the binSize
        # the modulo operator doesn't work too well with floats 
        # so we take a different approach
        a = blockSize / binSize
        if a != int(a):
            msg = 'blockSize must be evenly divisible by the binSize'
            raise generic.LiDARInvalidData(msg)
        
    # walk block-by-block left-to-right, top-to-bottom, opening a
    # temporary output driver for each block
    extentList = []
    subExtent = Extent(extent.xMin, extent.xMin + blockSize, 
            extent.yMax - blockSize, extent.yMax, binSize)
    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    tmpSuffix = '.' + outputFormat.lower()

    bMoreToDo = True
    while bMoreToDo:
        # reserve a unique temp filename; close the fd as the driver
        # re-opens the path itself
        fd, fname = tempfile.mkstemp(suffix=tmpSuffix, dir=tempDir)
        os.close(fd)
        
        userClass = lidarprocessor.LidarFile(fname, generic.CREATE)
        if outputFormat == 'SPDV4':
            userClass.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING', False)
            driver = spdv4.SPDV4File(fname, generic.CREATE, controls, userClass)
        elif outputFormat == 'LAS':
            driver = las.LasFile(fname, generic.CREATE, controls, userClass)
        else:
            msg = 'Unsupported output format %s' % outputFormat
            raise generic.LiDARFunctionUnsupported(msg)
        # copy the extent since subExtent is mutated below
        data = (copy.copy(subExtent), driver)
        extentList.append(data)

        # move it along
        subExtent.xMin += blockSize
        subExtent.xMax += blockSize

        if subExtent.xMin >= extent.xMax:
            # next line down
            subExtent.xMin = extent.xMin
            subExtent.xMax = extent.xMin + blockSize
            subExtent.yMax -= blockSize
            subExtent.yMin -= blockSize
            
        # done?
        bMoreToDo = subExtent.yMax > extent.yMin

    # ok now set up to read the input files using lidarprocessor
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.inputs = []

    for infile in infiles:
        input = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

        # must be a better way of doing this, but this is what 
        # translate does. We don't know what formats we are getting ahead of time
        info = generic.getLidarFileInfo(infile)
        inFormat = info.getDriverName()
        if inFormat == 'LAS':
            input.setLiDARDriverOption('BUILD_PULSES', buildPulses)

        dataFiles.inputs.append(input)
        
    controls = lidarprocessor.Controls()
    progress = cuiprogress.GDALProgressBar()
    progress.setLabelText('Splitting...')
    controls.setProgress(progress)
    controls.setSpatialProcessing(False)
    controls.setMessageHandler(lidarprocessor.silentMessageFn)
        
    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.outList = extentList
    otherArgs.indexType = indexType
    otherArgs.pulseIndexMethod = pulseIndexMethod
        
    # classifyFunc routes each pulse/point into the matching block driver
    lidarprocessor.doProcessing(classifyFunc, dataFiles, controls=controls, 
                otherArgs=otherArgs)
    
    # close all the output files and save their names to return
    newExtentList = []
    for subExtent, driver in extentList:
        fname = driver.fname
        driver.close()

        data = (fname, subExtent)
        newExtentList.append(data)

    return firstHeader, extent, newExtentList
예제 #21
0
def indexAndMerge(extentList, extent, wkt, outfile, header):
    """
    Internal method to merge all the temporary files into the output
    spatially indexing as we go.

    * extentList is a list of (fname, subExtent) tuples (as returned by
      splitFileIntoTiles) naming the temporary SPDV4 tiles.
    * extent is the overall Extent covering all tiles.
    * wkt is the projection string for the output pixel grid.
    * outfile is the path of the merged, spatially-indexed SPDV4 file.
    * header is the header dict to write to the output (counts and
      software/creation fields are cleared first).
    """
    # read the temp tiles non-spatially; they are merged tile-by-tile
    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    # open in read mode
    driverExtentList = []
    for fname, subExtent in extentList:
        userClass = lidarprocessor.LidarFile(fname, generic.READ)
        driver = spdv4.SPDV4File(fname, generic.READ, controls, userClass)
        
        data = (subExtent, driver)
        driverExtentList.append(data)


    # create output file with spatial processing enabled so a spatial
    # index is built as data is written
    userClass = lidarprocessor.LidarFile(outfile, generic.CREATE)
    userClass.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING', False)
    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(True)
    outDriver = spdv4.SPDV4File(outfile, generic.CREATE, controls, userClass)
    pixGrid = pixelgrid.PixelGridDefn(xMin=extent.xMin, xMax=extent.xMax,
                yMin=extent.yMin, yMax=extent.yMax, projection=wkt,
                xRes=extent.binSize, yRes=extent.binSize)
    outDriver.setPixelGrid(pixGrid)
    
    # update header with the output grid dimensions
    nrows, ncols = pixGrid.getDimensions()
    header['NUMBER_BINS_X'] = ncols
    header['NUMBER_BINS_Y'] = nrows

    # clobber these values since we don't want to 
    # start with the number in the original file
    # they will be reset to 0 in the new file.
    # ROBUSTNESS: pop() with a default so an already-absent key is not
    # a KeyError (plain 'del' crashed on partial headers).
    for key in ('NUMBER_OF_POINTS', 'NUMBER_OF_PULSES',
                'GENERATING_SOFTWARE', 'CREATION_DATETIME'):
        header.pop(key, None)
    
    progress = cuiprogress.GDALProgressBar()
    progress.setLabelText('Merging...')
    progress.setTotalSteps(len(extentList))
    progress.setProgress(0)
    nFilesProcessed = 0
    nFilesWritten = 0
    for subExtent, driver in driverExtentList:

        # read in all the data
        # NOTE: can't write data in blocks as the driver needs to be able to 
        # sort all the data in one go.
        npulses = driver.getTotalNumberPulses()
        if npulses > 0:
            pulseRange = generic.PulseRange(0, npulses)
            driver.setPulseRange(pulseRange)
            pulses = driver.readPulsesForRange()
            points = driver.readPointsByPulse()
            waveformInfo = driver.readWaveformInfo()
            recv = driver.readReceived()
            trans = driver.readTransmitted()

            outDriver.setExtent(subExtent)
            # only copy scaling/header from the first tile actually written
            if nFilesWritten == 0:
                copyScaling(driver, outDriver)
                outDriver.setHeader(header)

            # on create, a spatial index is created
            outDriver.writeData(pulses, points, trans, recv, 
                            waveformInfo)
            # BUGFIX: removed the stale 'bDataWritten' flag that was never
            # set True; its trailing 'if bDataWritten: nFilesWritten += 1'
            # was dead code and would have double-counted if ever enabled.
            nFilesWritten += 1

        # close the driver while we are here
        driver.close()
            
        nFilesProcessed += 1
        progress.setProgress(nFilesProcessed)

    outDriver.close()
예제 #22
0
def translate(info,
              infile,
              outfile,
              expectRange=None,
              scalings=None,
              internalrotation=False,
              magneticdeclination=0.0,
              externalrotationfn=None,
              nullVals=None,
              constCols=None,
              epsg=None,
              wkt=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * scaling is a list of tuples with (type, varname, gain, offset).
    * if internalrotation is True then the internal rotation will be applied
        to data. Overrides externalrotationfn
    * if externalrotationfn is not None then then the external rotation matrix
        will be read from this file and applied to the data
    * magneticdeclination. If not 0, then this will be applied to the data
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tupes with (type, varname, dtype, value)
    """
    # internal and external rotation are mutually exclusive
    if internalrotation and externalrotationfn:
        msg = "Can't use both internal and external rotation"
        raise generic.LiDARInvalidSetting(msg)

    scalingsDict = translatecommon.overRideDefaultScalings(scalings)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    # work out which rotation matrix (if any) to pass to the driver;
    # it is also kept so the output header can record it
    rotMatrix = None
    if internalrotation:
        header = info.header
        if "ROTATION_MATRIX" not in header:
            msg = "Internal Rotation requested but no information found in input file"
            raise generic.LiDARInvalidSetting(msg)
        rotMatrix = header["ROTATION_MATRIX"]
        dataFiles.input1.setLiDARDriverOption("ROTATION_MATRIX", rotMatrix)
    elif externalrotationfn is not None:
        # read the matrix from the given whitespace-delimited text file
        rotMatrix = numpy.loadtxt(externalrotationfn,
                                  ndmin=2,
                                  delimiter=" ",
                                  dtype=numpy.float32)
        dataFiles.input1.setLiDARDriverOption("ROTATION_MATRIX", rotMatrix)

    # only pass the declination through when it is non-default
    if magneticdeclination != 0:
        dataFiles.input1.setLiDARDriverOption("MAGNETIC_DECLINATION",
                                              magneticdeclination)

    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                                           False)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(False)

    # bundle everything transFunc needs: the input header (so it isn't
    # collected again), scaling, rotation, expected ranges, nulls,
    # constant columns and the projection info
    otherArgs = lidarprocessor.OtherArgs()
    for attrName, value in (("rieglInfo", info.header),
                            ("scaling", scalingsDict),
                            ("rotationMatrix", rotMatrix),
                            ("expectRange", expectRange),
                            ("nullVals", nullVals),
                            ("constCols", constCols),
                            ("epsg", epsg),
                            ("wkt", wkt)):
        setattr(otherArgs, attrName, value)

    lidarprocessor.doProcessing(transFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
예제 #23
0
def translate(info,
              infile,
              outfile,
              colTypes,
              pulseCols=None,
              expectRange=None,
              scaling=None,
              classificationTranslation=None,
              nullVals=None,
              constCols=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * scaling is a list of tuples with (type, varname, gain, offset).
    * colTypes is a list of name and data type tuples for every column
    * pulseCols is a list of strings defining the pulse columns
    * classificationTranslation is a list of tuples specifying how to translate
        between the codes within the files and the 
        lidarprocessor.CLASSIFICATION_* ones. First element of tuple is file 
        number, second the lidarprocessor code.
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tupes with (type, varname, dtype, value)
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    # map the type-name strings onto actual numpy dtypes for the driver
    typedCols = [(name, translatecommon.STRING_TO_DTYPE[typeStr.upper()])
                 for (name, typeStr) in colTypes]
    dataFiles.input1.setLiDARDriverOption('COL_TYPES', typedCols)

    if pulseCols is not None:
        dataFiles.input1.setLiDARDriverOption('PULSE_COLS', pulseCols)

    if classificationTranslation is not None:
        dataFiles.input1.setLiDARDriverOption('CLASSIFICATION_CODES',
                                              classificationTranslation)

    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                                           False)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(False)

    # values transFunc needs while processing
    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.expectRange = expectRange
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols

    lidarprocessor.doProcessing(transFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
예제 #24
0
def translate(info,
              infile,
              outfile,
              expectRange=None,
              spatial=False,
              extent=None,
              scaling=None,
              nullVals=None,
              constCols=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * spatial is True or False - dictates whether we are processing spatially or not.
        If True then spatial index will be created on the output file on the fly.
    * extent is a tuple of values specifying the extent to work with. 
        xmin ymin xmax ymax
    * scaling is a list of tuples with (type, varname, gain, offset).
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tupes with (type, varname, dtype, value)
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    # guard clauses: spatial mode needs an index on the input, and an
    # extent only makes sense in spatial mode
    if spatial and not info.hasSpatialIndex:
        msg = "Spatial processing requested but file does not have spatial index"
        raise generic.LiDARInvalidSetting(msg)
    if extent is not None and not spatial:
        msg = 'Extent can only be set when processing spatially'
        raise generic.LiDARInvalidSetting(msg)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)
    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                                           False)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(spatial)

    if extent is not None:
        # build a reference grid from (xmin ymin xmax ymax) using the
        # input file's bin size, and clip processing to it
        coords = [float(v) for v in extent]
        cellSize = info.header['BIN_SIZE']
        refGrid = pixelgrid.PixelGridDefn(xMin=coords[0],
                                          yMin=coords[1],
                                          xMax=coords[2],
                                          yMax=coords[3],
                                          xRes=cellSize,
                                          yRes=cellSize)
        controls.setReferencePixgrid(refGrid)
        controls.setFootprint(lidarprocessor.BOUNDS_FROM_REFERENCE)

    # values transFunc needs while processing
    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.expectRange = expectRange
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols

    lidarprocessor.doProcessing(transFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)