def testWrite(infile, outfile, binSize):
    """
    Run writeImageFunc over infile, writing the result to the raster
    outfile. otherargs.interp selects the "pynn" interpolation method
    (interpreted by writeImageFunc).

    binSize is currently unused (the window size is fixed at 16 and the
    reference resolution is left at its default) but the parameter is
    kept so existing callers continue to work.
    """
    outNull = 0

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)
    dataFiles.imageOut1 = lidarprocessor.ImageFile(outfile,
        lidarprocessor.CREATE)
    dataFiles.imageOut1.setRasterIgnore(outNull)

    controls = lidarprocessor.Controls()
    controls.setWindowSize(16)
    progress = cuiprogress.GDALProgressBar()
    controls.setProgress(progress)

    otherargs = lidarprocessor.OtherArgs()
    otherargs.ignore = outNull
    otherargs.interp = "pynn"
    otherargs.minVal = None
    otherargs.outNull = outNull

    lidarprocessor.doProcessing(writeImageFunc, dataFiles,
        otherArgs=otherargs, controls=controls)
def calculateCheckSum(infile, windowSize=None):
    """
    Returns a Checksum instance for the given file
    """
    print('Calculating LiDAR Checksum...')

    # input only - results are accumulated on the OtherArgs object
    files = lidarprocessor.DataFiles()
    files.input = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    args = lidarprocessor.OtherArgs()
    args.checksum = Checksum()

    ctrl = lidarprocessor.Controls()
    ctrl.setProgress(cuiprogress.GDALProgressBar())
    ctrl.setMessageHandler(lidarprocessor.silentMessageFn)
    if windowSize is not None:
        ctrl.setWindowSize(windowSize)

    lidarprocessor.doProcessing(pylidarChecksum, files, otherArgs=args,
        controls=ctrl)

    # as a last step, calculate the digests
    args.checksum.convertToDigests()
    return args.checksum
def getWavePacketDescriptions(fname):
    """
    When writing a LAS file, it is necessary to write information to a
    table in the header that really belongs to the waveforms. This function
    reads the waveform info from the input file (in any format) and gathers
    the unique information from it so it can be passed to as the
    WAVEFORM_DESCR LAS driver option.

    Note: LAS only supports received waveforms.
    """
    # deferred import - kept local so importing this module does not
    # require lidarprocessor at load time
    from pylidar import lidarprocessor

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input = lidarprocessor.LidarFile(fname, lidarprocessor.READ)

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.uniqueInfo = None

    # pass otherArgs by keyword for consistency with the other
    # doProcessing() call sites in this codebase
    lidarprocessor.doProcessing(gatherWavePackets, dataFiles,
        otherArgs=otherArgs, controls=controls)

    return otherArgs.uniqueInfo
def run(oldpath, newpath):
    """
    Runs the 15th basic test suite. Tests:

    creating a raster using the 'new' non-spatial mode
    """
    srcFile = os.path.join(oldpath, INPUT_SPD)
    demFile = os.path.join(newpath, OUTPUT_DEM)

    info = generic.getLidarFileInfo(srcFile)
    xMin, yMax, ncols, nrows = spatial.getGridInfoFromHeader(info.header,
        BINSIZE)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(srcFile, lidarprocessor.READ)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.outImage = numpy.zeros((nrows, ncols))
    otherArgs.xMin = xMin
    otherArgs.yMax = yMax

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    lidarprocessor.doProcessing(processChunk, dataFiles, otherArgs=otherArgs,
        controls=controls)

    # write the accumulated image out and compare against the reference
    writer = spatial.ImageWriter(demFile, tlx=xMin, tly=yMax, binSize=BINSIZE)
    writer.setLayer(otherArgs.outImage)
    writer.close()

    utils.compareImageFiles(os.path.join(oldpath, OUTPUT_DEM), demFile)
def run(oldpath, newpath):
    """
    Runs the 17th basic test suite. Tests:

    update a spd file using an image using the non spatial mode
    """
    srcSPD = os.path.join(oldpath, INPUT_SPD)
    workSPD = os.path.join(newpath, UPDATE_SPD)
    # operate on a copy so the reference input is left untouched
    shutil.copyfile(srcSPD, workSPD)

    demFile = os.path.join(oldpath, INPUT_DEM)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(workSPD,
        lidarprocessor.UPDATE)

    otherArgs = lidarprocessor.OtherArgs()
    (otherArgs.inImage, otherArgs.xMin, otherArgs.yMax,
        otherArgs.binSize) = spatial.readImageLayer(demFile)

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    lidarprocessor.doProcessing(processChunk, dataFiles, otherArgs=otherArgs,
        controls=controls)

    expected = os.path.join(oldpath, UPDATE_SPD)
    utils.compareLiDARFiles(expected, workSPD)
def translate(info, infile, outfile, expectRange=None, scalings=None,
        nullVals=None, constCols=None, epsg=None, wkt=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * scalings is a list of tuples with (type, varname, gain, offset).
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tuples with (type, varname, dtype, value)
    * epsg/wkt optionally give the coordinate system for the output.
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scalings)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    # output is always SPDV4
    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
        lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
        False)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(False)

    otherArgs = lidarprocessor.OtherArgs()
    # header kept here so we don't collect it again
    otherArgs.rieglInfo = info.header
    # the default/overriden scaling
    otherArgs.scaling = scalingsDict
    # expected range of the data
    otherArgs.expectRange = expectRange
    # null values
    otherArgs.nullVals = nullVals
    # constant columns
    otherArgs.constCols = constCols
    otherArgs.epsg = epsg
    otherArgs.wkt = wkt

    lidarprocessor.doProcessing(transFunc, dataFiles, controls=controls,
        otherArgs=otherArgs)
def readLidarPoints(filename, classification=None, boundingbox=None,
        colNames=None):
    """
    Read the requested columns for the points in the given file (or files
    if filename is a list), in a memory-efficient manner.

    Uses pylidar to read only a block of points at a time, and select out
    just the desired columns. When the input file is a .las file, this
    saves quite a lot of memory, in comparison to reading in all points
    at once, since all columns for all points have to be read in at the
    same time.

    Optionally filter by CLASSIFICATION column with a value from the
    generic.CLASSIFICATION_* constants.

    If boundingbox is given, it is a tuple of
    (xmin, xmax, ymin, ymax)
    and only points within this box are included.

    colNames defaults to ['X', 'Y', 'Z'] when not given.

    Return a single recarray with only the selected columns, and only the
    selected points.
    """
    # avoid the mutable default argument - the old ['X', 'Y', 'Z'] default
    # list object was shared between calls
    if colNames is None:
        colNames = ['X', 'Y', 'Z']

    datafiles = lidarprocessor.DataFiles()
    # could be a list
    datafiles.infile = lidarprocessor.LidarFile(filename, lidarprocessor.READ)

    otherargs = lidarprocessor.OtherArgs()
    otherargs.classification = classification
    otherargs.colNames = colNames
    otherargs.dataArrList = []
    otherargs.boundingbox = boundingbox

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    lidarprocessor.doProcessing(selectColumns, datafiles,
        otherArgs=otherargs, controls=controls)

    # Put all the separate rec-arrays together.
    # numpy.concatenate preserves the (structured) dtype of the pieces,
    # replacing the manual zeros()-and-fill loop.
    if otherargs.dataArrList:
        fullArr = numpy.concatenate(otherargs.dataArrList)
    else:
        fullArr = numpy.array([])
    return fullArr
def run():
    """
    Main function. Checks the command line parameters and calls the
    canopymetrics routine.
    """
    cmdargs = getCmdargs()
    otherargs = lidarprocessor.OtherArgs()

    if cmdargs.metric == "PAVD_CALDERS2014":
        # these options share a name between the command line object and
        # otherargs, so copy them across in bulk
        for name in ('weighted', 'heightcol', 'heightbinsize', 'minheight',
                'maxheight', 'zenithbinsize', 'minzenith', 'maxzenith',
                'minazimuth', 'maxazimuth', 'planecorrection', 'gridsize',
                'gridbinsize', 'excludedclasses', 'externaltransformfn',
                'totalpaimethod', 'totalpai', 'externaldem'):
            setattr(otherargs, name, getattr(cmdargs, name))
        # this one has a different name on the command line object
        otherargs.rptfile = cmdargs.reportfile
    elif cmdargs.metric == "VOXEL_HANCOCK2016":
        otherargs.voxelsize = numpy.repeat(cmdargs.voxelsize, 3)
        otherargs.bounds = numpy.array(cmdargs.bounds, dtype=numpy.float32)
        otherargs.rasterdriver = cmdargs.rasterdriver
        otherargs.externaltransformfn = cmdargs.externaltransformfn
        otherargs.externaldem = cmdargs.externaldem
    elif cmdargs.metric == "PGAP_ARMSTON2013":
        # no extra arguments needed for this metric
        pass
    else:
        msg = 'Unsupported metric %s' % cmdargs.metric
        raise CanopyMetricError(msg)

    canopymetric.runCanopyMetric(cmdargs.infiles, cmdargs.output,
        cmdargs.metric, otherargs)
def translate(info, infile, outfile, expectRange=None, scaling=None,
        nullVals=None, constCols=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * scaling is a list of tuples with (type, varname, dtype, gain, offset).
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tuples with (type, varname, dtype, value)
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    # output is always SPDV4
    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
        lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
        False)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    # NOTE: no setSpatialProcessing() call here - the processor default
    # applies

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.expectRange = expectRange
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols

    lidarprocessor.doProcessing(transFunc, dataFiles, controls=controls,
        otherArgs=otherArgs)
def main(inFile, shpfile, outFile):
    """
    Main function. Runs the 'chop' processing function over inFile with
    the first layer of shpfile, writing the result to outFile as LAS.
    """
    # set up and input and output
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input = lidarprocessor.LidarFile(inFile, lidarprocessor.READ)
    dataFiles.input.setLiDARDriverOption('BUILD_PULSES', False)
    dataFiles.output = lidarprocessor.LidarFile(outFile, lidarprocessor.CREATE)
    dataFiles.output.setLiDARDriver('LAS')

    # open the OGR layer so we pass it in.
    # ogr.Open returns None rather than raising on failure - fail here
    # with a clear message instead of an AttributeError below
    ogrds = ogr.Open(shpfile)
    if ogrds is None:
        msg = 'Unable to open vector file %s' % shpfile
        raise IOError(msg)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.layer = ogrds.GetLayer(0)

    # run the processor
    lidarprocessor.doProcessing(chop, dataFiles, otherArgs=otherArgs)
def run(oldpath, newpath):
    """
    Runs the 23rd basic test suite. Tests:

    toolbox.interpolation.interpPoints
    """
    inputSPD = os.path.join(oldpath, INPUT_SPD)
    outputDAT = os.path.join(newpath, OUTPUT_DATA)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(inputSPD, lidarprocessor.READ)

    otherArgs = lidarprocessor.OtherArgs()
    # processChunk is expected to populate this
    otherArgs.output = None

    lidarprocessor.doProcessing(processChunk, dataFiles, otherArgs=otherArgs)

    numpy.save(outputDAT, otherArgs.output)
    utils.compareNumpyFiles(os.path.join(oldpath, OUTPUT_DATA), outputDAT)
def run(oldpath, newpath):
    """
    Runs the 9th basic test suite. Tests:

    Creating a canopy profile
    """
    # all the PAVD_CALDERS2014 settings in one place
    settings = {'weighted': False, 'heightcol': 'Z', 'heightbinsize': 0.5,
        'minheight': 0.0, 'maxheight': 50.0, 'zenithbinsize': 5.0,
        'minazimuth': [0.0], 'maxazimuth': [360.0], 'minzenith': [35.0],
        'maxzenith': [70.0], 'planecorrection': False, 'rptfile': None,
        'gridsize': 20, 'gridbinsize': 5.0, 'excludedclasses': [],
        'externaldem': None, 'totalpaimethod': "HINGE"}

    otherargs = lidarprocessor.OtherArgs()
    for key, value in settings.items():
        setattr(otherargs, key, value)

    inFile = os.path.join(oldpath, IMPORTED_SPD)
    outFile = os.path.join(newpath, CANOPY_CSV)
    runCanopyMetric([inFile], [outFile], "PAVD_CALDERS2014", otherargs)

    # compare the CSV produced against the reference output
    newData = numpy.genfromtxt(outFile, delimiter=',', names=True)
    oldData = numpy.genfromtxt(os.path.join(oldpath, CANOPY_CSV),
        delimiter=',', names=True)

    if not (newData == oldData).all():
        msg = 'New canopy profile does not match old'
        raise utils.TestingDataMismatch(msg)
def translate(info, infile, outfile, colTypes, pulseCols=None,
        expectRange=None, scaling=None, classificationTranslation=None,
        nullVals=None, constCols=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * scaling is a list of tuples with (type, varname, gain, offset).
    * colTypes is a list of name and data type tuples for every column
    * pulseCols is a list of strings defining the pulse columns
    * classificationTranslation is a list of tuples specifying how to
      translate between the codes within the files and the
      lidarprocessor.CLASSIFICATION_* ones. First element of tuple is the
      file code, second the lidarprocessor code.
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tuples with (type, varname, dtype, value)
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    # input side
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    # the driver wants numpy dtypes, not the strings we were given
    numpyColTypes = [(name,
        translatecommon.STRING_TO_DTYPE[typeString.upper()])
        for name, typeString in colTypes]
    dataFiles.input1.setLiDARDriverOption('COL_TYPES', numpyColTypes)

    if pulseCols is not None:
        dataFiles.input1.setLiDARDriverOption('PULSE_COLS', pulseCols)

    if classificationTranslation is not None:
        dataFiles.input1.setLiDARDriverOption('CLASSIFICATION_CODES',
            classificationTranslation)

    # output side - always SPDV4
    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
        lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
        False)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(False)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.expectRange = expectRange
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols

    lidarprocessor.doProcessing(transFunc, dataFiles, controls=controls,
        otherArgs=otherArgs)
def rasterize(infiles, outfile, attributes, function=DEFAULT_FUNCTION,
        atype=DEFAULT_ATTRIBUTE, background=0, binSize=None,
        extraModule=None, quiet=False, footprint=None, windowSize=None,
        driverName=None, driverOptions=None):
    """
    Apply the given function to the list of input files and create
    an output raster file. attributes is a list of attributes to run
    the function on. The function name must contain a module
    name and the specified function must take a masked array, plus
    the 'axis' parameter. atype should be a string containing
    either POINT|PULSE.

    background is the background raster value to use. binSize is the bin size
    to use which defaults to that of the spatial indices used. extraModule
    should be a string with an extra module to import - use this for modules
    other than numpy that are needed by your function. quiet means no progress
    etc. footprint specifies the footprint type.

    windowSize, if not None, overrides the processing window size.
    driverName, if not None, selects the raster driver for the output and
    driverOptions, if not None, is passed through as its creation options.
    """
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.inList = [lidarprocessor.LidarFile(fname, lidarprocessor.READ)
        for fname in infiles]
    dataFiles.imageOut = lidarprocessor.ImageFile(outfile,
        lidarprocessor.CREATE)
    dataFiles.imageOut.setRasterIgnore(background)
    if driverName is not None:
        dataFiles.imageOut.setRasterDriver(driverName)
    if driverOptions is not None:
        dataFiles.imageOut.setRasterDriverOptions(driverOptions)

    # import any other modules required
    globalsDict = globals()
    if extraModule is not None:
        globalsDict[extraModule] = importlib.import_module(extraModule)

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(True)
    if not quiet:
        progress = cuiprogress.GDALProgressBar()
        controls.setProgress(progress)

    if binSize is not None:
        controls.setReferenceResolution(binSize)
    if footprint is not None:
        controls.setFootprint(footprint)
    if windowSize is not None:
        controls.setWindowSize(windowSize)

    otherArgs = lidarprocessor.OtherArgs()
    # reference to the function to call.
    # NOTE(review): eval() on a caller-supplied string executes arbitrary
    # code - only pass trusted function names to this routine
    otherArgs.func = eval(function, globalsDict)
    otherArgs.attributes = attributes
    otherArgs.background = background

    atype = atype.upper()
    if atype == 'POINT':
        otherArgs.atype = POINT
    elif atype == 'PULSE':
        otherArgs.atype = PULSE
    else:
        msg = 'Unsupported type %s' % atype
        raise RasterizationError(msg)

    lidarprocessor.doProcessing(writeImageFunc, dataFiles, controls=controls,
        otherArgs=otherArgs)
def translate(info, infile, outfile, expectRange=None, spatial=None,
        extent=None, scaling=None, epsg=None, binSize=None,
        buildPulses=False, pulseIndex=None, nullVals=None, constCols=None,
        useLASScaling=False):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * spatial is True or False - dictates whether we are processing spatially
      or not. If True then spatial index will be created on the output file
      on the fly.
    * extent is a tuple of values specifying the extent to work with.
      xmin ymin xmax ymax
    * scaling is a list of tuples with (type, varname, dtype, gain, offset).
    * if epsg is not None should be a EPSG number to use as the coord system
    * binSize is the used by the LAS spatial index
    * buildPulses dictates whether to attempt to build the pulse structure
    * pulseIndex should be 'FIRST_RETURN' or 'LAST_RETURN' and determines
      how the pulses are indexed.
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tuples with (type, varname, dtype, value)
    * if useLASScaling is True, then the scaling used in the LAS file
      is used for columns. Overrides anything given in 'scaling'
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    # validate the combination of options up front so we fail before
    # creating any output
    if epsg is None and (info.wkt is None or len(info.wkt) == 0):
        msg = 'No projection set in las file. Must set EPSG on command line'
        raise generic.LiDARInvalidSetting(msg)

    if spatial and not info.hasSpatialIndex:
        msg = 'Spatial processing requested but file does not have spatial index'
        raise generic.LiDARInvalidSetting(msg)

    if spatial and binSize is None:
        msg = "For spatial processing, the bin size must be set"
        raise generic.LiDARInvalidSetting(msg)

    if extent is not None and not spatial:
        msg = 'Extent can only be set when processing spatially'
        raise generic.LiDARInvalidSetting(msg)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    # map the pulse indexing method onto the LAS driver constant
    if pulseIndex == 'FIRST_RETURN':
        dataFiles.input1.setLiDARDriverOption('PULSE_INDEX', las.FIRST_RETURN)
    elif pulseIndex == 'LAST_RETURN':
        dataFiles.input1.setLiDARDriverOption('PULSE_INDEX', las.LAST_RETURN)
    else:
        msg = "Pulse index argument not recognised."
        raise generic.LiDARInvalidSetting(msg)

    dataFiles.input1.setLiDARDriverOption('BUILD_PULSES', buildPulses)

    if spatial:
        dataFiles.input1.setLiDARDriverOption('BIN_SIZE', float(binSize))

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(spatial)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.epsg = epsg
    otherArgs.expectRange = expectRange
    otherArgs.lasInfo = info
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols
    otherArgs.useLASScaling = useLASScaling

    if extent is not None:
        extent = [float(x) for x in extent]
        pixgrid = pixelgrid.PixelGridDefn(xMin=extent[0], yMin=extent[1],
            xMax=extent[2], yMax=extent[3], xRes=binSize, yRes=binSize)
        controls.setReferencePixgrid(pixgrid)
        controls.setFootprint(lidarprocessor.BOUNDS_FROM_REFERENCE)

    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
        lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
        False)

    lidarprocessor.doProcessing(transFunc, dataFiles, controls=controls,
        otherArgs=otherArgs)
def translate(info, infile, outfile, expectRange=None, scalings=None,
        internalrotation=False, magneticdeclination=0.0,
        externalrotationfn=None, nullVals=None, constCols=None, epsg=None,
        wkt=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * scalings is a list of tuples with (type, varname, gain, offset).
    * if internalrotation is True then the internal rotation will be applied
      to data. Overrides externalrotationfn
    * if externalrotationfn is not None then the external rotation matrix
      will be read from this file and applied to the data
    * magneticdeclination. If not 0, then this will be applied to the data
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tuples with (type, varname, dtype, value)
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scalings)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    # rotation can come from the file header or an external matrix,
    # but not both
    if internalrotation and externalrotationfn:
        msg = "Can't use both internal and external rotation"
        raise generic.LiDARInvalidSetting(msg)

    rotationMatrix = None
    if internalrotation:
        if "ROTATION_MATRIX" in info.header:
            rotationMatrix = info.header["ROTATION_MATRIX"]
            dataFiles.input1.setLiDARDriverOption("ROTATION_MATRIX",
                rotationMatrix)
        else:
            msg = "Internal Rotation requested but no information found in input file"
            raise generic.LiDARInvalidSetting(msg)
    elif externalrotationfn is not None:
        rotationMatrix = numpy.loadtxt(externalrotationfn, ndmin=2,
            delimiter=" ", dtype=numpy.float32)
        dataFiles.input1.setLiDARDriverOption("ROTATION_MATRIX",
            rotationMatrix)

    # only pass the declination through when it differs from the default
    if magneticdeclination != 0:
        dataFiles.input1.setLiDARDriverOption("MAGNETIC_DECLINATION",
            magneticdeclination)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(False)

    otherArgs = lidarprocessor.OtherArgs()
    # the header so we don't collect it again
    otherArgs.rieglInfo = info.header
    # the default/overriden scaling
    otherArgs.scaling = scalingsDict
    # the rotation matrix, for updating the header
    otherArgs.rotationMatrix = rotationMatrix
    # expected range of the data
    otherArgs.expectRange = expectRange
    # null values
    otherArgs.nullVals = nullVals
    # constant columns
    otherArgs.constCols = constCols
    otherArgs.epsg = epsg
    otherArgs.wkt = wkt

    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
        lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
        False)

    lidarprocessor.doProcessing(transFunc, dataFiles, controls=controls,
        otherArgs=otherArgs)
def splitFileIntoTiles(infiles, binSize=1.0, blockSize=None, tempDir='.',
        extent=None, indexType=INDEX_CARTESIAN,
        pulseIndexMethod=PULSE_INDEX_FIRST_RETURN,
        footprint=lidarprocessor.UNION, outputFormat='SPDV4',
        buildPulses=False):
    """
    Takes a filename (or list of filenames) and creates a tempfile for every
    block (using blockSize). If blockSize isn't set then it is picked using
    BLOCKSIZE_N_BLOCKS. binSize is the size of the bins to create the spatial
    index. indexType is one of the INDEX_* constants. pulseIndexMethod is one
    of the PULSE_INDEX_* constants. footprint is one of lidarprocessor.UNION
    or lidarprocessor.INTERSECTION and is how to combine extents if there is
    more than one file. outputFormat is either 'SPDV4' or 'LAS'. 'LAS'
    outputs only supported when input is 'LAS'. buildPulses relevant for
    'LAS' and determines whether to build the pulse structure or not.

    Returns the header of the first input file, the extent used and a list
    of (fname, extent) tuples that contain the information for each tempfile.
    """
    # 'basestring' only exists on Python 2 and raises NameError on
    # Python 3 - test against str instead
    if isinstance(infiles, str):
        infiles = [infiles]

    # use the first file for header. Not
    # clear how to combine headers from multiple inputs
    # or if one should.
    # leave setting this in case we grab it when working out the extent.
    firstHeader = None

    if extent is None:
        # work out from headers
        pixGrid = None
        for infile in infiles:
            info = generic.getLidarFileInfo(infile)
            header = info.header

            if firstHeader is None:
                firstHeader = header

            try:
                if indexType == INDEX_CARTESIAN:
                    xMax = header['X_MAX']
                    xMin = header['X_MIN']
                    yMax = header['Y_MAX']
                    yMin = header['Y_MIN']
                elif indexType == INDEX_SPHERICAL:
                    xMax = header['AZIMUTH_MAX']
                    xMin = header['AZIMUTH_MIN']
                    yMax = header['ZENITH_MAX']
                    yMin = header['ZENITH_MIN']
                elif indexType == INDEX_SCAN:
                    xMax = header['SCANLINE_IDX_MAX']
                    xMin = header['SCANLINE_IDX_MIN']
                    yMax = header['SCANLINE_MAX']
                    yMin = header['SCANLINE_MIN']
                else:
                    msg = 'unsupported indexing method'
                    raise generic.LiDARSpatialIndexNotAvailable(msg)
            except KeyError:
                msg = 'info for creating bounding box not available'
                raise generic.LiDARFunctionUnsupported(msg)

            newPixGrid = pixelgrid.PixelGridDefn(xMin=xMin, xMax=xMax,
                yMin=yMin, yMax=yMax, xRes=binSize, yRes=binSize)
            if pixGrid is None:
                pixGrid = newPixGrid
            elif footprint == lidarprocessor.UNION:
                pixGrid = pixGrid.union(newPixGrid)
            elif footprint == lidarprocessor.INTERSECTION:
                pixGrid = pixGrid.intersection(newPixGrid)
            else:
                msg = 'Unsupported footprint option'
                raise generic.LiDARFunctionUnsupported(msg)

        # TODO: we treat points as being in the block when they are >=
        # the min coords and < the max coords. What happens on the bottom
        # and right margins?? We could possibly miss points that are there.

        # round the coords to the nearest multiple
        xMin = numpy.floor(pixGrid.xMin / binSize) * binSize
        yMin = numpy.floor(pixGrid.yMin / binSize) * binSize
        xMax = numpy.ceil(pixGrid.xMax / binSize) * binSize
        yMax = numpy.ceil(pixGrid.yMax / binSize) * binSize

        extent = Extent(xMin, xMax, yMin, yMax, binSize)
    else:
        # ensure that our binSize comes from their exent
        binSize = extent.binSize
        # get the first header since we aren't doing the above
        info = generic.getLidarFileInfo(infiles[0])
        firstHeader = info.header

    if blockSize is None:
        minAxis = min(extent.xMax - extent.xMin, extent.yMax - extent.yMin)
        blockSize = min(minAxis / BLOCKSIZE_N_BLOCKS, 200.0)
        # make it a multiple of binSize
        blockSize = int(numpy.ceil(blockSize / binSize)) * binSize
    else:
        # ensure that their given block size can be evenly divided by
        # the binSize
        # the modulo operator doesn't work too well with floats
        # so we take a different approach
        a = blockSize / binSize
        if a != int(a):
            msg = 'blockSize must be evenly divisible by the binSize'
            raise generic.LiDARInvalidData(msg)

    extentList = []
    subExtent = Extent(extent.xMin, extent.xMin + blockSize,
        extent.yMax - blockSize, extent.yMax, binSize)
    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    tmpSuffix = '.' + outputFormat.lower()

    bMoreToDo = True
    while bMoreToDo:
        fd, fname = tempfile.mkstemp(suffix=tmpSuffix, dir=tempDir)
        os.close(fd)

        userClass = lidarprocessor.LidarFile(fname, generic.CREATE)
        if outputFormat == 'SPDV4':
            userClass.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                False)
            driver = spdv4.SPDV4File(fname, generic.CREATE, controls,
                userClass)
        elif outputFormat == 'LAS':
            driver = las.LasFile(fname, generic.CREATE, controls, userClass)
        else:
            msg = 'Unsupported output format %s' % outputFormat
            raise generic.LiDARFunctionUnsupported(msg)

        data = (copy.copy(subExtent), driver)
        extentList.append(data)

        # move it along
        subExtent.xMin += blockSize
        subExtent.xMax += blockSize

        if subExtent.xMin >= extent.xMax:
            # next line down
            subExtent.xMin = extent.xMin
            subExtent.xMax = extent.xMin + blockSize
            subExtent.yMax -= blockSize
            subExtent.yMin -= blockSize

        # done?
        bMoreToDo = subExtent.yMax > extent.yMin

    # ok now set up to read the input files using lidarprocessor
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.inputs = []

    for infile in infiles:
        # renamed from 'input' so we don't shadow the builtin
        inputFile = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

        # must be a better way of doing this, but this is what
        # translate does. We don't know what formats we are getting
        # ahead of time
        info = generic.getLidarFileInfo(infile)
        inFormat = info.getDriverName()
        if inFormat == 'LAS':
            inputFile.setLiDARDriverOption('BUILD_PULSES', buildPulses)

        dataFiles.inputs.append(inputFile)

    controls = lidarprocessor.Controls()
    progress = cuiprogress.GDALProgressBar()
    progress.setLabelText('Splitting...')
    controls.setProgress(progress)
    controls.setSpatialProcessing(False)
    controls.setMessageHandler(lidarprocessor.silentMessageFn)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.outList = extentList
    otherArgs.indexType = indexType
    otherArgs.pulseIndexMethod = pulseIndexMethod

    lidarprocessor.doProcessing(classifyFunc, dataFiles, controls=controls,
        otherArgs=otherArgs)

    # close all the output files and save their names to return
    newExtentList = []
    for subExtent, driver in extentList:
        fname = driver.fname
        driver.close()

        data = (fname, subExtent)
        newExtentList.append(data)

    return firstHeader, extent, newExtentList
def translate(info, infile, outfile, expectRange=None, spatial=False,
        extent=None, scaling=None, nullVals=None, constCols=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * spatial is True or False - dictates whether we are processing spatially
      or not. If True then spatial index will be created on the output file
      on the fly.
    * extent is a tuple of values specifying the extent to work with.
      xmin ymin xmax ymax
    * scaling is a list of tuples with (type, varname, gain, offset).
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tuples with (type, varname, dtype, value)
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    # spatial processing is only possible when the input has an index
    if spatial and not info.hasSpatialIndex:
        msg = "Spatial processing requested but file does not have spatial index"
        raise generic.LiDARInvalidSetting(msg)

    if extent is not None and not spatial:
        msg = 'Extent can only be set when processing spatially'
        raise generic.LiDARInvalidSetting(msg)

    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)
    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
        lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
        False)

    controls = lidarprocessor.Controls()
    controls.setProgress(cuiprogress.GDALProgressBar())
    controls.setSpatialProcessing(spatial)

    if extent is not None:
        extent = [float(x) for x in extent]
        # the bin size comes from the input file's header
        binSize = info.header['BIN_SIZE']
        pixgrid = pixelgrid.PixelGridDefn(xMin=extent[0], yMin=extent[1],
            xMax=extent[2], yMax=extent[3], xRes=binSize, yRes=binSize)
        controls.setReferencePixgrid(pixgrid)
        controls.setFootprint(lidarprocessor.BOUNDS_FROM_REFERENCE)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.expectRange = expectRange
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols

    lidarprocessor.doProcessing(transFunc, dataFiles, controls=controls,
        otherArgs=otherArgs)