def calculateCv(self, las):
    """Compute the coefficient of variation of return heights for one tile.

    Stores std(z) / mean(z) in self.plot_dictionary, keyed by the tile
    centre coordinates derived from the LAS header.

    las -- path to a LAS tile file
    """
    centre_x, centre_y = self.tileCentre(parseHeader(las))  # calculate plot centre
    # Seed a per-worker RNG with the process identity so every worker
    # derives a stable, distinct scratch-directory name.
    worker_id = multiprocessing.current_process()._identity[0]
    rng = np.random.mtrand.RandomState(worker_id)
    scratch_name = 'lidar.processing.' + str(rng.randint(0, 9999999)) + '.tmp'
    scratch_dir = os.path.join(tempfile.gettempdir(), scratch_name)
    points = lasIO(las, tmpDir=scratch_dir, keepTemp=False).all().asArray()
    heights = points['z']
    self.plot_dictionary[centre_x, centre_y] = heights.std() / heights.mean()
def readLAS(self, lasF):
    """Read one LAS tile and populate the requested metric dictionaries.

    lasF -- path to a LAS tile file; results are written into
    self.metrics under the tile-centre (X, Y) key.
    """
    # Per-worker scratch directory, seeded from the process identity so
    # concurrent workers never collide.
    worker_id = multiprocessing.current_process()._identity[0]
    rng = np.random.mtrand.RandomState(worker_id)
    scratch_name = 'lidar.processing.' + str(rng.randint(0, 9999999)) + '.tmp'
    scratch_dir = os.path.join(tempfile.gettempdir(), scratch_name)

    tile = lasIO(lasF, tmpDir=scratch_dir)
    X, Y = self.tileCentre(tile)

    # 'point_only' needs no point data at all — just flag the tile.
    if 'point_only' in self.metrics.keys():
        self.metrics['point_only'][X, Y] = 1
        return

    points = tile.all().asArray()
    if 'height' in self.metrics.keys():
        self.metrics['height'][X, Y] = np.percentile(points['z'], 95)
    # Structure metrics are only computed when at least one was requested.
    structure_metrics = {'layers', 'canopyDensity', 'underDensity', 'pgap', 'baseHeight'}
    if structure_metrics & set(self.metrics.keys()):
        self.calculateStructure(points, X, Y, lasF)
def __init__(self, las, ppsm):
    """Store the point cloud and derive its rounded grid extents.

    las  -- path to a LAS file, or a structured array with 'x'/'y' fields
    ppsm -- pulses per square metre; grid cell edge is 1 / sqrt(ppsm)
    """
    if isinstance(las, str):
        # Given a path: load the whole file into a structured array.
        las = lasIO(las).all().asArray()
    self.ppsm = ppsm
    self.las = las
    self.xy = 1. / np.sqrt(ppsm)  # cell edge length in metres
    # Rounded bounding box of the cloud.
    x_coords = self.las['x']
    y_coords = self.las['y']
    self.xmin = np.round(x_coords.min())
    self.xmax = np.round(x_coords.max())
    self.ymin = np.round(y_coords.min())
    self.ymax = np.round(y_coords.max())
def fromLAS(self, las, threshold=2.0, top_threshold=99, z_scale=False):
    """Populate height/weight arrays from a LAS file path or a point array.

    Parameters
    ----------
    las : str or np.ndarray
        Path to a LAS file, or a structured array with 'z' and 'rtn_tot'
        fields.
    threshold : float
        Heights at or below this are treated as ground and removed.
    top_threshold : float
        Percentile above which heights are treated as outliers and removed.
    z_scale : float or False
        Vertical scale for array input; falls back to 0.01 when falsy.
        For path input the scale is read from the LAS header instead.

    Returns
    -------
    self (fluent: bins are created via self._create_bins()).

    Raises
    ------
    Exception if `las` is neither a path nor an ndarray.
    """
    self.threshold = threshold
    if isinstance(las, str):
        lasRAW = lasIO(las, tmpDir=self.tempDir, keepTemp=False).all().asArray()  # reads las file
        if lasRAW['rtn_tot'].max() == 0:
            # File carries no return counts at all; treat every point as
            # a single return so the weighting below stays defined.
            lasRAW['rtn_tot'][:] = 1
        # finds lines where rtn_tot == 0 and removes them
        self.las = lasRAW[lasRAW['rtn_tot'] > 0]
        self.z_scale = parseHeader(las)['zscale']
    elif isinstance(las, np.ndarray):
        self.las = las
        # BUG FIX: a caller-supplied z_scale was previously ignored
        # (only the falsy default was ever assigned).
        self.z_scale = z_scale if z_scale else .01
    else:
        raise Exception('input needs to be a path to LAS file or a LAS file array')
    self.z = self.las['z']
    self.zw = self.las['rtn_tot']
    self.lenLAS = len(self.las)
    # Each point is weighted by 1 / total returns of its pulse.
    rtn_weight = np.around(1. / self.las['rtn_tot'], decimals=2)
    self.total = np.sum(rtn_weight)  # sum of weighted returns
    # Removes ground and outliers.
    # BUG FIX: the mask was wrapped in a list ([mask]), which is
    # deprecated/invalid fancy indexing in modern NumPy; index with the
    # boolean array directly.
    idx = (self.z > threshold) & (self.z < np.percentile(self.z, top_threshold))
    self.z = self.z[idx]            # ...from z
    self.zw = rtn_weight[idx]       # ...and from weighted heights
    self._create_bins()
    return self
# NOTE(review): this chunk begins mid-method — the `if` matching the `else:`
# below, its enclosing loop, and the method header are outside the visible
# source, so the leading indentation here is reconstructed; confirm against
# the full file.
            # take the next batch of maxProcess tiles
            processingList = self.l[maxProcess * listI:(maxProcess * listI) + maxProcess]
        else:
            # final (possibly short) batch: everything that is left
            processingList = self.l[maxProcess * listI:]
        for j, las in enumerate(
                processingList):  # limits number of images run at once
            p = multiprocessing.Process(target=self.chp, args=(las, ))
            jobs.append(p)
            p.start()
        # block until the whole batch has finished before starting the next
        for proc in jobs:
            proc.join()
        listI += 1
    # presumably chp_dictionary is a multiprocessing.Manager dict shared with
    # the workers — TODO confirm; snapshot it into a plain dict once done
    self.bsCHPmutiple = dict(self.chp_dictionary)


if __name__ == '__main__':
    # Ad-hoc driver (Python 2 — note the print statement) with a hard-coded
    # local path; exercises the fromLAS -> CHP pipeline on one tile.
    path = '/Users/phil/ALS/WC/spl/tile_20/WC1_5m_TILES/383475.0_5828910.0.las'
    las = lasIO(path).all().asArray()
    # plot = 'PE2744N2556'
    # las_path = os.path.join(path, plot + '.las')
    # las = CanopyComplexity().fromSample(las['z'], las['rtn_tot']).CHP('model')
    las = CanopyComplexity().fromLAS(las).CHP('model')
    chp = CanopyComplexity().fromLAS(las.simulateCloud()).CHP()
    print chp.zw