def joinNode(node, nodeAbsPathA, nodeAbsPathB, nodeAbsPathO, hierarchyStepSize, extension, cmcommand):
    hrcFile = node + '.hrc'
    hrcA = None
    # Check whether there is data for this node in octree A (i.e. whether its HRC file exists)
    if os.path.isfile(nodeAbsPathA + '/' + hrcFile):
        hrcA = utils.readHRC(nodeAbsPathA + '/' + hrcFile, hierarchyStepSize)
        if len(os.listdir(nodeAbsPathA)) == 2:
            hrcA[0][0] = utils.getPCFileDetails(nodeAbsPathA + '/' + node + extension)[0]
    hrcB = None
    # Check whether there is data for this node in octree B (i.e. whether its HRC file exists)
    if os.path.isfile(nodeAbsPathB + '/' + hrcFile):
        hrcB = utils.readHRC(nodeAbsPathB + '/' + hrcFile, hierarchyStepSize)
        if len(os.listdir(nodeAbsPathB)) == 2:
            hrcB[0][0] = utils.getPCFileDetails(nodeAbsPathB + '/' + node + extension)[0]
    if hrcA is not None and hrcB is not None:
        # Both octrees A and B have data in this node, so we have to merge them
        utils.shellExecute('mkdir -p ' + nodeAbsPathO)
        hrcO = utils.initHRC(hierarchyStepSize)
        for level in range(hierarchyStepSize + 2):
            numChildrenA = len(hrcA[level])
            numChildrenB = len(hrcB[level])
            numChildrenO = max((numChildrenA, numChildrenB))
            if level < (hierarchyStepSize + 1):
                for i in range(numChildrenO):
                    hasNodeA = (i < numChildrenA) and (hrcA[level][i] > 0)
                    hasNodeB = (i < numChildrenB) and (hrcB[level][i] > 0)
                    (childNode, isFile) = utils.getNodeName(level, i, node, hierarchyStepSize, extension)
                    if hasNodeA and hasNodeB:
                        hrcO[level].append(hrcA[level][i] + hrcB[level][i])
                        # Merge the LAZ file or the folder (recursively)
                        if isFile:
                            utils.shellExecute('lasmerge -i ' + nodeAbsPathA + '/' + childNode + ' ' + nodeAbsPathB + '/' + childNode + ' -o ' + nodeAbsPathO + '/' + childNode)
                            # We now need to set the header of the output file to match the input files
                            # (lasmerge will have shrunk the bounding box and we do not want that)
                            fixHeader(nodeAbsPathA + '/' + childNode, nodeAbsPathO + '/' + childNode)
                        else:
                            joinNode(node + childNode, nodeAbsPathA + '/' + childNode, nodeAbsPathB + '/' + childNode, nodeAbsPathO + '/' + childNode, hierarchyStepSize, extension, cmcommand)
                    elif hasNodeA:
                        # Only A has this child: mv / cp it to the output
                        hrcO[level].append(hrcA[level][i])
                        utils.shellExecute(cmcommand + nodeAbsPathA + '/' + childNode + ' ' + nodeAbsPathO + '/' + childNode)
                    elif hasNodeB:
                        # Only B has this child: mv / cp it to the output
                        hrcO[level].append(hrcB[level][i])
                        utils.shellExecute(cmcommand + nodeAbsPathB + '/' + childNode + ' ' + nodeAbsPathO + '/' + childNode)
                    else:
                        hrcO[level].append(0)
            else:
                hrcO[level] = list(numpy.array(hrcA[level] + ([0] * (numChildrenO - numChildrenA))) + numpy.array(hrcB[level] + ([0] * (numChildrenO - numChildrenB))))
        # Write the HRC file
        utils.writeHRC(nodeAbsPathO + '/' + hrcFile, hierarchyStepSize, hrcO)
    elif hrcA is not None:
        # Only octree A has data in this node. We can directly copy it to the output octree
        utils.shellExecute(cmcommand + nodeAbsPathA + ' ' + nodeAbsPathO)
    elif hrcB is not None:
        # Only octree B has data in this node. We can directly copy it to the output octree
        utils.shellExecute(cmcommand + nodeAbsPathB + ' ' + nodeAbsPathO)
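# A minimal usage sketch (not part of the original code): since joinNode recurses into the
# child nodes itself, merging two complete Potree octrees amounts to a single call on their
# root nodes. The wrapper name mergeOctrees, the 'r' root node, the 'data' subfolder and the
# default hierarchyStepSize/extension values are illustrative assumptions; in practice these
# should be taken from the octrees' metadata (cloud.js).
def mergeOctrees(octreeA, octreeB, octreeO, hierarchyStepSize=5, extension='.laz', moveFiles=False):
    cmcommand = 'mv ' if moveFiles else 'cp -r '
    joinNode('r', octreeA + '/data/r', octreeB + '/data/r', octreeO + '/data/r',
             hierarchyStepSize, extension, cmcommand)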
def runProcess(processIndex, tasksQueue, resultsQueue, connectionString, srid, tableName):
    connection = psycopg2.connect(connectionString)
    cursor = connection.cursor()
    kill_received = False
    while not kill_received:
        fileAbsPath = None
        try:
            # This call will patiently wait until a new job is available
            fileAbsPath = tasksQueue.get()
        except:
            # If there is an error we quit
            kill_received = True
        if fileAbsPath is None:
            # If we receive a None job, it means we can stop
            kill_received = True
        else:
            # Insert the file path, point count and extent of this file into the DB table
            (count, minX, minY, minZ, maxX, maxY, maxZ, _, _, _, _, _, _) = utils.getPCFileDetails(fileAbsPath)
            insertStatement = "INSERT INTO " + tableName + "(filepath,numberpoints,minz,maxz,geom) VALUES (%s, %s, %s, %s, ST_MakeEnvelope(%s, %s, %s, %s, %s))"
            insertArgs = [fileAbsPath, int(count), float(minZ), float(maxZ), float(minX), float(minY), float(maxX), float(maxY), int(srid)]
            cursor.execute(insertStatement, insertArgs)
            cursor.connection.commit()
            resultsQueue.put((processIndex, fileAbsPath))
    connection.close()
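# A hedged sketch (not in the original module) of the PostGIS extents table that the INSERT
# above expects. The column names come from the INSERT statement; the column types, the
# Polygon geometry type and the helper name createExtentsTable are assumptions.
def createExtentsTable(connectionString, tableName, srid):
    connection = psycopg2.connect(connectionString)
    cursor = connection.cursor()
    cursor.execute('CREATE TABLE ' + tableName +
                   ' (filepath TEXT, numberpoints BIGINT, minz DOUBLE PRECISION,'
                   ' maxz DOUBLE PRECISION, geom geometry(Polygon, ' + str(int(srid)) + '))')
    connection.commit()
    connection.close()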
def runPDALSplitter(processIndex, inputFile, outputFolder, tempFolder, minX, minY, maxX, maxY, axisTiles):
    pTempFolder = tempFolder + '/' + str(processIndex)
    if not os.path.isdir(pTempFolder):
        utils.shellExecute('mkdir -p ' + pTempFolder)
    # Get the length required by the PDAL split filter in order to get square tiles
    lengthPDAL = (maxX - minX) / float(axisTiles)
    utils.shellExecute('pdal split -i ' + inputFile + ' -o ' + pTempFolder + '/' + os.path.basename(inputFile) + ' --origin_x ' + str(minX) + ' --origin_y ' + str(minY) + ' --length ' + str(lengthPDAL))
    tGCount = 0
    for gFile in os.listdir(pTempFolder):
        (gCount, gFileMinX, gFileMinY, _, gFileMaxX, gFileMaxY, _, _, _, _, _, _, _) = utils.getPCFileDetails(pTempFolder + '/' + gFile)
        # Each generated chunk should fall within one tile. We use its central point to find which one
        pX = gFileMinX + ((gFileMaxX - gFileMinX) / 2.)
        pY = gFileMinY + ((gFileMaxY - gFileMinY) / 2.)
        tileFolder = outputFolder + '/' + getTileName(*getTileIndex(pX, pY, minX, minY, maxX, maxY, axisTiles))
        if not os.path.isdir(tileFolder):
            utils.shellExecute('mkdir -p ' + tileFolder)
        utils.shellExecute('mv ' + pTempFolder + '/' + gFile + ' ' + tileFolder + '/' + gFile)
        tGCount += gCount
    return tGCount
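# getTileIndex and getTileName are defined elsewhere in the repository. The illustrative
# versions below are assumptions that only reflect how they are called here: a point is
# mapped to an (x, y) cell on an axisTiles x axisTiles grid over the global bounding box,
# and the cell index is turned into a tile folder name.
def getTileIndex(pX, pY, minX, minY, maxX, maxY, axisTiles):
    xpos = int((pX - minX) * axisTiles / (maxX - minX))
    ypos = int((pY - minY) * axisTiles / (maxY - minY))
    if xpos == axisTiles:  # points exactly on the upper boundary go to the last tile
        xpos -= 1
    if ypos == axisTiles:
        ypos -= 1
    return (xpos, ypos)

def getTileName(xIndex, yIndex):
    return 'tile_%d_%d' % (int(xIndex), int(yIndex))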
def runProcess(processIndex, tasksQueue, resultsQueue, minX, minY, maxX, maxY, outputFolder, tempFolder, axisTiles):
    kill_received = False
    while not kill_received:
        inputFile = None
        try:
            # This call will patiently wait until a new job is available
            inputFile = tasksQueue.get()
        except:
            # If there is an error we quit
            kill_received = True
        if inputFile is None:
            # If we receive a None job, it means we can stop
            kill_received = True
        else:
            # Get the number of points and the BBOX of this file
            (fCount, fMinX, fMinY, _, fMaxX, fMaxY, _, _, _, _, _, _, _) = utils.getPCFileDetails(inputFile)
            print('Processing', os.path.basename(inputFile), fCount, fMinX, fMinY, fMaxX, fMaxY)
            # For each of the four vertices of the BBOX we get the tile it falls into
            posMinXMinY = getTileIndex(fMinX, fMinY, minX, minY, maxX, maxY, axisTiles)
            posMinXMaxY = getTileIndex(fMinX, fMaxY, minX, minY, maxX, maxY, axisTiles)
            posMaxXMinY = getTileIndex(fMaxX, fMinY, minX, minY, maxX, maxY, axisTiles)
            posMaxXMaxY = getTileIndex(fMaxX, fMaxY, minX, minY, maxX, maxY, axisTiles)
            if (posMinXMinY == posMinXMaxY) and (posMinXMinY == posMaxXMinY) and (posMinXMinY == posMaxXMaxY):
                # If they are all the same, the whole file can be directly copied to that tile
                tileFolder = outputFolder + '/' + getTileName(*posMinXMinY)
                if not os.path.isdir(tileFolder):
                    utils.shellExecute('mkdir -p ' + tileFolder)
                utils.shellExecute('cp ' + inputFile + ' ' + tileFolder)
            else:
                # If not, we run the PDAL splitter to split the file into pieces that can go to the tiles
                tGCount = runPDALSplitter(processIndex, inputFile, outputFolder, tempFolder, minX, minY, maxX, maxY, axisTiles)
                if tGCount != fCount:
                    print('WARNING: split version of', inputFile, 'does not have the same number of points (', tGCount, 'expected', fCount, ')')
            resultsQueue.put((processIndex, inputFile, fCount))
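# A hedged driver sketch (assumed, not from the original module) showing how the tiling
# runProcess above is typically fed: one task per input file plus one None sentinel per
# worker so every process knows when to stop. The function name runTiler is an assumption;
# os, utils, getTileIndex and getTileName come from the surrounding module.
import multiprocessing

def runTiler(inputFolder, outputFolder, tempFolder, minX, minY, maxX, maxY, axisTiles, numberProcs):
    tasksQueue = multiprocessing.Queue()
    resultsQueue = multiprocessing.Queue()
    inputFiles = sorted(os.listdir(inputFolder))
    for inputFile in inputFiles:
        tasksQueue.put(inputFolder + '/' + inputFile)
    for i in range(numberProcs):
        tasksQueue.put(None)  # one sentinel per worker
    workers = []
    for i in range(numberProcs):
        worker = multiprocessing.Process(target=runProcess, args=(
            i, tasksQueue, resultsQueue, minX, minY, maxX, maxY, outputFolder, tempFolder, axisTiles))
        worker.start()
        workers.append(worker)
    for i in range(len(inputFiles)):
        resultsQueue.get()  # block until a worker reports a finished file
    for worker in workers:
        worker.join()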
def runProcess(processIndex, tasksQueue, resultsQueue, outputFolder, useApprox):
    kill_received = False
    while not kill_received:
        tileAbsPath = None
        try:
            # This call will patiently wait until a new job is available
            tileAbsPath = tasksQueue.get()
        except:
            # If there is an error we quit
            kill_received = True
        if tileAbsPath is None:
            # If we receive a None job, it means we can stop
            kill_received = True
        else:
            # Write the extent(s) of this tile as WKT polygons
            tFile = open(outputFolder + '/' + os.path.basename(tileAbsPath) + '.wkt', 'w')
            (tMinX, tMinY, tMaxX, tMaxY) = (None, None, None, None)
            if os.path.isfile(tileAbsPath):
                tilefiles = [tileAbsPath, ]
            else:
                tilefiles = glob.glob(tileAbsPath + '/*')
            for tilefile in tilefiles:
                (_, fMinX, fMinY, _, fMaxX, fMaxY, _, _, _, _, _, _, _) = utils.getPCFileDetails(tilefile)
                if useApprox:
                    # Grow the approximate envelope of the whole tile
                    if tMinX is None or tMinX > fMinX:
                        tMinX = fMinX
                    if tMinY is None or tMinY > fMinY:
                        tMinY = fMinY
                    if tMaxX is None or tMaxX < fMaxX:
                        tMaxX = fMaxX
                    if tMaxY is None or tMaxY < fMaxY:
                        tMaxY = fMaxY
                else:
                    # Write one polygon per file in the tile
                    tFile.write('POLYGON ((%f %f, %f %f, %f %f, %f %f, %f %f))\n' % (fMinX, fMaxY, fMinX, fMinY, fMaxX, fMinY, fMaxX, fMaxY, fMinX, fMaxY))
            if useApprox and tMinX is not None:
                # Write a single approximate envelope for the whole tile
                tFile.write('POLYGON ((%f %f, %f %f, %f %f, %f %f, %f %f))\n' % (tMinX, tMaxY, tMinX, tMinY, tMaxX, tMinY, tMaxX, tMaxY, tMinX, tMaxY))
            tFile.close()
            resultsQueue.put((processIndex, tileAbsPath))
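# Illustration (added, not from the original code): each line written above is a closed
# five-point WKT polygon ring, one per file in the tile, or a single approximate envelope
# per tile when useApprox is set. For example (coordinates invented):
#   POLYGON ((0.000000 10.000000, 0.000000 0.000000, 10.000000 0.000000, 10.000000 10.000000, 0.000000 10.000000))
# A minimal reader for the generated .wkt files could look like this (readExtentFile is an
# assumed helper name):
def readExtentFile(wktFileAbsPath):
    with open(wktFileAbsPath, 'r') as wktFile:
        return [line.strip() for line in wktFile if line.strip()]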
def validateNode(node, nodeAbsPath, hierarchyStepSize, extension):
    hrcFile = node + '.hrc'
    hrc = None
    # Check that there is data in this node (the HRC file for this node must exist)
    if not os.path.isfile(nodeAbsPath + '/' + hrcFile):
        raise Exception(nodeAbsPath + '/' + hrcFile + ' could not be read')
    hrc = utils.readHRC(nodeAbsPath + '/' + hrcFile, hierarchyStepSize)
    for level in range(hierarchyStepSize + 1):
        hrcLevel = hrc[level]
        for i in range(len(hrcLevel)):
            hrcNumPoints = hrcLevel[i]
            if hrcNumPoints:
                (childNode, isFile) = utils.getNodeName(level, i, node, hierarchyStepSize, extension)
                childNodeAbsPath = nodeAbsPath + '/' + childNode
                if not os.path.exists(childNodeAbsPath):
                    print('Error: could not find', childNodeAbsPath)
                    raise Exception(node + ' in ' + nodeAbsPath + ' is not correct')
                if isFile:
                    # Compare the point count in the HRC with the point count in the file itself
                    fNumPoints = utils.getPCFileDetails(childNodeAbsPath)[0]
                    if hrcNumPoints != fNumPoints:
                        print('Error: number of points in HRC (' + str(hrcNumPoints) + ') != number of points in file (' + str(fNumPoints) + ') in ' + childNodeAbsPath)
                else:
                    validateNode(node + childNode, childNodeAbsPath, hierarchyStepSize, extension)
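# A hedged usage sketch (not part of the original code): like joinNode, validateNode is
# recursive, so validating a whole Potree octree is a single call on its root node. The
# wrapper name validateOctree, the 'r' root node, the 'data' subfolder and the default
# arguments are assumptions; in practice they come from the octree's metadata (cloud.js).
def validateOctree(octreeFolder, hierarchyStepSize=5, extension='.laz'):
    validateNode('r', octreeFolder + '/data/r', hierarchyStepSize, extension)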
def fixHeader(inputFile, outputFile):
    # Set the bounding box in the header of outputFile to the bounding box read from
    # inputFile (used after lasmerge, which shrinks the bounding box to the merged points)
    (_, minX, minY, minZ, maxX, maxY, maxZ, _, _, _, _, _, _) = utils.getPCFileDetails(inputFile)
    utils.shellExecute('lasinfo -i %s -nc -nv -nco -set_bounding_box %f %f %f %f %f %f' % (outputFile, minX, minY, minZ, maxX, maxY, maxZ))