def pythonParallelization(self):
    """Execute the prepared query using Python-level parallel workers.

    Strategy is chosen by self.parallelType:
      * 'cand'  -- workers share out the candidate block ids intersecting
                   the query geometry.
      * other   -- workers share out the cells of a grid over the query
                   region ('griddis' additionally distributes the cells).

    Returns:
        (eTime, result) tuple as produced by the dbops helper.
    """
    conn = self.getConnection()
    cur = conn.cursor()
    if self.parallelType == 'cand':
        # Ids of the blocks whose patches intersect the query geometry.
        candidatesSQL = ("SELECT " + self.blockTable + ".id FROM " +
                         self.blockTable + ", (SELECT geom FROM " +
                         self.queryTable +
                         " WHERE id = %s) A WHERE pc_intersects(pa,geom)")
        outcome = dbops.genericQueryParallelCand(
            cur, self.qp.queryMethod, postgresops.mogrifyExecute,
            self.qp.columns, self.DM_FLAT, self.qp.statistics,
            self.resultTable, candidatesSQL, [self.queryIndex, ],
            self.runGenericQueryParallelCandChild, self.numProcessesQuery,
            postgresops.createSQLFile, postgresops.executeSQLFileCount,
            self.getConnectionString(False, True))
    else:
        # Grid-based partitioning; the helper (re)fills the grid table.
        cellsTable = 'query_grid_' + str(self.queryIndex)
        postgresops.dropTable(cur, cellsTable, True)
        outcome = dbops.genericQueryParallelGrid(
            cur, self.qp.queryMethod, postgresops.mogrifyExecute,
            self.qp.columns, self.DM_FLAT, self.qp.statistics,
            self.resultTable, cellsTable, self.createGridTableMethod,
            self.runGenericQueryParallelGridChild, self.numProcessesQuery,
            (self.parallelType == 'griddis'), postgresops.createSQLFile,
            postgresops.executeSQLFileCount,
            self.getConnectionString(False, True))
    conn.close()
    return outcome
def query(self, queryId, iterationId, queriesParameters):
    """Run one benchmark query and return (eTime, result).

    Drops any previous result table, then either delegates to the Python
    parallelization path (numProcessesQuery > 1) or executes the query
    sequentially: CTAS + stats for 'disk'/'stat' methods, or dump-to-file
    plus a counting execution for 'stream'.
    """
    (eTime, result) = (-1, None)
    connection = self.getConnection()
    cursor = connection.cursor()
    # iterationId == 0 signals the first run, which also sets the geometry.
    self.prepareQuery(cursor, queryId, queriesParameters, iterationId == 0)
    postgresops.dropTable(cursor, self.resultTable, True)

    if self.numProcessesQuery > 1:
        # NOTE(review): both early returns leave `connection` open; the
        # parallel path opens its own. Confirm whether closing here would
        # roll back prepareQuery's work before tightening this.
        if self.qp.queryType in ('rectangle', 'circle', 'generic'):
            return self.pythonParallelization()
        logging.error('Python parallelization only available for disk which are not NN!')
        return (eTime, result)

    t0 = time.time()
    (query, queryArgs) = self.getSelect(self.qp)
    if self.qp.queryMethod != 'stream':
        # disk or stat: materialize via CTAS, then collect result/statistics.
        postgresops.mogrifyExecute(cursor, "CREATE TABLE " + self.resultTable + " AS ( " + query + " )", queryArgs)
        (eTime, result) = dbops.getResult(cursor, t0, self.resultTable, None, True, self.qp.columns, self.qp.statistics)
    else:
        # stream: write the SQL to a file and execute it with row counting.
        sqlFileName = str(queryId) + '.sql'
        postgresops.createSQLFile(cursor, sqlFileName, query, queryArgs)
        result = postgresops.executeSQLFileCount(self.getConnectionString(False, True), sqlFileName)
        eTime = time.time() - t0
    connection.close()
    return (eTime, result)
def query(self, queryId, iterationId, queriesParameters):
    """Run one benchmark query using Morton-range filtering; return (eTime, result).

    Scales the query geometry into quadtree space, derives the Morton
    ranges covering it, then executes sequentially (CTAS or stream) or via
    the Python parallelization path when numProcessesQuery > 1.
    """
    (eTime, result) = (-1, None)
    connection = self.getConnection()
    cursor = connection.cursor()
    self.prepareQuery(cursor, queryId, queriesParameters, iterationId == 0)
    postgresops.dropTable(cursor, self.resultTable, True)

    # NN queries: enlarge the geometry by the search radius first.
    wkt = self.qp.wkt
    if self.qp.queryType == 'nn':
        wkt = dumps(loads(self.qp.wkt).buffer(self.qp.rad))

    t0 = time.time()
    # Convert the geometry to the quadtree's scaled integer space and get
    # the Morton ranges (fully-inside vs maybe-overlapping) covering it.
    scaledWKT = wktops.scale(wkt, self.scaleX, self.scaleY, self.minX, self.minY)
    (mimranges, mxmranges) = self.quadtree.getMortonRanges(scaledWKT, self.mortonDistinctIn, maxRanges=MAXIMUM_RANGES)
    if not (mimranges or mxmranges):
        logging.info('None morton range in specified extent!')
        return (eTime, result)

    if self.numProcessesQuery > 1:
        # NOTE(review): early returns leave `connection` open here.
        if self.qp.queryMethod != 'stream' and self.qp.queryType in ('rectangle', 'circle', 'generic'):
            return self.pythonParallelization(t0, mimranges, mxmranges)
        logging.error('Python parallelization only available for disk queries (CTAS) which are not NN queries!')
        return (eTime, result)

    (query, queryArgs) = dbops.getSelectMorton(mimranges, mxmranges, self.qp, self.flatTable, self.addContainsCondition, self.DM_FLAT)
    if self.qp.queryMethod != 'stream':  # disk or stat
        postgresops.mogrifyExecute(cursor, "CREATE TABLE " + self.resultTable + " AS " + query, queryArgs)
        (eTime, result) = dbops.getResult(cursor, t0, self.resultTable, self.DM_FLAT, (not self.mortonDistinctIn), self.qp.columns, self.qp.statistics)
    else:
        sqlFileName = str(queryId) + '.sql'
        postgresops.createSQLFile(cursor, sqlFileName, query, queryArgs)
        result = postgresops.executeSQLFileCount(self.getConnectionString(False, True), sqlFileName)
        eTime = time.time() - t0
    connection.close()
    return (eTime, result)
def initialize(self):
    """Read the data SRID from the metadata table and (re)create the
    auxiliary table that will hold the query geometries."""
    conn = self.getConnection()
    cur = conn.cursor()
    postgresops.mogrifyExecute(cur, "SELECT srid from " + self.metaTable)
    self.srid = cur.fetchone()[0]
    # Recreate the query-geometry table from scratch.
    postgresops.dropTable(cur, self.queryTable, check=True)
    postgresops.mogrifyExecute(cur, "CREATE TABLE " + self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
    conn.close()
def loadFromFileSequential(self, fileAbsPath, index, numFiles):
    """Append the blocks of one already-loaded file to the global block
    table in their original order, then drop the per-file table.

    `fileAbsPath` and `numFiles` belong to the loader interface; only
    `index` is used here (to locate the per-file block table).
    """
    perFileTable = self.getFileBlockTable(index)
    conn = self.getConnection()
    cur = conn.cursor()
    # Insert the blocks on the global blocks table (with correct order).
    postgresops.mogrifyExecute(cur, "INSERT INTO " + self.blockTable + " (pa) SELECT pa FROM " + perFileTable + " ORDER BY id")
    # The per-file (temporal) table is no longer needed.
    postgresops.dropTable(cur, perFileTable)
    conn.close()
def pythonParallelization(self):
    """Run the query with grid-based Python parallel workers.

    Uses an upper-cased per-query grid table; 'griddis' in
    self.parallelType additionally distributes the grid cells.
    Returns (eTime, result) from the dbops helper.
    """
    conn = self.getConnection()
    cur = conn.cursor()
    cellsTable = ('query_grid_' + str(self.queryIndex)).upper()
    postgresops.dropTable(cur, cellsTable, True)
    outcome = dbops.genericQueryParallelGrid(
        cur, self.qp.queryMethod, postgresops.mogrifyExecute,
        self.qp.columns, self.DM_FLAT, self.qp.statistics,
        self.resultTable, cellsTable, self.createGridTableMethod,
        self.runGenericQueryParallelGridChild, self.numProcessesQuery,
        (self.parallelType == 'griddis'), postgresops.createSQLFile,
        postgresops.executeSQLFileCount,
        self.getConnectionString(False, True))
    conn.close()
    return outcome
def initialize(self):
    """Fetch the SRID from the metadata table, then drop and recreate the
    query-geometry table with that SRID."""
    conn = self.getConnection()
    cur = conn.cursor()
    postgresops.mogrifyExecute(cur, "SELECT srid from " + self.metaTable)
    self.srid = cur.fetchone()[0]
    postgresops.dropTable(cur, self.queryTable, check=True)
    postgresops.mogrifyExecute(cur, "CREATE TABLE " + self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
    conn.close()
def pythonParallelization(self):
    """Grid-partitioned parallel execution of the prepared query.

    Drops any stale upper-cased grid table for this query index and lets
    dbops.genericQueryParallelGrid rebuild it and fan out the work.
    Returns the helper's (eTime, result) tuple.
    """
    conn = self.getConnection()
    cur = conn.cursor()
    gridName = ('query_grid_' + str(self.queryIndex)).upper()
    postgresops.dropTable(cur, gridName, True)
    timedResult = dbops.genericQueryParallelGrid(
        cur, self.qp.queryMethod, postgresops.mogrifyExecute,
        self.qp.columns, self.DM_FLAT, self.qp.statistics,
        self.resultTable, gridName, self.createGridTableMethod,
        self.runGenericQueryParallelGridChild, self.numProcessesQuery,
        (self.parallelType == 'griddis'), postgresops.createSQLFile,
        postgresops.executeSQLFileCount,
        self.getConnectionString(False, True))
    conn.close()
    return timedResult
def initialize(self):
    """Load extent/scale metadata, recreate the query table, and build the
    quadtree used to derive Morton ranges for queries."""
    conn = self.getConnection()
    cur = conn.cursor()
    postgresops.mogrifyExecute(cur, "SELECT srid, minx, miny, maxx, maxy, scalex, scaley from " + self.metaTable)
    (self.srid, self.minX, self.minY, self.maxX, self.maxY,
     self.scaleX, self.scaleY) = cur.fetchone()
    postgresops.dropTable(cur, self.queryTable, check=True)
    postgresops.mogrifyExecute(cur, "CREATE TABLE " + self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
    conn.close()
    # Quadtree domain expressed in scaled integer coordinates.
    domain = (0, 0,
              int((self.maxX - self.minX) / self.scaleX),
              int((self.maxY - self.minY) / self.scaleY))
    self.quadtree = QuadTree(domain, 'auto')
    # Differentiate QuadTree nodes that are fully in the query region
    self.mortonDistinctIn = False
def initialize(self):
    """Read the SRID from pointcloud_formats, recreate the query table, and
    build the PC_Get() SQL accessor for each mapped PDAL dimension."""
    conn = self.getConnection()
    cur = conn.cursor()
    cur.execute('SELECT srid from pointcloud_formats LIMIT 1')
    self.srid = cur.fetchone()[0]
    postgresops.dropTable(cur, self.queryTable, check=True)
    postgresops.mogrifyExecute(cur, "CREATE TABLE " + self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
    conn.close()
    # One SQL expression per column; columns without a PDAL dimension
    # mapping are skipped.
    self.columnsNameDict = {}
    for column, dimension in self.DM_PDAL.items():
        if dimension != None:
            self.columnsNameDict[column] = ("PC_Get(qpoint, '" + dimension.lower() + "')",)
def query(self, queryId, iterationId, queriesParameters):
    """Run one Morton-range query against the flat table; return (eTime, result).

    NN queries are rejected after the Morton ranges are computed; other
    query types run sequentially (CTAS or stream) or via the Python
    parallelization path when numProcessesQuery > 1.
    """
    (eTime, result) = (-1, None)
    connection = self.getConnection()
    cursor = connection.cursor()
    self.prepareQuery(cursor, queryId, queriesParameters, iterationId == 0)
    postgresops.dropTable(cursor, self.resultTable, True)

    # NN queries: buffer the geometry by the search radius first.
    wkt = self.qp.wkt
    if self.qp.queryType == 'nn':
        wkt = dumps(loads(self.qp.wkt).buffer(self.qp.rad))

    t0 = time.time()
    scaledWKT = wktops.scale(wkt, self.scaleX, self.scaleY, self.minX, self.minY)
    (mimranges, mxmranges) = self.quadtree.getMortonRanges(scaledWKT, self.mortonDistinctIn, maxRanges=MAXIMUM_RANGES)
    if not (mimranges or mxmranges):
        logging.info('None morton range in specified extent!')
        return (eTime, result)
    if self.qp.queryType == 'nn':
        logging.error('NN queries not available!')
        return (eTime, result)
    if self.numProcessesQuery > 1:
        # NOTE(review): early returns leave `connection` open here.
        if self.qp.queryMethod != 'stream' and self.qp.queryType in ('rectangle', 'circle', 'generic'):
            return self.pythonParallelization(t0, mimranges, mxmranges)
        logging.error('Python parallelization only available for disk queries (CTAS) which are not NN queries!')
        return (eTime, result)

    (query, queryArgs) = self.getSelect(self.qp, mimranges, mxmranges)
    if self.qp.queryMethod != 'stream':  # disk or stat
        postgresops.mogrifyExecute(cursor, "CREATE TABLE " + self.resultTable + " AS (" + query + ")", queryArgs)
        (eTime, result) = dbops.getResult(cursor, t0, self.resultTable, self.DM_FLAT, (not self.mortonDistinctIn), self.qp.columns, self.qp.statistics)
    else:
        sqlFileName = str(queryId) + '.sql'
        postgresops.createSQLFile(cursor, sqlFileName, query, queryArgs)
        result = postgresops.executeSQLFileCount(self.getConnectionString(False, True), sqlFileName)
        eTime = time.time() - t0
    connection.close()
    return (eTime, result)
def pythonParallelization(self):
    """Parallel execution of the prepared query at the Python level.

    'cand' distributes the candidate block ids among workers; any other
    parallelType uses a per-query grid table ('griddis' also distributes
    the cells). Returns (eTime, result) from the dbops helper.
    """
    conn = self.getConnection()
    cur = conn.cursor()
    if self.parallelType == 'cand':
        # Candidate blocks: those whose patches intersect the geometry.
        blockIdsSQL = ("SELECT " + self.blockTable + ".id FROM " +
                       self.blockTable + ", (SELECT geom FROM " +
                       self.queryTable +
                       " WHERE id = %s) A WHERE pc_intersects(pa,geom)")
        timedResult = dbops.genericQueryParallelCand(
            cur, self.qp.queryMethod, postgresops.mogrifyExecute,
            self.qp.columns, self.DM_FLAT, self.qp.statistics,
            self.resultTable, blockIdsSQL, [self.queryIndex, ],
            self.runGenericQueryParallelCandChild, self.numProcessesQuery,
            postgresops.createSQLFile, postgresops.executeSQLFileCount,
            self.getConnectionString(False, True))
    else:
        gridName = 'query_grid_' + str(self.queryIndex)
        postgresops.dropTable(cur, gridName, True)
        timedResult = dbops.genericQueryParallelGrid(
            cur, self.qp.queryMethod, postgresops.mogrifyExecute,
            self.qp.columns, self.DM_FLAT, self.qp.statistics,
            self.resultTable, gridName, self.createGridTableMethod,
            self.runGenericQueryParallelGridChild, self.numProcessesQuery,
            (self.parallelType == 'griddis'), postgresops.createSQLFile,
            postgresops.executeSQLFileCount,
            self.getConnectionString(False, True))
    conn.close()
    return timedResult
def initialize(self):
    """Create the auxiliary DB (with PostGIS) if missing, enumerate the
    input point-cloud files, and (re)create the auxiliary query table."""
    # Check whether DB already exists (ask the catalog via the superuser).
    superConn = self.getConnection(True)
    superCur = superConn.cursor()
    superCur.execute('SELECT datname FROM pg_database WHERE datname = %s', (self.dbName,))
    self.exists = superCur.fetchone()
    superCur.close()
    superConn.close()

    # Creates the DB if not existing.
    if not self.exists:
        logging.info('Creating auxiliary DB ' + self.dbName)
        # NOTE(review): the connection string is interpolated into a shell
        # command; acceptable for trusted config, not injection-safe.
        os.system('createdb ' + self.getConnectionString(False, True))

    connection = self.getConnection()
    cursor = connection.cursor()
    if not self.exists:
        # Fresh DB: enable the PostGIS extension.
        cursor.execute('CREATE EXTENSION postgis;')
        connection.commit()

    logging.info('Getting default list of files and data SRID')
    pcFiles = glob.glob(os.path.join(os.path.abspath(self.dataFolder), '*' + self.dataExtension))
    # Single-file datasets need different lasclip handling (-merged or not).
    self.isSingle = (len(pcFiles) == 1)

    # Write the input file list to be used by lasmerge and lasclip.
    with open(DEFAULT_INPUT_FILE_LIST_FILE_NAME, 'w') as listFile:
        for pcFile in pcFiles:
            listFile.write(pcFile + '\n')

    # Gets the SRID of the PC files (we assume all have the same SRID as the first file)
    #self.srid = lasops.getSRID(pcFiles[0])
    # NOTE(review): the assignment above is commented out, yet self.srid is
    # used below -- it must be set elsewhere (e.g. a parent class); confirm.

    logging.info('Creating auxiliary table ' + utils.QUERY_TABLE)
    # Drops possible query table, then recreates it.
    postgresops.dropTable(cursor, utils.QUERY_TABLE, check=True)
    cursor.execute("CREATE TABLE " + utils.QUERY_TABLE + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
    connection.commit()
    cursor.close()
    connection.close()
def loadFromFile(self, index, fileAbsPath):
    """Load one point-cloud file into a temporal flat table, group its
    points into QuadTree-cell blocks in the block table, and clean up.

    Args:
        index: ordinal of the file; used to name the temporal flat table.
        fileAbsPath: absolute path of the point-cloud file to load.
    """
    connection = self.getConnection()
    cursor = connection.cursor()
    # Add PC format to pointcloud_formats
    (columns, pcid, compression) = self.addPCFormat(cursor, self.schemaFile, fileAbsPath, self.srid)
    # Add the morton2D code to the requested columns
    columns.append('k')
    # Create a temporal flat table and load the points to it
    flatTable = self.blockTable + '_temp_' + str(index)
    # Use the index table space for the temporal table.
    self.createFlatTable(cursor, flatTable, self.indexTableSpace, columns)
    self.loadFromBinaryLoader(self.getConnectionString(False, True), flatTable, fileAbsPath, columns, self.minX, self.minY, self.scaleX, self.scaleY)
    # Create the blocks by grouping points in QuadTree cells
    query = """INSERT INTO """ + self.blockTable + """ (pa,quadcellid)
SELECT PC_Patch(pt),quadCellId FROM
(SELECT PC_MakePoint(%s, ARRAY[x,y,z]) pt, quadCellId(morton2D,%s) as quadCellId FROM """ + flatTable + """) A
GROUP BY quadCellId"""
    queryArgs = [pcid, BLOCKQUADTREELEVEL]
    cursor.execute(query, queryArgs)
    connection.commit()
    # Drop the temporal table
    postgresops.dropTable(cursor, flatTable)
    # Fix: the original never closed the connection (leak); close it like
    # the sibling loaders do (cf. loadFromFileSequential).
    connection.close()
def initialize(self):
    """Look up the data SRID in pointcloud_formats, rebuild the query-geometry
    table, and precompute PC_Get() accessors for the mapped PDAL columns."""
    conn = self.getConnection()
    cur = conn.cursor()
    cur.execute('SELECT srid from pointcloud_formats LIMIT 1')
    self.srid = cur.fetchone()[0]
    postgresops.dropTable(cur, self.queryTable, check=True)
    postgresops.mogrifyExecute(cur, "CREATE TABLE " + self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
    conn.close()
    # Build the per-column extraction expressions, skipping unmapped ones.
    self.columnsNameDict = {}
    for column, dimension in self.DM_PDAL.items():
        if dimension != None:
            self.columnsNameDict[column] = ("PC_Get(qpoint, '" + dimension.lower() + "')", )
def initialize(self):
    """Derive the meta table name, load extent/scale metadata, recreate the
    query table, build PC_Get accessors, and construct the Morton quadtree."""
    conn = self.getConnection()
    cur = conn.cursor()
    self.metaTable = self.blockTable + '_meta'
    postgresops.mogrifyExecute(cur, "SELECT srid, minx, miny, maxx, maxy, scalex, scaley from " + self.metaTable)
    (self.srid, self.minX, self.minY, self.maxX, self.maxY,
     self.scaleX, self.scaleY) = cur.fetchone()
    postgresops.dropTable(cur, self.queryTable, check=True)
    postgresops.mogrifyExecute(cur, "CREATE TABLE " + self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
    conn.close()
    # One SQL accessor per mapped PDAL dimension.
    self.columnsNameDict = {}
    for column, dimension in self.DM_PDAL.items():
        if dimension != None:
            self.columnsNameDict[column] = ("PC_Get(qpoint, '" + dimension.lower() + "')",)
    # Quadtree over the scaled integer domain.
    domain = (0, 0,
              int((self.maxX - self.minX) / self.scaleX),
              int((self.maxY - self.minY) / self.scaleY))
    self.quadtree = QuadTree(domain, 'auto')
    # Differentiate QuadTree nodes that are fully in the query region
    self.mortonDistinctIn = False
def query(self, queryId, iterationId, queriesParameters):
    """Run one benchmark query against the flat table; return (eTime, result).

    With numProcessesQuery > 1, rectangle/circle/generic queries take the
    Python parallelization path; otherwise the query runs sequentially as
    CTAS + stats ('disk'/'stat') or via dump-to-file counting ('stream').
    """
    (eTime, result) = (-1, None)
    connection = self.getConnection()
    cursor = connection.cursor()
    self.prepareQuery(cursor, queryId, queriesParameters, iterationId == 0)
    postgresops.dropTable(cursor, self.resultTable, True)

    if self.numProcessesQuery > 1:
        # NOTE(review): `connection` stays open on both early returns; the
        # parallel path opens its own connection.
        if self.qp.queryType in ('rectangle', 'circle', 'generic'):
            return self.pythonParallelization()
        logging.error('Python parallelization only available for queries which are not NN!')
        return (eTime, result)

    t0 = time.time()
    (query, queryArgs) = dbops.getSelect(self.qp, self.flatTable, self.addContainsCondition, self.DM_FLAT)
    if self.qp.queryMethod != 'stream':  # disk or stat
        postgresops.mogrifyExecute(cursor, "CREATE TABLE " + self.resultTable + " AS ( " + query + " )", queryArgs)
        (eTime, result) = dbops.getResult(cursor, t0, self.resultTable, self.DM_FLAT, True, self.qp.columns, self.qp.statistics)
    else:
        sqlFileName = str(queryId) + '.sql'
        postgresops.createSQLFile(cursor, sqlFileName, query, queryArgs)
        result = postgresops.executeSQLFileCount(self.getConnectionString(False, True), sqlFileName)
        eTime = time.time() - t0
    connection.close()
    return (eTime, result)
def initialize(self):
    """Load extent and scale metadata, rebuild the query-geometry table, and
    set up the quadtree used for Morton-range computation."""
    conn = self.getConnection()
    cur = conn.cursor()
    postgresops.mogrifyExecute(cur, "SELECT srid, minx, miny, maxx, maxy, scalex, scaley from " + self.metaTable)
    (self.srid, self.minX, self.minY, self.maxX, self.maxY,
     self.scaleX, self.scaleY) = cur.fetchone()
    postgresops.dropTable(cur, self.queryTable, check=True)
    postgresops.mogrifyExecute(cur, "CREATE TABLE " + self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
    conn.close()
    # Quadtree domain in scaled integer coordinates.
    extent = (0, 0,
              int((self.maxX - self.minX) / self.scaleX),
              int((self.maxY - self.minY) / self.scaleY))
    self.quadtree = QuadTree(extent, 'auto')
    # Differentiate QuadTree nodes that are fully in the query region
    self.mortonDistinctIn = False