Example No. 1
 def query(self, queryId, iterationId, queriesParameters):
     (eTime, result) = (-1, None)
     connection = self.getConnection()
     cursor = connection.cursor()
 
     self.prepareQuery(cursor, queryId, queriesParameters, iterationId == 0)
     postgresops.dropTable(cursor, self.resultTable, True)
     
     if self.numProcessesQuery > 1:
         if self.qp.queryType in ('rectangle', 'circle', 'generic'):
             return self.pythonParallelization()
         else:
             logging.error('Python parallelization only available for disk queries which are not NN queries!')
             return (eTime, result)
     
     t0 = time.time()
     (query, queryArgs) = self.getSelect(self.qp)
     
     if self.qp.queryMethod != 'stream': # disk or stat
         postgresops.mogrifyExecute(cursor, "CREATE TABLE "  + self.resultTable + " AS ( " + query + " )", queryArgs)
         (eTime, result) = dbops.getResult(cursor, t0, self.resultTable, None, True, self.qp.columns, self.qp.statistics)
     else:
         sqlFileName = str(queryId) + '.sql'
         postgresops.createSQLFile(cursor, sqlFileName, query, queryArgs)
         
         result = postgresops.executeSQLFileCount(self.getConnectionString(False, True), sqlFileName)
         eTime = time.time() - t0
     connection.close()
     return (eTime, result)
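
All of these examples route their SQL through postgresops.mogrifyExecute(cursor, statement, queryArgs). The helper itself is not part of the listing; the following is a minimal sketch of what it presumably does, based on how it is called here, using psycopg2's cursor.mogrify to log the fully bound statement before executing it. The real implementation may differ (for example in how it handles commits).

 import logging

 def mogrifyExecute(cursor, query, queryArgs=None):
     # Hypothetical sketch: log the bound statement, then execute it.
     if queryArgs is not None:
         logging.info(cursor.mogrify(query, queryArgs))  # bind the arguments only for the log line
         cursor.execute(query, queryArgs)
     else:
         logging.info(query)
         cursor.execute(query)
     # Transaction handling (commit vs. autocommit) is left to the caller in this sketch.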
Example No. 2
 def childInsert(self, iMortonRanges, xMortonRanges):
     connection = self.getConnection()
     cqp = copy.copy(self.qp)
     cqp.statistics = None
     (query, queryArgs) = self.getSelect(cqp, iMortonRanges, xMortonRanges)
     postgresops.mogrifyExecute(connection.cursor(), "INSERT INTO " + self.resultTable + " "  + query, queryArgs)
     connection.close()  
 def createMetaTable(self, cursor, metaTable):
     postgresops.mogrifyExecute(
         cursor,
         "CREATE TABLE "
         + metaTable
         + " (tablename text, srid integer, minx DOUBLE PRECISION, miny DOUBLE PRECISION, maxx DOUBLE PRECISION, maxy DOUBLE PRECISION, scalex DOUBLE PRECISION, scaley DOUBLE PRECISION)",
     )
Example No. 4
 def runGenericQueryParallelCandChild(self, chunkIds):
     connection = self.getConnection()
     cursor = connection.cursor()
     cqp = copy.copy(self.qp)
     cqp.statistics = None
     (query, queryArgs) = self.getSelectParallel(cursor, cqp, self.queryTable, self.queryIndex, True, chunkIds)
     postgresops.mogrifyExecute(cursor, "INSERT INTO "  + self.resultTable + " " + query, queryArgs)
     connection.close()      
Example No. 5
 def childInsert(self, iMortonRanges, xMortonRanges):
     connection = self.getConnection()
     cursor = connection.cursor()
     cqp = copy.copy(self.qp)
     cqp.statistics = None
     (query, queryArgs) = dbops.getSelectMorton(iMortonRanges, xMortonRanges, cqp, self.flatTable, self.addContainsCondition, self.DM_FLAT)
     postgresops.mogrifyExecute(cursor, "INSERT INTO "  + self.resultTable + " " + query, queryArgs)
     connection.close()
Example No. 6
    def query(self, queryId, iterationId, queriesParameters):
        (eTime, result) = (-1, None)
        connection = self.getConnection()
        cursor = connection.cursor()
        self.prepareQuery(cursor, queryId, queriesParameters, iterationId == 0)
        postgresops.dropTable(cursor, self.resultTable, True)

        wkt = self.qp.wkt
        if self.qp.queryType == 'nn':
            g = loads(self.qp.wkt)
            wkt = dumps(g.buffer(self.qp.rad))

        t0 = time.time()
        scaledWKT = wktops.scale(wkt, self.scaleX, self.scaleY, self.minX,
                                 self.minY)
        (mimranges,
         mxmranges) = self.quadtree.getMortonRanges(scaledWKT,
                                                    self.mortonDistinctIn,
                                                    maxRanges=MAXIMUM_RANGES)

        if len(mimranges) == 0 and len(mxmranges) == 0:
            logging.info('No Morton ranges found in the specified extent!')
            return (eTime, result)

        if self.numProcessesQuery > 1:
            if self.qp.queryMethod != 'stream' and self.qp.queryType in (
                    'rectangle', 'circle', 'generic'):
                return self.pythonParallelization(t0, mimranges, mxmranges)
            else:
                logging.error(
                    'Python parallelization only available for disk queries (CTAS) which are not NN queries!'
                )
                return (eTime, result)

        (query, queryArgs) = dbops.getSelectMorton(mimranges, mxmranges,
                                                   self.qp, self.flatTable,
                                                   self.addContainsCondition,
                                                   self.DM_FLAT)

        if self.qp.queryMethod != 'stream':  # disk or stat
            postgresops.mogrifyExecute(
                cursor,
                "CREATE TABLE " + self.resultTable + " AS " + query + "",
                queryArgs)
            (eTime, result) = dbops.getResult(cursor, t0, self.resultTable,
                                              self.DM_FLAT,
                                              (not self.mortonDistinctIn),
                                              self.qp.columns,
                                              self.qp.statistics)
        else:
            sqlFileName = str(queryId) + '.sql'
            postgresops.createSQLFile(cursor, sqlFileName, query, queryArgs)
            result = postgresops.executeSQLFileCount(
                self.getConnectionString(False, True), sqlFileName)
            eTime = time.time() - t0

        connection.close()
        return (eTime, result)
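
In the nn branch above, the query geometry is widened into a disk of radius qp.rad before Morton ranges are computed; loads and dumps are presumably shapely's WKT helpers. A standalone illustration of that buffering step (the coordinates and radius are made up):

 from shapely.wkt import loads, dumps

 wkt = 'POINT (100.0 200.0)'   # hypothetical query point
 rad = 5.0                     # hypothetical nn search radius
 bufferedWkt = dumps(loads(wkt).buffer(rad))  # polygon approximating a disk of radius rad
 print(bufferedWkt[:60])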
    def createGridTableMethod(self, cursor, gridTable, nrows, ncols):
        (minx,maxx,miny,maxy) = self.getBBoxGeometry(cursor, self.queryTable, self.queryIndex)
        query = """ 
CREATE TABLE """ + gridTable + """ AS
    SELECT row_number() OVER() AS id, ST_Intersection(A.geom, ST_SetSRID(B.geom, %s)) as geom FROM (SELECT geom FROM """ + self.queryTable + """ WHERE id = %s) A, ST_CreateFishnet(%s, %s, %s, %s, %s, %s) B 
"""
        queryArgs = [self.srid, self.queryIndex, nrows, ncols,
                     (maxx - minx) / float(ncols), (maxy - miny) / float(nrows),
                     minx, miny]
        postgresops.mogrifyExecute(cursor, query, queryArgs)
        cursor.execute("CREATE INDEX " + gridTable + "_rowcol ON " + gridTable + " ( id )")
        cursor.connection.commit()
Example No. 8
    def initialize(self):
        connection = self.getConnection()
        cursor = connection.cursor()
        
        postgresops.mogrifyExecute(cursor, "SELECT srid from " + self.metaTable)
        self.srid = cursor.fetchone()[0]

        postgresops.dropTable(cursor, self.queryTable, check = True)
        postgresops.mogrifyExecute(cursor, "CREATE TABLE " +  self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
        connection.close()
Example No. 9
 def process(self):
     logging.info('Starting data loading with las2pg (parallel by python) from ' + self.inputFolder + ' to ' + self.dbName)
     # Insert the extent of the loaded PC
     connection = self.getConnection()
     cursor = connection.cursor()
     metaArgs = (self.flatTable, self.srid, self.minX, self.minY, self.maxX, self.maxY, self.scaleX, self.scaleY)
     postgresops.mogrifyExecute(cursor, "INSERT INTO " + self.metaTable + " VALUES (%s,%s,%s,%s,%s,%s,%s,%s)" , metaArgs)
     connection.close()
     # Start the multiprocessing (las2pg in parallel)
     return self.processMulti(self.inputFiles, self.numProcessesLoad, self.loadFromFile)
Example No. 10
 def runGenericQueryParallelGridChild(self, sIndex, gridTable):
     connection = self.getConnection()
     cursor = connection.cursor()
     cqp = copy.copy(self.qp)
     cqp.statistics = None
     (query, queryArgs) = self.getSelectParallel(cursor, cqp, gridTable,
                                                 sIndex + 1)
     postgresops.mogrifyExecute(
         cursor, "INSERT INTO " + self.resultTable + " " + query, queryArgs)
     connection.close()
Example No. 11
 def loadFromFileSequential(self, fileAbsPath, index, numFiles):
     fileBlockTable = self.getFileBlockTable(index)
     connection = self.getConnection()
     cursor = connection.cursor()
     # Insert the blocks on the global blocks table (with correct order)
     query = "INSERT INTO " + self.blockTable + " (pa) SELECT pa FROM " + fileBlockTable + " ORDER BY id"
     postgresops.mogrifyExecute(cursor, query)
     # Drop the temporal table
     postgresops.dropTable(cursor, fileBlockTable)
     connection.close()
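
Several examples call postgresops.dropTable with an optional third argument (for instance dropTable(cursor, self.resultTable, True)). The helper is not shown; presumably the flag switches to DROP TABLE IF EXISTS so that dropping a table that does not exist yet is not an error. A sketch under that assumption:

 def dropTable(cursor, tableName, check=False):
     # Hypothetical sketch: drop tableName, tolerating its absence when check is True.
     if check:
         cursor.execute('DROP TABLE IF EXISTS ' + tableName)
     else:
         cursor.execute('DROP TABLE ' + tableName)
     cursor.connection.commit()  # committing here is an assumption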
    def createFlatTable(self, cursor, flatTable, tableSpace, columns):
        cols = []
        for c in columns:
            if c not in self.DM_FLAT:
                raise Exception('Wrong column! ' + c)
            cols.append(self.DM_FLAT[c][0] + ' ' + self.DM_FLAT[c][1])

        # Create the flat table that will contain all the data
        postgresops.mogrifyExecute(
            cursor, """CREATE TABLE """ + flatTable + """ (
        """ + (',\n'.join(cols)) + """)""" +
            self.getTableSpaceString(tableSpace))
 def createBlocksTable(self,
                       cursor,
                       blockTable,
                       tableSpace,
                       quadcell=False):
     aux = ''
     if quadcell:
         aux = ",quadCellId BIGINT"
     postgresops.mogrifyExecute(
         cursor, "CREATE TABLE " + blockTable +
         " (id SERIAL PRIMARY KEY,pa PCPATCH" + aux + ")" +
         self.getTableSpaceString(tableSpace))
Example No. 14
 def childInsert(self, iMortonRanges, xMortonRanges):
     connection = self.getConnection()
     cursor = connection.cursor()
     cqp = copy.copy(self.qp)
     cqp.statistics = None
     (query, queryArgs) = dbops.getSelectMorton(iMortonRanges,
                                                xMortonRanges, cqp,
                                                self.flatTable,
                                                self.addContainsCondition,
                                                self.DM_FLAT)
     postgresops.mogrifyExecute(
         cursor, "INSERT INTO " + self.resultTable + " " + query, queryArgs)
     connection.close()
 def createBlocksTable(self, cursor, blockTable, tableSpace, quadcell=False):
     aux = ""
     if quadcell:
         aux = ",quadCellId BIGINT"
     postgresops.mogrifyExecute(
         cursor,
         "CREATE TABLE "
         + blockTable
         + " (id SERIAL PRIMARY KEY,pa PCPATCH"
         + aux
         + ")"
         + self.getTableSpaceString(tableSpace),
     )
Example No. 16
    def initialize(self):
        connection = self.getConnection()
        cursor = connection.cursor()

        postgresops.mogrifyExecute(cursor,
                                   "SELECT srid from " + self.metaTable)
        self.srid = cursor.fetchone()[0]

        postgresops.dropTable(cursor, self.queryTable, check=True)
        postgresops.mogrifyExecute(
            cursor, "CREATE TABLE " + self.queryTable +
            " (id integer, geom public.geometry(Geometry," + str(self.srid) +
            "));")
        connection.close()
Example No. 17
 def runGenericQueryParallelGridChild(self, sIndex, gridTable):
     connection = self.getConnection()
     cursor = connection.cursor()
     (minx,maxx,miny,maxy) = self.getBBoxGeometry(cursor, gridTable, sIndex+1)
     
     qType = 'generic'
     if self.qp.queryType == 'rectangle':
         qType = 'rectangle'
     
     self.queryIndex = sIndex+1
     self.queryTable = gridTable
     
     cqp = QueryParameters('psql', None, 'disk', qType, None, self.qp.columns,
                           None, minx, maxx, miny, maxy, None, None, None,
                           self.qp.minz, self.qp.maxz, None, None, None, None)
     (query, queryArgs) = dbops.getSelect(cqp, self.flatTable, self.addContainsCondition, self.DM_FLAT)
     postgresops.mogrifyExecute(cursor, "INSERT INTO "  + self.resultTable + " " + query, queryArgs)
     connection.close()
Example No. 18
 def initialize(self):
     connection = self.getConnection()
     cursor = connection.cursor()
     
     cursor.execute('SELECT srid from pointcloud_formats LIMIT 1')
     self.srid = cursor.fetchone()[0]
     
     postgresops.dropTable(cursor, self.queryTable, check = True)
     postgresops.mogrifyExecute(cursor, "CREATE TABLE " +  self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
     
     connection.close()
     
     self.columnsNameDict = {}
     for c in self.DM_PDAL:
         if self.DM_PDAL[c] is not None:
             self.columnsNameDict[c] = ("PC_Get(qpoint, '" + self.DM_PDAL[c].lower() + "')",)
Example No. 19
 def initialize(self):
     connection = self.getConnection()
     cursor = connection.cursor()
     
     postgresops.mogrifyExecute(cursor, "SELECT srid, minx, miny, maxx, maxy, scalex, scaley from " + self.metaTable)
     (self.srid, self.minX, self.minY, self.maxX, self.maxY, self.scaleX, self.scaleY) = cursor.fetchone()
     
     postgresops.dropTable(cursor, self.queryTable, check = True)
     postgresops.mogrifyExecute(cursor, "CREATE TABLE " +  self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
     
     connection.close()
     
     qtDomain = (0, 0, int((self.maxX-self.minX)/self.scaleX), int((self.maxY-self.minY)/self.scaleY))
     self.quadtree = QuadTree(qtDomain, 'auto')    
     # Differentiate QuadTree nodes that are fully in the query region
     self.mortonDistinctIn = False
Example No. 20
    def query(self, queryId, iterationId, queriesParameters):
        (eTime, result) = (-1, None)
        connection = self.getConnection()
        cursor = connection.cursor()
               
        self.prepareQuery(cursor, queryId, queriesParameters, iterationId == 0)
        postgresops.dropTable(cursor, self.resultTable, True)    
       
        wkt = self.qp.wkt
        if self.qp.queryType == 'nn':
            g = loads(self.qp.wkt)
            wkt = dumps(g.buffer(self.qp.rad))
       
        t0 = time.time()
        scaledWKT = wktops.scale(wkt, self.scaleX, self.scaleY, self.minX, self.minY)    
        (mimranges,mxmranges) = self.quadtree.getMortonRanges(scaledWKT, self.mortonDistinctIn, maxRanges = MAXIMUM_RANGES)
       
        if len(mimranges) == 0 and len(mxmranges) == 0:
            logging.info('No Morton ranges found in the specified extent!')
            return (eTime, result)

        if self.qp.queryType == 'nn':
            logging.error('NN queries not available!')
            return (eTime, result)

        if self.numProcessesQuery > 1:
            if self.qp.queryMethod != 'stream' and self.qp.queryType in ('rectangle', 'circle', 'generic'):
                return self.pythonParallelization(t0, mimranges, mxmranges)
            else:
                logging.error('Python parallelization only available for disk queries (CTAS) which are not NN queries!')
                return (eTime, result)
        
        (query, queryArgs) = self.getSelect(self.qp, mimranges, mxmranges)        
         
        if self.qp.queryMethod != 'stream': # disk or stat
            postgresops.mogrifyExecute(cursor, "CREATE TABLE "  + self.resultTable + " AS (" + query + ")", queryArgs)
            (eTime, result) = dbops.getResult(cursor, t0, self.resultTable, self.DM_FLAT, (not self.mortonDistinctIn), self.qp.columns, self.qp.statistics)
        else:
            sqlFileName = str(queryId) + '.sql'
            postgresops.createSQLFile(cursor, sqlFileName, query, queryArgs)
            result = postgresops.executeSQLFileCount(self.getConnectionString(False, True), sqlFileName)
            eTime = time.time() - t0
            
        connection.close()
        return (eTime, result)
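
When the query method is 'stream', the statement is dumped to a .sql file with postgresops.createSQLFile and then counted with postgresops.executeSQLFileCount instead of being materialized in a result table. Neither helper appears in the listing; the sketch below assumes they write the mogrified statement to disk and pipe it through the psql client, counting the rows it returns. The psql flags and the connection-string format are assumptions, not the project's actual code.

 import subprocess

 def createSQLFile(cursor, fileName, query, queryArgs):
     # Hypothetical sketch: write the fully bound statement to fileName.
     statement = cursor.mogrify(query, queryArgs)
     if isinstance(statement, bytes):  # psycopg2 returns bytes under Python 3
         statement = statement.decode('utf-8')
     with open(fileName, 'w') as f:
         f.write(statement + ';\n')

 def executeSQLFileCount(connectionString, fileName):
     # Hypothetical sketch: stream the file through psql and count the returned rows.
     command = 'psql ' + connectionString + ' -t -A -f ' + fileName + ' | wc -l'
     output = subprocess.check_output(command, shell=True)
     return int(output)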
    def createFlatTable(self, cursor, flatTable, tableSpace, columns):
        cols = []
        for c in columns:
            if c not in self.DM_FLAT:
                raise Exception("Wrong column!" + c)
            cols.append(self.DM_FLAT[c][0] + " " + self.DM_FLAT[c][1])

        # Create the flat table that will contain all the data
        postgresops.mogrifyExecute(
            cursor,
            """CREATE TABLE """
            + flatTable
            + """ (
        """
            + (",\n".join(cols))
            + """)"""
            + self.getTableSpaceString(tableSpace),
        )
Example No. 22
    def initialize(self):
        connection = self.getConnection()
        cursor = connection.cursor()

        cursor.execute('SELECT srid from pointcloud_formats LIMIT 1')
        self.srid = cursor.fetchone()[0]

        postgresops.dropTable(cursor, self.queryTable, check=True)
        postgresops.mogrifyExecute(
            cursor, "CREATE TABLE " + self.queryTable +
            " (id integer, geom public.geometry(Geometry," + str(self.srid) +
            "));")

        connection.close()

        self.columnsNameDict = {}
        for c in self.DM_PDAL:
            if self.DM_PDAL[c] is not None:
                self.columnsNameDict[c] = ("PC_Get(qpoint, '" +
                                           self.DM_PDAL[c].lower() + "')", )
Example No. 23
    def createGridTableMethod(self, cursor, gridTable, nrows, ncols):
        (minx, maxx, miny,
         maxy) = self.getBBoxGeometry(cursor, self.queryTable, self.queryIndex)
        query = """ 
CREATE TABLE """ + gridTable + """ AS
    SELECT row_number() OVER() AS id, ST_Intersection(A.geom, ST_SetSRID(B.geom, %s)) as geom FROM (SELECT geom FROM """ + self.queryTable + """ WHERE id = %s) A, ST_CreateFishnet(%s, %s, %s, %s, %s, %s) B 
"""
        queryArgs = [
            self.srid,
            self.queryIndex,
            nrows,
            ncols,
            (maxx - minx) / float(ncols),
            (maxy - miny) / float(nrows),
            minx,
            miny,
        ]
        postgresops.mogrifyExecute(cursor, query, queryArgs)
        cursor.execute("CREATE INDEX " + gridTable + "_rowcol ON " +
                       gridTable + " ( id )")
        cursor.connection.commit()
 def indexFlatTable(self, cursor, flatTable, indexTableSpace, index, cluster=False):
     if index in ("xy", "xyz"):
         indexName = flatTable + "_" + index + "_btree_idx"
         postgresops.mogrifyExecute(
             cursor,
             "create index "
             + indexName
             + " on "
             + flatTable
             + " ("
             + (",".join(index))
             + ") WITH (FILLFACTOR="
             + str(FILLFACTOR)
             + ")"
             + self.getIndexTableSpaceString(indexTableSpace),
         )
     elif index == "k":
         indexName = flatTable + "_morton_btree_idx"
         postgresops.mogrifyExecute(
             cursor,
             "create index "
             + indexName
             + " on "
             + flatTable
             + " (morton2D) WITH (FILLFACTOR="
             + str(FILLFACTOR)
             + ")"
             + self.getIndexTableSpaceString(indexTableSpace),
         )
     if cluster:
         postgresops.mogrifyExecute(cursor, "CLUSTER " + flatTable + " USING " + indexName)
 def indexBlockTable(self, cursor, blockTable, indexTableSpace, quadcell=False, cluster=False):
     if quadcell:
         indexName = self.blockTable + "_btree"
         postgresops.mogrifyExecute(
             cursor,
             "CREATE INDEX "
             + indexName
             + " ON "
             + blockTable
             + " (quadCellId)"
             + self.getIndexTableSpaceString(indexTableSpace),
         )
     else:
         indexName = blockTable + "_gist"
         postgresops.mogrifyExecute(
             cursor,
             "CREATE INDEX "
             + indexName
             + " ON "
             + blockTable
             + " USING GIST ( geometry(pa) )"
             + self.getIndexTableSpaceString(indexTableSpace),
         )
     if cluster:
         postgresops.mogrifyExecute(cursor, "CLUSTER " + blockTable + " USING " + indexName)
Example No. 26
 def initialize(self):
     connection = self.getConnection()
     cursor = connection.cursor()
     
     self.metaTable = self.blockTable + '_meta'
     postgresops.mogrifyExecute(cursor, "SELECT srid, minx, miny, maxx, maxy, scalex, scaley from " + self.metaTable)
     (self.srid, self.minX, self.minY, self.maxX, self.maxY, self.scaleX, self.scaleY) = cursor.fetchone()
     
     postgresops.dropTable(cursor, self.queryTable, check = True)
     postgresops.mogrifyExecute(cursor, "CREATE TABLE " +  self.queryTable + " (id integer, geom public.geometry(Geometry," + str(self.srid) + "));")
     
     connection.close()
     
     self.columnsNameDict = {}
     for c in self.DM_PDAL:
         if self.DM_PDAL[c] is not None:
             self.columnsNameDict[c] = ("PC_Get(qpoint, '" + self.DM_PDAL[c].lower() + "')",)
     
     qtDomain = (0, 0, int((self.maxX-self.minX)/self.scaleX), int((self.maxY-self.minY)/self.scaleY))
     self.quadtree = QuadTree(qtDomain, 'auto')    
     # Differentiate QuadTree nodes that are fully in the query region
     self.mortonDistinctIn = False
Example No. 27
    def query(self, queryId, iterationId, queriesParameters):
        (eTime, result) = (-1, None)
        connection = self.getConnection()
        cursor = connection.cursor()

        self.prepareQuery(cursor, queryId, queriesParameters, iterationId == 0)
        postgresops.dropTable(cursor, self.resultTable, True)

        if self.numProcessesQuery > 1:
            if self.qp.queryType in ('rectangle', 'circle', 'generic'):
                return self.pythonParallelization()
            else:
                logging.error(
                    'Python parallelization only available for queries which are not NN!'
                )
                return (eTime, result)

        t0 = time.time()
        (query, queryArgs) = dbops.getSelect(self.qp, self.flatTable,
                                             self.addContainsCondition,
                                             self.DM_FLAT)

        if self.qp.queryMethod != 'stream':  # disk or stat
            postgresops.mogrifyExecute(
                cursor,
                "CREATE TABLE " + self.resultTable + " AS ( " + query + " )",
                queryArgs)
            (eTime, result) = dbops.getResult(cursor, t0, self.resultTable,
                                              self.DM_FLAT, True,
                                              self.qp.columns,
                                              self.qp.statistics)
        else:
            sqlFileName = str(queryId) + '.sql'
            postgresops.createSQLFile(cursor, sqlFileName, query, queryArgs)
            result = postgresops.executeSQLFileCount(
                self.getConnectionString(False, True), sqlFileName)
            eTime = time.time() - t0
        connection.close()
        return (eTime, result)
Example No. 28
    def runGenericQueryParallelGridChild(self, sIndex, gridTable):
        connection = self.getConnection()
        cursor = connection.cursor()
        (minx, maxx, miny,
         maxy) = self.getBBoxGeometry(cursor, gridTable, sIndex + 1)

        qType = 'generic'
        if self.qp.queryType == 'rectangle':
            qType = 'rectangle'

        self.queryIndex = sIndex + 1
        self.queryTable = gridTable

        cqp = QueryParameters('psql', None, 'disk', qType, None,
                              self.qp.columns, None, minx, maxx, miny, maxy,
                              None, None, None, self.qp.minz, self.qp.maxz,
                              None, None, None, None)
        (query, queryArgs) = dbops.getSelect(cqp, self.flatTable,
                                             self.addContainsCondition,
                                             self.DM_FLAT)
        postgresops.mogrifyExecute(
            cursor, "INSERT INTO " + self.resultTable + " " + query, queryArgs)
        connection.close()
Example No. 29
    def initialize(self):
        connection = self.getConnection()
        cursor = connection.cursor()

        postgresops.mogrifyExecute(
            cursor,
            "SELECT srid, minx, miny, maxx, maxy, scalex, scaley from " +
            self.metaTable)
        (self.srid, self.minX, self.minY, self.maxX, self.maxY, self.scaleX,
         self.scaleY) = cursor.fetchone()

        postgresops.dropTable(cursor, self.queryTable, check=True)
        postgresops.mogrifyExecute(
            cursor, "CREATE TABLE " + self.queryTable +
            " (id integer, geom public.geometry(Geometry," + str(self.srid) +
            "));")

        connection.close()

        qtDomain = (0, 0, int((self.maxX - self.minX) / self.scaleX),
                    int((self.maxY - self.minY) / self.scaleY))
        self.quadtree = QuadTree(qtDomain, 'auto')
        # Differentiate QuadTree nodes that are fully in the query region
        self.mortonDistinctIn = False
 def indexBlockTable(self,
                     cursor,
                     blockTable,
                     indexTableSpace,
                     quadcell=False,
                     cluster=False):
     if quadcell:
         indexName = self.blockTable + "_btree"
         postgresops.mogrifyExecute(
             cursor, 'CREATE INDEX ' + indexName + ' ON ' + blockTable +
             ' (quadCellId)' +
             self.getIndexTableSpaceString(indexTableSpace))
     else:
         indexName = blockTable + "_gist"
         postgresops.mogrifyExecute(
             cursor, 'CREATE INDEX ' + indexName + ' ON ' + blockTable +
             ' USING GIST ( geometry(pa) )' +
             self.getIndexTableSpaceString(indexTableSpace))
     if cluster:
         postgresops.mogrifyExecute(
             cursor, "CLUSTER " + blockTable + " USING " + indexName)
 def indexFlatTable(self,
                    cursor,
                    flatTable,
                    indexTableSpace,
                    index,
                    cluster=False):
     if index in ('xy', 'xyz'):
         indexName = flatTable + "_" + index + "_btree_idx"
         postgresops.mogrifyExecute(
             cursor,
             "create index " + indexName + " on " + flatTable + " (" +
             (','.join(index)) + ") WITH (FILLFACTOR=" + str(FILLFACTOR) +
             ")" + self.getIndexTableSpaceString(indexTableSpace))
     elif index == 'k':
         indexName = flatTable + "_morton_btree_idx"
         postgresops.mogrifyExecute(
             cursor, "create index " + indexName + " on " + flatTable +
             " (morton2D) WITH (FILLFACTOR=" + str(FILLFACTOR) + ")" +
             self.getIndexTableSpaceString(indexTableSpace))
     if cluster:
         postgresops.mogrifyExecute(
             cursor, "CLUSTER " + flatTable + " USING " + indexName)
 def createMetaTable(self, cursor, metaTable):
     postgresops.mogrifyExecute(
         cursor, "CREATE TABLE " + metaTable +
         " (tablename text, srid integer, minx DOUBLE PRECISION, miny DOUBLE PRECISION, maxx DOUBLE PRECISION, maxy DOUBLE PRECISION, scalex DOUBLE PRECISION, scaley DOUBLE PRECISION)"
     )
 def vacuumTable(self, cursor, tableName):
     connection = cursor.connection
     old_isolation_level = connection.isolation_level
     # VACUUM cannot run inside a transaction block, so switch to autocommit temporarily
     connection.set_isolation_level(0)
     postgresops.mogrifyExecute(cursor, "VACUUM FULL ANALYZE " + tableName)
     connection.set_isolation_level(old_isolation_level)
    def addPCFormat(self, cursor, schemaFile, fileAbsPath, srid):
        (_, _, _, _, _, _, _, _, scaleX, scaleY, scaleZ, offsetX, offsetY,
         offsetZ) = lasops.getPCFileDetails(fileAbsPath, srid)

        updatedFormat = False
        schema = None

        pc_namespace = '{http://pointcloud.org/schemas/PC/}'

        while not updatedFormat:

            # Check whether there is already a format with current scale-offset values
            cursor.execute(
                "SELECT pcid,schema FROM pointcloud_formats WHERE srid = %s AND scaleX = %s AND scaleY = %s AND scaleZ = %s AND offsetX = %s AND offsetY = %s AND offsetZ = %s",
                [srid, scaleX, scaleY, scaleZ, offsetX, offsetY, offsetZ])
            rows = cursor.fetchall()

            if len(rows):
                # There is already a format with these scale-offset values
                [pcid, schema] = rows[0]
                root = ET.fromstring(schema)
                updatedFormat = True
            else:
                if schema is None:
                    # There is not a format with these scale-offset values. We add a schema for these ones
                    # Get ElementTree of the XML schema file
                    tree = ET.parse(schemaFile)
                    root = tree.getroot()

                    offsets = {'x': offsetX, 'y': offsetY, 'z': offsetZ}
                    scales = {'x': scaleX, 'y': scaleY, 'z': scaleZ}
                    for dimension in root.findall(pc_namespace + 'dimension'):
                        dimName = dimension.find(pc_namespace + 'name').text
                        if dimName.lower() in offsets:
                            dimension.find(pc_namespace + 'offset').text = str(
                                offsets[dimName.lower()])
                            dimension.find(pc_namespace + 'scale').text = str(
                                scales[dimName.lower()])

                    schema = '<?xml version="1.0" encoding="UTF-8"?>' + '\n' + ET.tostring(
                        root, encoding='utf8', method='xml')

                cursor.execute("SELECT max(pcid) FROM pointcloud_formats")
                rows = cursor.fetchall()
                pcid = 1
                if len(rows) and rows[0][0] is not None:
                    pcid = rows[0][0] + 1
                try:
                    postgresops.mogrifyExecute(
                        cursor,
                        "INSERT INTO pointcloud_formats (pcid, srid, schema, scalex, scaley, scalez, offsetx, offsety, offsetz) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                        [
                            pcid, srid, schema, scaleX, scaleY, scaleZ,
                            offsetX, offsetY, offsetZ
                        ])
                    updatedFormat = True
                except Exception:
                    # e.g. a concurrent process inserted the same pcid first; roll back and retry the loop
                    cursor.connection.rollback()

        # Get the used dimensions if still not acquired (they should be always the same)
        compression = root.find(pc_namespace +
                                'metadata').find('Metadata').text
        columns = []
        for dimension in root.findall(pc_namespace + 'dimension'):
            dName = dimension.find(pc_namespace + 'name').text
            correctDim = False
            for d in self.DM_PDAL:
                if self.DM_PDAL[d] == dName:
                    correctDim = True
                    columns.append(d)
                    break
            if not correctDim:
                raise Exception(
                    'Error: unexpected dimension name, check dimension mappings for PDAL'
                )

        return (columns, pcid, compression)
 def vacuumTable(self, cursor, tableName):
     connection = cursor.connection
     old_isolation_level = connection.isolation_level
     # VACUUM cannot run inside a transaction block, so switch to autocommit temporarily
     connection.set_isolation_level(0)
     postgresops.mogrifyExecute(cursor, "VACUUM FULL ANALYZE " + tableName)
     connection.set_isolation_level(old_isolation_level)
    def addPCFormat(self, cursor, schemaFile, fileAbsPath, srid):
        (_, _, _, _, _, _, _, _, scaleX, scaleY, scaleZ, offsetX, offsetY, offsetZ) = lasops.getPCFileDetails(
            fileAbsPath, srid
        )

        updatedFormat = False
        schema = None

        pc_namespace = "{http://pointcloud.org/schemas/PC/}"

        while not updatedFormat:

            # Check whether there is already a format with current scale-offset values
            cursor.execute(
                "SELECT pcid,schema FROM pointcloud_formats WHERE srid = %s AND scaleX = %s AND scaleY = %s AND scaleZ = %s AND offsetX = %s AND offsetY = %s AND offsetZ = %s",
                [srid, scaleX, scaleY, scaleZ, offsetX, offsetY, offsetZ],
            )
            rows = cursor.fetchall()

            if len(rows):
                # There is already a format with these scale-offset values
                [pcid, schema] = rows[0]
                root = ET.fromstring(schema)
                updatedFormat = True
            else:
                if schema is None:
                    # There is not a format with these scale-offset values. We add a schema for these ones
                    # Get ElementTree of the XML schema file
                    tree = ET.parse(schemaFile)
                    root = tree.getroot()

                    offsets = {"x": offsetX, "y": offsetY, "z": offsetZ}
                    scales = {"x": scaleX, "y": scaleY, "z": scaleZ}
                    for dimension in root.findall(pc_namespace + "dimension"):
                        dimName = dimension.find(pc_namespace + "name").text
                        if dimName.lower() in offsets:
                            dimension.find(pc_namespace + "offset").text = str(offsets[dimName.lower()])
                            dimension.find(pc_namespace + "scale").text = str(scales[dimName.lower()])

                    schema = (
                        '<?xml version="1.0" encoding="UTF-8"?>'
                        + "\n"
                        + ET.tostring(root, encoding="utf8", method="xml")
                    )

                cursor.execute("SELECT max(pcid) FROM pointcloud_formats")
                rows = cursor.fetchall()
                pcid = 1
                if len(rows) and rows[0][0] is not None:
                    pcid = rows[0][0] + 1
                try:
                    postgresops.mogrifyExecute(
                        cursor,
                        "INSERT INTO pointcloud_formats (pcid, srid, schema, scalex, scaley, scalez, offsetx, offsety, offsetz) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                        [pcid, srid, schema, scaleX, scaleY, scaleZ, offsetX, offsetY, offsetZ],
                    )
                    updatedFormat = True
                except Exception:
                    # e.g. a concurrent process inserted the same pcid first; roll back and retry the loop
                    cursor.connection.rollback()

        # Get the used dimensions if still not acquired (they should be always the same)
        compression = root.find(pc_namespace + "metadata").find("Metadata").text
        columns = []
        for dimension in root.findall(pc_namespace + "dimension"):
            dName = dimension.find(pc_namespace + "name").text
            correctDim = False
            for d in self.DM_PDAL:
                if self.DM_PDAL[d] == dName:
                    correctDim = True
                    columns.append(d)
                    break
            if not correctDim:
                raise Exception("Error: unexpected dimension name, check dimension mappings for PDAL")

        return (columns, pcid, compression)