Beispiel #1
0
def run(opts):
    """Convert raw point-cloud data items into POTree format.

    opts must provide: log (log level), dbname/dbuser/dbpass/dbhost/dbport
    (DB connection), potreeDir (output directory) and itemid:
      '?'  - list the raw data items and exit,
      ''   - convert every item that has no POTree conversion yet,
      '!'  - only list the items pending conversion,
      else - comma-separated raw_data_item_ids to convert.
    """
    # Start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)
    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)

    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        # close the DB connection before the early exit (was leaked before)
        utils.closeConnectionDB(connection, cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        # Raw point-cloud items that do not have a POTree conversion yet
        query = """
SELECT raw_data_item_id,abs_path,background 
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) JOIN RAW_DATA_ITEM_PC USING (raw_data_item_id) 
WHERE raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM POTREE_DATA_ITEM_PC)"""
        # Get the list of items that are not converted yet.
        # NOTE(review): an earlier comment claimed the items are sorted so the
        # background is converted first, but the query has no ORDER BY — confirm.
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(
            cursor, query)
        for (rawDataItemId, absPath, isBackground) in raw_data_items:
            if opts.itemid == '':
                # convert this pending item
                levels = getNumLevels(opts, isBackground)
                createPOTree(cursor, rawDataItemId, opts.potreeDir, levels)
            else:
                # '!' only lists the pending items
                m = '\t'.join((str(rawDataItemId), absPath))
                print(m)
                logging.info(m)

    else:
        # Convert only the explicitly requested items
        for rawDataItemId in opts.itemid.split(','):
            rows, num_rows = utils.fetchDataFromDB(
                cursor,
                'SELECT background FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) WHERE raw_data_item_id = %s',
                [int(rawDataItemId)])
            if num_rows == 0:
                logging.error('There is not a raw data item with id %d' %
                              int(rawDataItemId))
                # close the DB connection before the early exit (was leaked before)
                utils.closeConnectionDB(connection, cursor)
                return
            isBackground = rows[0][0]
            levels = getNumLevels(opts, isBackground)
            createPOTree(cursor, int(rawDataItemId), opts.potreeDir, levels)

    # close DB connection
    utils.closeConnectionDB(connection, cursor)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
Beispiel #2
0
def run(args):
    """Fan out item processing over args.cores worker processes.

    Collects the item ids to process (all non-background items when
    args.itemid is '', otherwise the given comma-separated list), feeds
    them to runChild workers through a task queue and waits for one
    result per item before joining the workers.
    """
    # start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=utils.DEFAULT_LOG_LEVEL)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)

    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost,
                                           args.dbport)

    itemIds = []
    if args.itemid == '':
        # no explicit ids given: process every non-background item
        data, num = utils.fetchDataFromDB(
            cursor, 'SELECT item_id FROM ITEM WHERE NOT background')
        for (itemId,) in data:
            itemIds.append(itemId)
    else:
        itemIds = args.itemid.split(',')

    # close the connection to the DB (the workers open their own)
    utils.closeConnectionDB(connection, cursor)

    # Create queues
    itemsQueue = multiprocessing.Queue()    # the queue of tasks (queries)
    resultsQueue = multiprocessing.Queue()  # the queue of results

    for itemId in itemIds:
        itemsQueue.put(int(itemId))
    # add as many None jobs as workers to tell them to terminate (queue is FIFO)
    for i in range(args.cores):
        itemsQueue.put(None)

    procs = []
    # start the worker processes
    for i in range(args.cores):
        procs.append(multiprocessing.Process(target=runChild,
            args=(i, itemsQueue, resultsQueue, args.las, args.dbname,
                  args.dbuser, args.dbpass, args.dbhost, args.dbport)))
        procs[-1].start()

    # drain one result per queued item so the results queue never fills up
    for i in range(len(itemIds)):
        [procIndex, itemId] = resultsQueue.get()
    # wait for all workers to finish their execution
    for i in range(args.cores):
        procs[i].join()

    # measure elapsed time
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logging.info(msg)
Beispiel #3
0
def run(opts):
    """Convert raw point-cloud data items into POTree format.

    opts.itemid selects the mode: '?' lists the raw data items, '' converts
    every item without a POTree conversion, '!' only lists the pending
    items, and a comma-separated id list converts exactly those items.
    """
    # Start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)
    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)

    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        # close the DB connection before the early exit (was leaked before)
        utils.closeConnectionDB(connection, cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        # Raw point-cloud items without a POTree conversion yet
        query = """
SELECT raw_data_item_id,abs_path,background 
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) JOIN RAW_DATA_ITEM_PC USING (raw_data_item_id) 
WHERE raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM POTREE_DATA_ITEM_PC)"""
        # Get the list of items that are not converted yet.
        # NOTE(review): a previous comment said these were sorted by background,
        # but the query contains no ORDER BY — confirm the intended order.
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(cursor, query)
        for (rawDataItemId, absPath, isBackground) in raw_data_items:
            if opts.itemid == '':
                # convert this pending item
                levels = getNumLevels(opts, isBackground)
                createPOTree(cursor, rawDataItemId, opts.potreeDir, levels)
            else:
                # '!' only lists the pending items
                m = '\t'.join((str(rawDataItemId), absPath))
                print(m)
                logging.info(m)

    else:
        # Convert only the explicitly requested items
        for rawDataItemId in opts.itemid.split(','):
            rows, num_rows = utils.fetchDataFromDB(cursor, 'SELECT background FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) WHERE raw_data_item_id = %s', [int(rawDataItemId)])
            if num_rows == 0:
                logging.error('There is not a raw data item with id %d' % int(rawDataItemId))
                # close the DB connection before the early exit (was leaked before)
                utils.closeConnectionDB(connection, cursor)
                return
            isBackground = rows[0][0]
            levels = getNumLevels(opts, isBackground)
            createPOTree(cursor, int(rawDataItemId), opts.potreeDir, levels)

    # close DB connection
    utils.closeConnectionDB(connection, cursor)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logging.info(msg)
def run(opts):
    """Create OSG conversions for raw data items that have none yet.

    opts.itemid selects the mode: '?' lists the raw data items, '' converts
    every non-background item without an OSG conversion, '!' only lists the
    pending items, and a comma-separated id list converts exactly those.
    """
    # Start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)

    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)

    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        # close the DB connection before the early exit (was leaked before)
        utils.closeConnectionDB(connection, cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        # Non-background raw data items without any OSG conversion yet
        query = """
SELECT raw_data_item_id, abs_path 
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) 
WHERE NOT background AND raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PC_SITE 
          UNION 
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_MESH 
          UNION 
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PICTURE)"""
        # Get the list of items that are not converted yet
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(
            cursor, query)
        for (rawDataItemId, absPath) in raw_data_items:
            if opts.itemid == '':
                createOSG(cursor, rawDataItemId, opts.osgDir)
            else:
                # '!' only lists the pending items
                m = '\t'.join((str(rawDataItemId), absPath))
                print(m)
                logging.info(m)
    else:
        # Convert only the explicitly requested items
        for rawDataItemId in opts.itemid.split(','):
            createOSG(cursor, int(rawDataItemId), opts.osgDir)

    # close DB connection
    utils.closeConnectionDB(connection, cursor)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
Beispiel #5
0
def run(opts):
    """Create OSG conversions for raw data items that have none yet.

    Modes via opts.itemid: '?' lists raw data items and exits; '' converts
    all non-background items lacking an OSG conversion; '!' lists pending
    items only; otherwise a comma-separated list of ids to convert.
    """
    # Start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)

    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)

    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        # close the DB connection before the early exit (was leaked before)
        utils.closeConnectionDB(connection, cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        # Non-background raw data items without any OSG conversion yet
        query = """
SELECT raw_data_item_id, abs_path 
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) 
WHERE NOT background AND raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PC_SITE 
          UNION 
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_MESH 
          UNION 
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PICTURE)"""
        # Get the list of items that are not converted yet
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(cursor, query)
        for (rawDataItemId, absPath) in raw_data_items:
            if opts.itemid == '':
                createOSG(cursor, rawDataItemId, opts.osgDir)
            else:
                # '!' only lists the pending items
                m = '\t'.join((str(rawDataItemId), absPath))
                print(m)
                logging.info(m)
    else:
        # Convert only the explicitly requested items
        for rawDataItemId in opts.itemid.split(','):
            createOSG(cursor, int(rawDataItemId), opts.osgDir)

    # close DB connection
    utils.closeConnectionDB(connection, cursor)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logging.info(msg)
Beispiel #6
0
def run(args):
    """Fan out item processing over args.cores worker processes.

    Collects item ids (all non-background items when args.itemid is '',
    otherwise the given comma-separated list), enqueues them for runChild
    workers, drains one result per item and joins the workers.
    """
    # start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=utils.DEFAULT_LOG_LEVEL)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)

    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost,
                                           args.dbport)

    itemIds = []
    if args.itemid == '':
        # no explicit ids given: process every non-background item
        data, num = utils.fetchDataFromDB(
            cursor, 'SELECT item_id FROM ITEM WHERE NOT background')
        for (itemId, ) in data:
            itemIds.append(itemId)
    else:
        itemIds = args.itemid.split(',')

    # close the connection to the DB (the workers open their own)
    utils.closeConnectionDB(connection, cursor)

    # Create queues
    itemsQueue = multiprocessing.Queue()    # The queue of tasks (queries)
    resultsQueue = multiprocessing.Queue()  # The queue of results

    for itemId in itemIds:
        itemsQueue.put(int(itemId))
    # add as many None jobs as workers to tell them to terminate (queue is FIFO)
    for i in range(args.cores):
        itemsQueue.put(None)

    procs = []
    # start the worker processes
    for i in range(args.cores):
        procs.append(
            multiprocessing.Process(
                target=runChild,
                args=(i, itemsQueue, resultsQueue, args.las, args.dbname,
                      args.dbuser, args.dbpass, args.dbhost, args.dbport)))
        procs[-1].start()

    # drain one result per queued item so the results queue never fills up
    for i in range(len(itemIds)):
        [procIndex, itemId] = resultsQueue.get()
    # wait for all workers to finish their execution
    for i in range(args.cores):
        procs[i].join()

    # measure elapsed time
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
Beispiel #7
0
                # NOTE(review): fragment of a worker loop — the enclosing
                # function and its try-block begin above this excerpt.
                if returnOk:
                    logging.info(
                        'PROC%d: Updating DB minimum and maximum Z for item %d'
                        % (procIndex, itemId))
                    # parameterized UPDATE — values are bound, not interpolated
                    utils.dbExecute(
                        cursor,
                        "UPDATE ITEM SET (min_z,max_z) = (%s,%s) WHERE item_id = %s",
                        [minZ, maxZ, itemId])
            except Exception, e:
                # roll back the failed transaction so the connection stays usable
                connection.rollback()
                logging.error(
                    'PROC%d: Can not update minimum and maximum Z for item %d'
                    % (procIndex, itemId))
                logging.error(e)
            # always report back so the parent process can count finished items
            resultsQueue.put((procIndex, itemId))
    utils.closeConnectionDB(connection, cursor)


def run(args):
    # NOTE(review): this snippet is truncated — it is cut off in the middle
    # of the utils.connectToDB(...) call below.
    # start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=utils.DEFAULT_LOG_LEVEL)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logging.info(msg)

    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost,
def run(args):
    """Load site geometries from a shapefile or SQL dump into the DB.

    args.input is either a .shp (converted with shp2pgsql using the EPSG
    code from the companion .prj) or an SQL file whose CREATE TABLE must
    target sites_geoms_temp. The geometries are loaded into a temp table
    and merged into the ITEM table, then the temp table is cleaned up.
    """
    global logger
    global connection
    global cursor

    # start logging
    logname = os.path.basename(args.input) + '.log'
    logger = utils.start_logging(filename=logname,
                                 level=utils.DEFAULT_LOG_LEVEL)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logger.info(msg)

    if not os.path.isfile(args.input):
        msg = "Input file is not found!"
        print(msg)
        logger.error(msg)
        return

    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost,
                                           args.dbport)

    if args.input.endswith('shp'):
        # a shapefile needs its companion .prj to determine the EPSG code
        prjFile = args.input.replace('shp', 'prj')
        if not os.path.isfile(prjFile):
            msg = "Input PRJ file is not found!"
            print(msg)
            logger.error(msg)
            # close the DB connection before the early exit (was leaked before)
            utils.closeConnectionDB(connection, cursor)
            return

        sqlFile = os.path.basename(args.input).replace('shp', 'sql')
        # NOTE(review): args.input is interpolated into a shell command; a
        # path containing spaces or shell metacharacters will break or be
        # unsafe — consider subprocess with an argument list.
        shp2psql = 'shp2pgsql -s ' + str(getEPSG(
            prjFile)) + ' -c ' + args.input + ' sites_geoms_temp > ' + sqlFile
        print(shp2psql)
        logger.info(shp2psql)
        os.system(shp2psql)
    else:
        sqlFile = args.input
        # reject SQL dumps whose CREATE TABLE does not target sites_geoms_temp
        with open(sqlFile, 'r') as sqlFileHandle:
            sqlContent = sqlFileHandle.read()
        for line in sqlContent.split('\n'):
            if line.count('CREATE TABLE'):
                if line.count('sites_geoms_temp') == 0:
                    msg = "The table in the SQL file must be named sites_geoms_temp. Replace the table name to sites_geoms_temp!"
                    print(msg)
                    logger.error(msg)
                    # close the DB connection before the early exit (was leaked before)
                    utils.closeConnectionDB(connection, cursor)
                    return

    # clean the temp table if it existed
    clean_temp_table(args)

    # load the table sites_geoms_temp from the SQL file into the DB
    success_loading = utils.load_sql_file(cursor, sqlFile)

    if args.input.endswith('shp'):
        # remove the intermediate SQL file generated above
        # (os.remove replaces the shell 'rm -rf' call, avoiding injection risk)
        os.remove(sqlFile)

    if success_loading:

        # check if the SITES table is empty, then change the type of the geom field
        update_geom_col_type(cursor)

        # find the lists of new IDs and list of overlapping IDs
        no_item_well_temp_ids, both_in_item_and_temp_ids = find_lists(cursor)

        # insert the object geometries per site for the sites not in item, but in the sites_geoms_temp table
        update_geometries(no_item_well_temp_ids, True)

        # update the union of object geometries per site for the sites both in item and sites_geoms_temp table
        update_geometries(both_in_item_and_temp_ids, False)

        # clean the temp table
        clean_temp_table(args)

    # close the connection to the DB
    utils.closeConnectionDB(connection, cursor)

    # measure elapsed time
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logger.info(msg)

    return
def run(opts):
    """Synchronize the DB with changes recorded in an OSG XML config file.

    Parses opts.config and processes, in order: active objects marked
    status="updated", site objects marked "deleted", site objects marked
    "new", and finally all non-default cameras (previous cameras are
    deleted and re-inserted). Coordinates are stored relative to the
    background offset returned by getBackgroundOffset.
    """
    # Define logging and start logging
    logname = os.path.basename(opts.config) + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)

    # Parse xml configuration file
    data = ET.parse(opts.config).getroot()

    # Database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser, opts.dbpass, opts.dbhost, opts.dbport)

    # get offset and srid of the background defined in the conf file
    (bgOffsetX, bgOffsetY, bgOffsetZ, bgSRID) = getBackgroundOffset(data, cursor)
    bgOffset = (bgOffsetX, bgOffsetY, bgOffsetZ)
    # Process updates
    updateAOS = data.xpath('//*[@status="updated"]')
    # loop over all updates found in the xml config file
    for ao in updateAOS:
        uniqueName = ao.get('uniqueName')
        (aoType, itemId, rawDataItemId, objectId, labelName) = utils.decodeOSGActiveObjectUniqueName(cursor, uniqueName)
        if aoType is None:
            msg = 'Ignoring operation on %s. Could not decode uniqueName' % uniqueName
            print(msg)
            logging.warning(msg)
        else:
            # check if the object is in the DB
            osgLocationId = getOSGLocationId(cursor, aoType, labelName, itemId, objectId, rawDataItemId)
            if osgLocationId is not None:
                # update the DB with the information in the xml config file
                if aoType == utils.AO_TYPE_LAB:
                    # Some other params may have changed in the label:
                    # delete and re-insert it with the fresh attributes
                    msg = 'Updating label %s' % labelName
                    print(msg)
                    logging.info(msg)
                    deleteOSG(cursor, aoType, labelName)
                    osgLocationId = insertOSGLocation(cursor, ao.getchildren()[0], bgSRID, bgOffset)
                    insertDB(cursor, 'OSG_LABEL', ('osg_label_name', 'osg_location_id', 'text', 'red', 'green', 'blue', 'rotate_screen', 'outline', 'font'),
                             (labelName, osgLocationId, ao.get('labelText'),
                              ao.get('labelColorRed'), ao.get('labelColorGreen'), ao.get('labelColorBlue'),
                              ao.get('labelRotateScreen'), ao.get('outline'), ao.get('Font')))
                else:
                    msg = 'Updating OSG location %d from %s' % (osgLocationId, uniqueName)
                    print(msg)
                    logging.info(msg)
                    updateOSGLocation(cursor, osgLocationId, ao.getchildren()[0], bgSRID, bgOffset)
            else:
                if aoType == utils.AO_TYPE_OBJ:
                    # It is a bounding that has been moved and it is not currently in the DB. Let's insert it!
                    msg = 'Insert missing OSG_ITEM_OBJECT (%s,%s)' % (itemId, objectId)
                    print(msg)
                    logging.info(msg)
                    osgLocationId = insertOSGLocation(cursor, ao.getchildren()[0], bgSRID, bgOffset)
                    insertDB(cursor, 'OSG_ITEM_OBJECT', ('item_id', 'object_number', 'osg_location_id'), (itemId, objectId, osgLocationId))
                else:
                    # log error if object is not found in DB
                    msg = 'Update not possible. OSG_ITEM_OBJECT from %s not found in DB' % uniqueName
                    print(msg)
                    logging.error(msg)

    # Process deletes (only possible for site objects)
    deleteAOS = data.xpath('//*[@status="deleted"]')
    # loop over all deletes found in the xml config file
    for ao in deleteAOS:
        uniqueName = ao.get('uniqueName')
        (aoType, itemId, rawDataItemId, objectId, labelName) = utils.decodeOSGActiveObjectUniqueName(cursor, uniqueName)
        if aoType is None:
            msg = 'Ignoring operation on %s. Could not decode uniqueName' % uniqueName
            print(msg)
            logging.warning(msg)
        else:
            if aoType in (utils.AO_TYPE_OBJ, utils.AO_TYPE_LAB):
                # check if the object is in the DB
                osgLocationId = getOSGLocationId(cursor, aoType, labelName, itemId, objectId)
                if osgLocationId is not None:
                    # Delete the OSG-related entries from the DB
                    msg = 'Deleting OSG related entries for %s' % uniqueName
                    print(msg)
                    logging.info(msg)
                    deleteOSG(cursor, aoType, labelName, itemId, objectId)
                else:
                    # log error if object is not found in DB
                    msg = 'Not possible to delete. OSG_ITEM_OBJECT from %s not found in DB. Maybe already deleted?' % uniqueName
                    print(msg)
                    logging.warning(msg)
            else:
                # log error if trying to delete a non-site object
                msg = 'Ignoring delete in %s: Meshes, pictures and PCs can not be deleted' % uniqueName
                print(msg)
                logging.error(msg)

    # Process new objects (only possible for site objects)
    newAOS = data.xpath('//*[@status="new"]')
    # loop over all new objects found in the xml config file
    for ao in newAOS:
        uniqueName = ao.get('uniqueName')
        (aoType, itemId, rawDataItemId, objectId, labelName) = utils.decodeOSGActiveObjectUniqueName(cursor, uniqueName)
        if aoType is None:
            msg = 'Ignoring operation on %s. Could not decode uniqueName' % uniqueName
            print(msg)
            logging.warning(msg)
        else:
            if aoType in (utils.AO_TYPE_OBJ, utils.AO_TYPE_LAB):
                # check if the object is in the DB
                osgLocationId = getOSGLocationId(cursor, aoType, labelName, itemId, objectId)
                if osgLocationId is not None:
                    # log error if the new object is already in the DB
                    msg = 'OSG_ITEM_OBJECT from %s already in DB. Ignoring add' % uniqueName
                    print(msg)
                    logging.warning(msg)
                else:
                    osgLocationId = insertOSGLocation(cursor, ao.getchildren()[0], bgSRID, bgOffset)
                    if aoType == utils.AO_TYPE_OBJ:
                        # add object to the DB
                        if objectId == utils.ITEM_OBJECT_NUMBER_ITEM:
                            msg = 'Adding missing ITEM %s' % objectId
                            print(msg)
                            logging.info(msg)
                            insertDB(cursor, 'ITEM', ('item_id', 'background'), (itemId, False))
                        msg = 'Adding ITEM_OBJECT (%d,%d)' % (itemId, objectId)
                        print(msg)
                        logging.info(msg)
                        insertDB(cursor, 'ITEM_OBJECT', ('item_id', 'object_number'), (itemId, objectId))
                        insertDB(cursor, 'OSG_ITEM_OBJECT', ('item_id', 'object_number', 'osg_location_id'), (itemId, objectId, osgLocationId))
                    else:
                        # add label to the DB
                        msg = 'Adding label %s' % uniqueName
                        print(msg)
                        logging.info(msg)
                        insertDB(cursor, 'OSG_LABEL', ('osg_label_name', 'osg_location_id', 'text', 'red', 'green', 'blue', 'rotate_screen', 'outline', 'font'),
                                 (labelName, osgLocationId, ao.get('labelText'),
                                  ao.get('labelColorRed'), ao.get('labelColorGreen'), ao.get('labelColorBlue'),
                                  ao.get('labelRotateScreen'), ao.get('outline'), ao.get('Font')))
            else:
                # log error if trying to add a non-site object
                msg = 'Ignoring new in %s: Meshes, pictures and PCs can not be added' % uniqueName
                print(msg)
                logging.error(msg)

    # Process the cameras (the DEF CAMs are added for all objects and can not be deleted or updated)
    cameras = data.xpath('//camera[not(starts-with(@name,"' + utils.DEFAULT_CAMERA_PREFIX + '"))]')
    # Delete all previous cameras and related entries
    deleteCameras(cursor)
    # add all cameras
    for camera in cameras:
        name = camera.get('name')
        itemId = None
        if name.count(utils.USER_CAMERA):
            try:
                # the item id is encoded in the camera name after USER_CAMERA
                itemId = int(name[name.index(utils.USER_CAMERA) + len(utils.USER_CAMERA):].split('_')[0])
            except ValueError:
                # int() failed: the camera name does not encode a valid item id
                msg = 'Incorrect name %s for a ITEM camera' % name
                print(msg)
                logging.warning(msg)
                itemId = None
        msg = 'Adding camera %s' % name
        print(msg)
        logging.info(msg)
        osgLocationId = insertOSGLocation(cursor, camera, bgSRID, bgOffset)
        insertDB(cursor, 'OSG_CAMERA', ('osg_camera_name', 'osg_location_id'), (name, osgLocationId))
        if itemId is not None:
            insertDB(cursor, 'OSG_ITEM_CAMERA', ('item_id', 'osg_camera_name'), (itemId, name))
    # close DB connection
    utils.closeConnectionDB(connection, cursor)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logging.info(msg)
Beispiel #10
0
def run(args):
    """Export all non-background items into a single JSON file.

    For each item the footprint, altitude range, thumbnail, metadata,
    point clouds, meshes and object metadata are collected and written
    to args.output via save2JSON. OSG locations are made absolute using
    the background offset for args.srid (stored in the module globals).
    """
    global logger
    global offsetX
    global offsetY
    global offsetZ

    logname = os.path.basename(args.output) + '.log'
    logger = utils.start_logging(filename=logname, level=args.log)

    # start logging
    localtime = utils.getCurrentTimeAsAscii()
    msg = __file__ + ' script logging start at %s' % localtime
    print(msg)
    logger.info(msg)
    t0 = time.time()

    # connect to DB and get a cursor
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser, args.dbpass, args.dbhost)

    # We assume the osg location is relative
    # We need to make it absolute by adding the offset of the background with srid as provided
    query = """
SELECT C.offset_x, C.offset_y, C.offset_z 
FROM raw_data_item A, raw_data_item_pc B, osg_data_item_pc_background C 
WHERE A.raw_data_item_id = B.raw_data_item_id AND 
      B.raw_data_item_id = C.raw_data_item_id AND 
      A.srid = %s"""
    queryArgs = [args.srid, ]
    backgroundOffsets, num_backgrounds = utils.fetchDataFromDB(cursor, query, queryArgs)
    if num_backgrounds:
        (offsetX, offsetY, offsetZ) = backgroundOffsets[0]

    # get all items
    query = 'SELECT item_id, ST_ASGEOJSON(geom), min_z, max_z FROM item WHERE NOT background ORDER BY item_id'
    sites, num_sites = utils.fetchDataFromDB(cursor, query)

    data = []
    for (itemId, itemGeom, minz, maxz) in sites:
        # Generate the JSON data for this item
        dataSite = {}
        dataSite["id"] = itemId
        if itemGeom is not None:
            dataSite["footprint"] = json.loads(itemGeom)['coordinates']
            dataSite["footprint_altitude"] = [minz, maxz]

        addThumbnail(cursor, itemId, dataSite)
        addSiteMetaData(cursor, itemId, dataSite)
        addPointCloud(cursor, itemId, dataSite, args.srid)
        addMeshes(cursor, itemId, dataSite, args.srid)
        addObjectsMetaData(cursor, itemId, dataSite, args.srid)

        data.append(dataSite)

    # close the DB connection
    utils.closeConnectionDB(connection, cursor)

    # save the data into JSON file
    save2JSON(args.output, data)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logger.info(msg)
Beispiel #11
0
def run(args):
    """Export all non-background items into a single JSON file.

    Collects footprint, altitude range, thumbnail, metadata, point clouds,
    meshes and object metadata per item and writes them to args.output via
    save2JSON. OSG locations are made absolute using the background offset
    for args.srid (stored in the module globals).
    """
    global logger
    global offsetX
    global offsetY
    global offsetZ

    logname = os.path.basename(args.output) + '.log'
    logger = utils.start_logging(filename=logname, level=args.log)

    # start logging
    localtime = utils.getCurrentTimeAsAscii()
    msg = __file__ + ' script logging start at %s' % localtime
    print(msg)
    logger.info(msg)
    t0 = time.time()

    # connect to DB and get a cursor
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost)

    # We assume the osg location is relative
    # We need to make it absolute by adding the offset of the background with srid as provided
    query = """
SELECT C.offset_x, C.offset_y, C.offset_z 
FROM raw_data_item A, raw_data_item_pc B, osg_data_item_pc_background C 
WHERE A.raw_data_item_id = B.raw_data_item_id AND 
      B.raw_data_item_id = C.raw_data_item_id AND 
      A.srid = %s"""
    queryArgs = [
        args.srid,
    ]
    backgroundOffsets, num_backgrounds = utils.fetchDataFromDB(
        cursor, query, queryArgs)
    if num_backgrounds:
        (offsetX, offsetY, offsetZ) = backgroundOffsets[0]

    # get all items
    query = 'SELECT item_id, ST_ASGEOJSON(geom), min_z, max_z FROM item WHERE NOT background ORDER BY item_id'
    sites, num_sites = utils.fetchDataFromDB(cursor, query)

    data = []
    for (itemId, itemGeom, minz, maxz) in sites:
        # Generate the JSON data for this item
        dataSite = {}
        dataSite["id"] = itemId
        if itemGeom is not None:
            dataSite["footprint"] = json.loads(itemGeom)['coordinates']
            dataSite["footprint_altitude"] = [minz, maxz]

        addThumbnail(cursor, itemId, dataSite)
        addSiteMetaData(cursor, itemId, dataSite)
        addPointCloud(cursor, itemId, dataSite, args.srid)
        addMeshes(cursor, itemId, dataSite, args.srid)
        addObjectsMetaData(cursor, itemId, dataSite, args.srid)

        data.append(dataSite)

    # close the DB connection
    utils.closeConnectionDB(connection, cursor)

    # save the data into JSON file
    save2JSON(args.output, data)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logger.info(msg)
Beispiel #12
0
                # NOTE(review): fragment of a worker loop — the enclosing
                # function and its try-block begin above this excerpt.
                # Cut out the item's points from the LAS folder to obtain its Z range
                (returnOk, vertices, minZ, maxZ, avgZ, numpoints) = GetItemLAS.create_cut_out(cursor, lasFolder, outputFile, itemId, BUFFER, CONCAVE)
            
                # We do not need the cutout
                if os.path.isfile(outputFile):
                    os.remove(outputFile)
            
                if returnOk:
                    logging.info('PROC%d: Updating DB minimum and maximum Z for item %d' % (procIndex,itemId))
                    # parameterized UPDATE — values are bound, not interpolated
                    utils.dbExecute(cursor, "UPDATE ITEM SET (min_z,max_z) = (%s,%s) WHERE item_id = %s", 
                                    [minZ, maxZ, itemId])
            except Exception, e:
                # roll back the failed transaction so the connection stays usable
                connection.rollback()
                logging.error('PROC%d: Can not update minimum and maximum Z for item %d' % (procIndex,itemId))
                logging.error(e)
            # always report back so the parent process can count finished items
            resultsQueue.put((procIndex, itemId))   
    utils.closeConnectionDB(connection, cursor)

def run(args): 
    # NOTE(review): this snippet is truncated — the source ends right after
    # itemIds is initialized below.
    # start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=utils.DEFAULT_LOG_LEVEL)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' %localtime
    print msg
    logging.info(msg)
    
    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser, args.dbpass, args.dbhost, args.dbport) 
    
    itemIds = []