Code example #1
File: AddRawDataItem.py Project: NLeSC/PattyData
def run(opts):
    # set logging level
    global logger
    #logname = os.path.basename(__file__) + '.log'
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    logger = utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logger.info(msg)
    
    opts.file = opts.file.rstrip('/')
    
    # check if all required options are specified
    check_required_options(opts)
    # check if the required directory structure exists
    check_directory_structure(opts.data)
    # check input data
    check_input_data(opts)
    # define target directory
    TARGETDIR = define_create_target_dir(opts)
    # copy the data to the target directory
    copy_data(opts, TARGETDIR)
    
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logger.info(msg)
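
All of these run functions are driven by a small command-line entry point that parses the options they read. A minimal sketch of such a wrapper for the example above, assuming hypothetical flag names that map onto opts.file, opts.data and opts.log; the real AddRawDataItem.py may define its arguments differently:

import argparse

def main():
    # Hypothetical entry point; the actual flags of AddRawDataItem.py may differ.
    parser = argparse.ArgumentParser(description='Add a raw data item to the raw data directory structure')
    parser.add_argument('-f', '--file', required=True, help='input file or directory to add')
    parser.add_argument('-d', '--data', required=True, help='root of the data directory structure')
    parser.add_argument('-l', '--log', default='INFO', help='logging level')
    run(parser.parse_args())

if __name__ == '__main__':
    main()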
Code example #2
def run(opts):
    # set logging level
    global logger
    #logname = os.path.basename(__file__) + '.log'
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    logger = utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logger.info(msg)

    opts.file = opts.file.rstrip('/')

    # check if all required options are specified
    check_required_options(opts)
    # check if the required directory structure exists
    check_directory_structure(opts.data)
    # check input data
    check_input_data(opts)
    # define target directory
    TARGETDIR = define_create_target_dir(opts)
    # copy the data to the target directory
    copy_data(opts, TARGETDIR)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logger.info(msg)
Code example #3
def run(args):
    logname = os.path.basename(__file__) + '.log'
    utils.start_logging(filename=logname, level=args.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logging.info(msg)

    connection, cursor = utils.connectToDB(args.dbname, args.dbuser, args.dbpass, args.dbhost, args.dbport) 
    
    (returnOk, vertices, minZ, maxZ, avgZ, numpoints) = create_cut_out(cursor, args.las, args.output, args.itemid, args.buffer, args.concave)
    
    if returnOk:
        # Create CSV with vertices of footprint
        footoutput = args.output + '_footprint.csv'
        logging.info('Creating CSV %s with vertices of concave hull of footprint' % footoutput)
        fpOutput = open(footoutput, 'w')
        for point in vertices:
            point.append(str(avgZ))
            fpOutput.write(','.join(point) + '\n')
        fpOutput.close()
        
        logging.info('#Points: %d' % numpoints)
        
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logging.info(msg) 
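
The footprint export above builds each CSV row by appending the average height to the vertex and joining the fields by hand. An equivalent sketch using the csv module, assuming vertices is a list of [x, y] coordinate strings as in the loop above:

import csv

# Sketch only: produces the same rows as the manual join above, assuming
# `vertices` holds [x, y] string pairs and `avgZ` is the average elevation.
with open(footoutput, 'w') as csvFile:
    writer = csv.writer(csvFile)
    for point in vertices:
        writer.writerow(point + [str(avgZ)])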
Code example #4
def run(opts):
    # Start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logging.info(msg)
    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)

    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        query = """
SELECT raw_data_item_id,abs_path,background 
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) JOIN RAW_DATA_ITEM_PC USING (raw_data_item_id) 
WHERE raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM POTREE_DATA_ITEM_PC)"""
        # Get the list of items that have not been converted yet
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(
            cursor, query)
        for (rawDataItemId, absPath, isBackground) in raw_data_items:
            if opts.itemid == '':
                levels = getNumLevels(opts, isBackground)
                createPOTree(cursor, rawDataItemId, opts.potreeDir, levels)
            else:
                m = '\t'.join((str(rawDataItemId), absPath))
                print m
                logging.info(m)

    else:
        for rawDataItemId in opts.itemid.split(','):
            rows, num_rows = utils.fetchDataFromDB(
                cursor,
                'SELECT background FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) WHERE raw_data_item_id = %s',
                [int(rawDataItemId)])
            if num_rows == 0:
                logging.error('There is not a raw data item with id %d' %
                              int(rawDataItemId))
                return
            isBackground = rows[0][0]
            levels = getNumLevels(opts, isBackground)
            createPOTree(cursor, int(rawDataItemId), opts.potreeDir, levels)

    # close DB connection
    utils.closeConnectionDB(connection, cursor)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
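
This example and example #7 (GeneratePOTree.py) both call getNumLevels(opts, isBackground), which is not shown here. A minimal sketch of what such a helper could look like, assuming hypothetical levels and levelsBackground options; the real implementation may compute the octree depth differently:

def getNumLevels(opts, isBackground):
    # Hypothetical sketch: background point clouds typically need a deeper
    # Potree octree than single-site point clouds. Option names are assumed.
    if isBackground:
        return opts.levelsBackground
    return opts.levels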
Code example #5
File: UpdateDBItemZ.py Project: NLeSC/PattyData
def run(args): 
    # start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=utils.DEFAULT_LOG_LEVEL)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' %localtime
    print msg
    logging.info(msg)
    
    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser, args.dbpass, args.dbhost, args.dbport) 
    
    itemIds = []
    
    if args.itemid == '':
        data,num = utils.fetchDataFromDB(cursor, 'SELECT item_id FROM ITEM WHERE NOT background')
        for (itemId,) in data:
            itemIds.append(itemId)
    else:
        itemIds = args.itemid.split(',')
        
    # close the connection to the DB
    utils.closeConnectionDB(connection, cursor)
    
    # Create queues
    itemsQueue = multiprocessing.Queue() # The queue of tasks (queries)
    resultsQueue = multiprocessing.Queue() # The queue of results
    
    for itemId in itemIds:
        itemsQueue.put(int(itemId))
    for i in range(args.cores): # add one None job per worker process to tell it to terminate (queue is FIFO)
        itemsQueue.put(None)
    
    procs = []
    # Start one worker process per core
    for i in range(args.cores):
        procs.append(multiprocessing.Process(target=runChild, 
            args=(i, itemsQueue, resultsQueue, args.las, args.dbname, args.dbuser, args.dbpass, args.dbhost, args.dbport)))
        procs[-1].start()
    
    for i in range(len(itemIds)):
        [procIndex, itemId] = resultsQueue.get()
    # wait for all workers to finish
    for i in range(args.cores):
        procs[i].join()
    
    # measure elapsed time
    elapsed_time = time.time() - t0    
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logging.info(msg)
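
run starts args.cores child processes with target runChild and pushes one None sentinel per child, so each worker stops once the FIFO queue is drained. runChild itself is not shown; a minimal sketch of the consume-until-None pattern it implies, with a hypothetical processItem helper standing in for the per-item Z update:

def runChild(procIndex, itemsQueue, resultsQueue, lasFolder,
             dbName, dbUser, dbPass, dbHost, dbPort):
    # Sketch only: each worker opens its own DB connection, consumes item ids
    # until the None sentinel arrives, and reports each finished item back.
    connection, cursor = utils.connectToDB(dbName, dbUser, dbPass, dbHost, dbPort)
    while True:
        itemId = itemsQueue.get()
        if itemId is None:  # sentinel added by the parent: no more work
            break
        processItem(cursor, itemId, lasFolder)  # hypothetical per-item update
        connection.commit()
        resultsQueue.put([procIndex, itemId])
    utils.closeConnectionDB(connection, cursor)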
Code example #6
def run(opts):
    # Start logging
    #logname = os.path.basename(__file__) + '.log'
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logging.info(msg)

    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)

    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        query = """
SELECT raw_data_item_id, abs_path 
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) 
WHERE NOT background AND raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PC_SITE 
          UNION 
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_MESH 
          UNION 
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PICTURE)"""
        # Get the list of items that have not been converted yet
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(
            cursor, query)
        for (rawDataItemId, absPath) in raw_data_items:
            if opts.itemid == '':
                createOSG(cursor, rawDataItemId, opts.osgDir)
            else:
                m = '\t'.join((str(rawDataItemId), absPath))
                print m
                logging.info(m)
    else:
        for rawDataItemId in opts.itemid.split(','):
            createOSG(cursor, int(rawDataItemId), opts.osgDir)

    # close DB connection
    utils.closeConnectionDB(connection, cursor)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
Code example #7
File: GeneratePOTree.py Project: NLeSC/PattyData
def run(opts):
    # Start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logging.info(msg)
    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)
    
    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        query = """
SELECT raw_data_item_id,abs_path,background 
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) JOIN RAW_DATA_ITEM_PC USING (raw_data_item_id) 
WHERE raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM POTREE_DATA_ITEM_PC)"""
        # Get the list of items that have not been converted yet
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(cursor, query)
        for (rawDataItemId,absPath,isBackground) in raw_data_items:
            if opts.itemid == '' :
                levels = getNumLevels(opts, isBackground)
                createPOTree(cursor, rawDataItemId, opts.potreeDir, levels)
            else:
                m = '\t'.join((str(rawDataItemId),absPath))
                print m
                logging.info(m)
                
    else:
        for rawDataItemId in opts.itemid.split(','):
            rows,num_rows = utils.fetchDataFromDB(cursor, 'SELECT background FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) WHERE raw_data_item_id = %s', [int(rawDataItemId)])
            if num_rows == 0:
                logging.error('There is not a raw data item with id %d' % int(rawDataItemId))
                return
            isBackground = rows[0][0]
            levels = getNumLevels(opts, isBackground)    
            createPOTree(cursor, int(rawDataItemId), opts.potreeDir, levels)

    # close DB connection
    utils.closeConnectionDB(connection, cursor)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logging.info(msg)
Code example #8
File: GenerateOSG.py Project: NLeSC/PattyData
def run(opts):
    # Start logging
    #logname = os.path.basename(__file__) + '.log'
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logging.info(msg)

    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)
    
    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        query = """
SELECT raw_data_item_id, abs_path 
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) 
WHERE NOT background AND raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PC_SITE 
          UNION 
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_MESH 
          UNION 
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PICTURE)"""
        # Get the list of items that have not been converted yet
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(cursor, query)
        for (rawDataItemId, absPath) in raw_data_items:
            if opts.itemid == '':
                createOSG(cursor, rawDataItemId, opts.osgDir)
            else:
                m = '\t'.join((str(rawDataItemId),absPath))
                print m
                logging.info(m)
    else:
        for rawDataItemId in opts.itemid.split(','):
            createOSG(cursor, int(rawDataItemId), opts.osgDir)    

    # close DB connection
    utils.closeConnectionDB(connection, cursor)
    
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logging.info(msg)
Code example #9
File: CreateDB.py Project: mohitsahunitrr/PattyData
def run(opts):
    # Set logging
    #logname = os.path.splitext(os.path.basename(opts.sql))[0] + '.log'
    logname = os.path.basename(opts.sql) + '.log'
    utils.start_logging(filename=logname, level=opts.log)
   
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' %localtime
    print msg
    logging.info(msg)
    os.system('createdb ' + utils.postgresConnectString(opts.dbname, opts.dbuser, opts.dbpass, opts.dbhost, opts.dbport, True))
    
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser, opts.dbpass, opts.dbhost, opts.dbport) 

    msg = 'Adding PostGIS extension'
    logging.info(msg)
    #print msg
    cursor.execute("CREATE EXTENSION POSTGIS")
    connection.commit()

    success_loading = utils.load_sql_file(cursor, opts.sql)
 
    msg = 'Granting relevant permissions' 
    logging.info(msg)
    #print msg

    if success_loading:    
        cursor.execute("select tablename from  pg_tables where schemaname = 'public'")
        tablesNames = cursor.fetchall()
        for (tableName,) in tablesNames:
            cursor.execute('GRANT SELECT ON ' + tableName + ' TO public')
    
        for tableName in ('ITEM', 'ITEM_OBJECT', 'OSG_LOCATION', 'OSG_LABEL', 'OSG_CAMERA', 'OSG_ITEM_CAMERA', 'OSG_ITEM_OBJECT'):
            cursor.execute( 'GRANT SELECT,INSERT,UPDATE,DELETE ON ' + tableName + ' TO public')
    
        connection.commit()
        connection.close()
    
    msg = 'Finished. Total elapsed time  %.02f seconds. See %s' % ((time.time() - t0), logname)
    logging.info(msg)
    print msg
Code example #10
def run(args):
    # start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=utils.DEFAULT_LOG_LEVEL)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logging.info(msg)

    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost,
                                           args.dbport)

    itemIds = []

    if args.itemid == '':
        data, num = utils.fetchDataFromDB(
            cursor, 'SELECT item_id FROM ITEM WHERE NOT background')
        for (itemId, ) in data:
            itemIds.append(itemId)
    else:
        itemIds = args.itemid.split(',')

    # close the connection to the DB
    utils.closeConnectionDB(connection, cursor)

    # Create queues
    itemsQueue = multiprocessing.Queue()  # The queue of tasks (queries)
    resultsQueue = multiprocessing.Queue()  # The queue of results

    for itemId in itemIds:
        itemsQueue.put(int(itemId))
    for i in range(args.cores):  # add one None job per worker process to tell it to terminate (queue is FIFO)
        itemsQueue.put(None)

    procs = []
    # Start one worker process per core
    for i in range(args.cores):
        procs.append(
            multiprocessing.Process(
                target=runChild,
                args=(i, itemsQueue, resultsQueue, args.las, args.dbname,
                      args.dbuser, args.dbpass, args.dbhost, args.dbport)))
        procs[-1].start()

    for i in range(len(itemIds)):
        [procIndex, itemId] = resultsQueue.get()
    # wait for all workers to finish
    for i in range(args.cores):
        procs[i].join()

    # measure elapsed time
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
Code example #11
def run(args):

    global logger
    global connection
    global cursor

    # start logging
    logname = os.path.basename(args.input) + '.log'
    logger = utils.start_logging(filename=logname,
                                 level=utils.DEFAULT_LOG_LEVEL)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logger.info(msg)

    if not os.path.isfile(args.input):
        msg = "Input file is not found!"
        print msg
        logger.error(msg)
        return

    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost,
                                           args.dbport)

    if args.input.endswith('shp'):

        prjFile = args.input.replace('shp', 'prj')
        if not os.path.isfile(prjFile):
            msg = "Input PRJ file is not found!"
            print msg
            logger.error(msg)
            return

        sqlFile = os.path.basename(args.input).replace('shp', 'sql')
        shp2psql = 'shp2pgsql -s ' + str(getEPSG(
            prjFile)) + ' -c ' + args.input + ' sites_geoms_temp > ' + sqlFile
        print shp2psql
        logger.info(shp2psql)
        os.system(shp2psql)
    else:
        sqlFile = args.input
        for line in open(sqlFile, 'r').read().split('\n'):
            if line.count('CREATE TABLE'):
                if line.count('sites_geoms_temp') == 0:
                    msg = "The table in the SQL file must be named sites_geom_temp. Replace the table name to sites_geoms_temp!"
                    print msg
                    logger.error(msg)
                    return

    # clean the temp table if it existed
    clean_temp_table(args)

    # load the table sites_geoms_temp from the SQL file into the DB
    success_loading = utils.load_sql_file(cursor, sqlFile)

    if args.input.endswith('shp'):
        os.system('rm -rf ' + sqlFile)

    if success_loading:

        # check if the SITES table is empty, then change the type of the geom field
        update_geom_col_type(cursor)

        # find the lists of new IDs and list of overlapping IDs
        no_item_well_temp_ids, both_in_item_and_temp_ids = find_lists(cursor)

        # insert the object geometries per site for the sites not in item, but in the sites_geoms_temp table
        update_geometries(no_item_well_temp_ids, True)

        # update the union of object geometries per site for the sites both in item and sites_geoms_temp table
        update_geometries(both_in_item_and_temp_ids, False)

        # clean the temp table
        clean_temp_table(args)

    # close the conection to the DB
    utils.closeConnectionDB(connection, cursor)

    # measure elapsed time
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logger.info(msg)

    return
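
shp2pgsql needs the EPSG code of the shapefile, which the script derives from the accompanying .prj file via getEPSG (not shown). A minimal sketch of such a lookup using pyproj, offered as an assumption about what the helper does; to_epsg() returns None if the projection cannot be matched:

from pyproj import CRS

def getEPSG(prjFile):
    # Hypothetical sketch: read the WKT stored in the .prj file and let
    # pyproj resolve the matching EPSG code.
    with open(prjFile, 'r') as f:
        wkt = f.read()
    return CRS.from_wkt(wkt).to_epsg()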
Code example #12
File: RemoveRawDataItem.py Project: NLeSC/PattyData
def run(args): 
    
    # set logging level
    global logger
    global connection
    global cursor
    
    logname = os.path.basename(__file__) + '.log'
    logger = utils.start_logging(filename=logname, level=args.log)
    localtime = utils.getCurrentTimeAsAscii()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logger.info(msg)

    # start timer
    t0 = time.time()
    
    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser, args.dbpass, args.dbhost, args.dbport) 

    if args.itemid == '?':
        utils.listRawDataItems(cursor)
        return
    else:
        for rawDataItemId in args.itemid.split(','):
            # fetch the abs_path
            abs_paths = fetch_abs_path(rawDataItemId)        
            msg = 'Abs path fetched: %s' % abs_paths
            print msg
            logger.info(msg)
 
            # fetch the potree abs_paths
            abs_potree_paths, num_potree = fetch_potree_abs_paths(rawDataItemId)
            msg = '%s abs potree paths fetched %s' %(num_potree, abs_potree_paths)
            print msg
            logger.info(msg)
    
            # fetch the nexus abs_paths
            abs_nexus_paths, num_nexus = fetch_nexus_abs_paths(rawDataItemId)
            msg = '%s abs nexus paths fetched %s' %(num_nexus, abs_nexus_paths)
            print msg
            logger.info(msg)
    
            # fetch the OSG abs_paths PC
            abs_osg_pc_paths, num_osg_pc = fetch_osg_abs_paths_pc(rawDataItemId)        
            msg = '%s abs OSG paths for PC fetched: %s' %(num_osg_pc, abs_osg_pc_paths)
            print msg
            logger.info(msg)    

            # fetch the OSG abs_paths mesh
            abs_osg_mesh_paths, num_osg_mesh = fetch_osg_abs_paths_mesh(rawDataItemId)        
            msg = '%s abs OSG paths for meshes fetched: %s' %(num_osg_mesh, abs_osg_mesh_paths)
            print msg
            logger.info(msg)    
    
            # fetch the OSG abs_paths picture
            abs_osg_picture_paths, num_osg_picture = fetch_osg_abs_paths_picture(rawDataItemId)        
            msg = '%s abs OSG paths for pictures fetched: %s' %(num_osg_picture, abs_osg_picture_paths)
            print msg
            logger.info(msg)
     
            # fetch the OSG abs_paths PC BG
            abs_osg_pc_bg_paths, num_osg_pc_bg = fetch_osg_abs_paths_pc_bg(rawDataItemId)        
            msg = '%s abs OSG paths for PC BG fetched: %s' %(num_osg_pc_bg, abs_osg_pc_bg_paths)
            print msg
            logger.info(msg)
    
            # remove the files related to the above absolute paths
            for abs_paths_to_remove in (abs_paths, abs_potree_paths, abs_nexus_paths, abs_osg_pc_paths, abs_osg_mesh_paths, abs_osg_picture_paths, abs_osg_pc_bg_paths):
                remove_data(abs_paths_to_remove)
    
            msg = 'Removed data locations related to raw data item %s (%s)!' % (rawDataItemId, abs_paths[0])
            print msg
            logger.info(msg)    

    # measure elapsed time
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logger.info(msg)
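
Each remove_data call above receives a list of absolute paths that should disappear from disk together with their DB references. The helper is not shown; a minimal sketch, assuming the paths are regular files or directory trees inside the data directory:

import os
import shutil

def remove_data(abs_paths):
    # Hypothetical sketch of the cleanup helper used above: remove every
    # path, whether it is a single file or a whole directory tree.
    for path in abs_paths:
        if os.path.isdir(path):
            shutil.rmtree(path)
        elif os.path.isfile(path):
            os.remove(path)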
Code example #13
def run(opts):
    global logger
    # Define logger and start logging
    #logname = os.path.basename(opts.output) + '.log'
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    logger = utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logger.info(msg)

    if not opts.output.endswith(".conf.xml"):
        logger.error('The output file must end with .conf.xml')
        raise IOError('The output file must end with .conf.xml')

    # Create python postgres connection
    global cursor
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)

    # Check that provided background is in DB
    query = """
SELECT OSG_DATA_ITEM_PC_BACKGROUND.abs_path,srid 
FROM OSG_DATA_ITEM_PC_BACKGROUND JOIN RAW_DATA_ITEM USING (raw_data_item_id)"""
    rows, num_rows = utils.fetchDataFromDB(cursor, query)
    backGroundAbsPath = None
    backgroundSRID = None
    for (bgAbsPath, bgSRID) in rows:
        if opts.background == os.path.basename(bgAbsPath):
            backGroundAbsPath = bgAbsPath
            backgroundSRID = bgSRID
    if backGroundAbsPath == None:
        errorMsg = 'Background ' + opts.background + ' is not found'
        logger.error(errorMsg)
        raise Exception(errorMsg)

    # Get the root object: the OSG configuration
    rootObject = viewer_conf_api.osgRCconfiguration()
    # set version
    rootObject.set_version("0.2")

    # Add all the different XML of the active objects
    # (we add distinct since the boundings will share XMLs)
    query = """
SELECT DISTINCT xml_abs_path 
FROM OSG_DATA_ITEM ORDER BY xml_abs_path"""
    rows, num_rows = utils.fetchDataFromDB(cursor, query)
    for (xmlPath, ) in rows:
        if xmlPath.count(opts.osg) == 0:
            logger.error('Mismatch between given OSG data directory ' +
                         'and DB content')
        rootObject.add_objectLibrary(
            viewer_conf_api.objectLibrary(
                url=os.path.relpath(xmlPath, opts.osg)))

    # Add the object library with the boundings
    rootObject.add_objectLibrary(
        viewer_conf_api.objectLibrary(url=utils.BOUNDINGS_XML_RELATIVE))

    # Add the cameras that are in the DB
    cameras = viewer_conf_api.cameras()
    query = """
SELECT osg_camera_name, srid, x, y, z, h, p, r 
FROM OSG_CAMERA JOIN OSG_LOCATION USING (osg_location_id)"""
    rows, num_rows = utils.fetchDataFromDB(cursor, query)
    for (name, srid, x, y, z, h, p, r) in rows:
        if (srid is not None) and (srid == backgroundSRID):
            x, y, z = getOSGPosition(x, y, z, srid)
        else:
            x, y, z = getOSGPosition(x, y, z)
        cameras.add_camera(
            viewer_conf_api.camera(name=name, x=x, y=y, z=z, h=h, p=p, r=r))
    # Add Default cameras for the items that have no camera in the DB
    query = """
SELECT 
    item_id, ST_SRID(geom), st_x(st_centroid(geom)), st_y(st_centroid(geom)), min_z + ((max_z - min_z) / 2) 
FROM ITEM WHERE NOT background AND geom IS NOT null AND item_id NOT IN (
    SELECT DISTINCT item_id FROM OSG_ITEM_CAMERA
) ORDER BY item_id"""
    rows, numitems = utils.fetchDataFromDB(cursor, query)
    for (itemId, srid, x, y, z) in rows:
        # only call getOSGPosition if [x,y,z] are not None
        # should item_id = -1 be added?
        if all(position is not None for position in [x, y, z]) and itemId > 0:
            if (srid is not None) and (srid == backgroundSRID):
                x, y, z = getOSGPosition(x, y, z, srid)
            else:
                x, y, z = getOSGPosition(x, y, z)
            cameras.add_camera(
                viewer_conf_api.camera(name=utils.DEFAULT_CAMERA_PREFIX +
                                       str(itemId),
                                       x=x,
                                       y=y,
                                       z=z))
    rootObject.set_cameras(cameras)

    # Add the XML content of the preferences
    rootObject.set_preferences(viewer_conf_api.parseString(DEFAULT_PREFENCES))

    attributes = viewer_conf_api.attributes()
    # Use generic method to fill all properties.
    # We need the name in the XML, the column name in the DB and
    # the table name in the DB
    for property in utils.ATTRIBUTES_ORDER:
        (cName, tName) = utils.ATTRIBUTES[property]
        elements = getattr(viewer_conf_api, property + 's')()
        # We need to call the columns and tables with extra "" because
        # they were created from the Access DB
        #        utils.dbExecute(cursor, 'SELECT "' + cName + '" FROM "' + tName + '"')
        utils.dbExecute(cursor, 'SELECT ' + cName + ' FROM ' + tName)

        for (element, ) in cursor:
            getattr(elements,
                    'add_' + property)(getattr(viewer_conf_api,
                                               property)(name=element))
        getattr(attributes, 'set_' + property + 's')(elements)
    rootObject.set_attributes(attributes)
    # Add all the static objects, i.e. the OSG from the background

    # Add the static object for the background
    staticObjects = viewer_conf_api.staticObjects()
    staticObjects.add_staticObject(
        viewer_conf_api.staticObject(url=os.path.relpath(
            glob.glob(backGroundAbsPath + '/' + utils.OSG_DATA_PREFIX +
                      '.osgb')[0], opts.osg)))
    # Add hardcode DOME
    staticObjects.add_staticObject(
        viewer_conf_api.staticObject(url=utils.DOMES_OSG_RELATIVE))
    rootObject.set_staticObjects(staticObjects)

    # Add the 5 different layers of active objects
    activeObjects = viewer_conf_api.activeObjects()
    # First we add points, meshes and pictures which are related to
    # the active_objects_sites
    layersData = [('points', 'OSG_DATA_ITEM_PC_SITE', utils.AO_TYPE_PC),
                  ('photos', 'OSG_DATA_ITEM_PICTURE', utils.AO_TYPE_PIC),
                  ('meshes', 'OSG_DATA_ITEM_MESH', utils.AO_TYPE_MESH)]

    for (layerName, tableName, inType) in layersData:
        layer = viewer_conf_api.layer(name=layerName)

        query = """
SELECT item_id, raw_data_item_id, OSG_LOCATION.srid, x, y, z, xs, ys, zs, h, p, r, cast_shadow 
FROM """ + tableName + """ JOIN OSG_DATA_ITEM USING (osg_data_item_id) 
                           JOIN OSG_LOCATION  USING (osg_location_id) 
                           JOIN RAW_DATA_ITEM USING (raw_data_item_id) 
ORDER BY item_id"""
        rows, numitems = utils.fetchDataFromDB(cursor, query)
        for (itemId, rawDataItemId, srid, x, y, z, xs, ys, zs, h, p, r,
             castShadow) in rows:
            # only call getOSGPosition if [x,y,z] are not None
            if all(position is not None for position in [x, y, z]):
                if (srid is not None) and (srid == backgroundSRID):
                    x, y, z = getOSGPosition(x, y, z, srid)
                else:
                    x, y, z = getOSGPosition(x, y, z)
            uniqueName = utils.codeOSGActiveObjectUniqueName(
                cursor, inType, rawDataItemId)
            activeObject = viewer_conf_api.activeObject(prototype=uniqueName,
                                                        uniqueName=uniqueName)
            setting = viewer_conf_api.setting(
                x=x,
                y=y,
                z=z,
                xs=xs,
                ys=ys,
                zs=zs,
                h=h,
                p=p,
                r=r,
                castShadow=(1 if castShadow else 0))
            activeObject.set_setting(setting)
            layer.add_activeObject(activeObject)
        activeObjects.add_layer(layer)

    # Add the boundings
    layer = viewer_conf_api.layer(name='boundings')
    # We first add the boundings that are currently in the DB
    query = """
SELECT item_id, object_number, x, y, z, xs, ys, zs, h, p, r, OSG_LOCATION.cast_shadow, srid 
FROM OSG_ITEM_OBJECT JOIN OSG_LOCATION USING (osg_location_id) 
ORDER BY item_id,object_number"""
    osgItemObjects, numOsgItemObjects = utils.fetchDataFromDB(cursor, query)
    # osgItemObjects is (itemId, objectNumber, x, y, z, xs, ys, zs, h, p, r, castShadow, srid)
    # Now we add Default OSG data items for the objects that are not in OSG_ITEM_OBJECT table
    query = """
SELECT item_id,object_number 
FROM item_object 
WHERE (item_id,object_number) NOT IN (SELECT item_id,object_number FROM OSG_ITEM_OBJECT)
ORDER BY item_id,object_number"""
    objects, num_objects = utils.fetchDataFromDB(cursor, query)
    for (itemId, objectNumber) in objects:
        srid = None
        (x, y, z) = (0, 0, 0)
        (xs, ys, zs) = (1, 1, 1)
        query = """
SELECT ST_SRID(geom), st_x(st_centroid(geom)), st_y(st_centroid(geom)), min_z + ((max_z - min_z) / 2), 
       st_xmax(geom)-st_xmin(geom) as dx, st_ymax(geom)-st_ymin(geom) as dy, (max_z - min_z) as dz 
FROM ITEM 
WHERE item_id = %s and geom is not %s"""
        queryArgs = [itemId, None]
        footprints, num_footprints = utils.fetchDataFromDB(
            cursor, query, queryArgs)
        if num_footprints:
            (srid, x, y, z, xs, ys, zs) = footprints[0]
            if xs == 0: xs = 1
            if ys == 0: ys = 1
            if zs == 0: zs = 1
        osgItemObjects.append(
            [itemId, objectNumber, x, y, z, xs, ys, zs, 0, 0, 0, False, srid])
    # Now let's add them to the XML
    for (itemId, objectNumber, x, y, z, xs, ys, zs, h, p, r, castShadow,
         srid) in osgItemObjects:
        # only call getOSGPosition if [x,y,z] are not None
        if all(position is not None for position in [x, y, z]) and itemId > 0:
            if (srid is not None) and (srid == backgroundSRID):
                x, y, z = getOSGPosition(x, y, z, srid)
            else:
                x, y, z = getOSGPosition(x, y, z)
            uniqueName = utils.codeOSGActiveObjectUniqueName(
                cursor,
                utils.AO_TYPE_OBJ,
                itemId=itemId,
                objectId=objectNumber)
            proto = "Bounding Box"
            activeObject = viewer_conf_api.activeObject(prototype=proto,
                                                        uniqueName=uniqueName)
            setting = viewer_conf_api.setting(
                x=x,
                y=y,
                z=z,
                xs=xs,
                ys=ys,
                zs=zs,
                h=h,
                p=p,
                r=r,
                castShadow=(1 if castShadow else 0))
            activeObject.set_setting(setting)
            layer.add_activeObject(activeObject)
    activeObjects.add_layer(layer)

    # Add the labels
    layer = viewer_conf_api.layer(name='labels')
    utils.dbExecute(
        cursor, 'SELECT osg_label_name, text, red, green, blue, ' +
        'rotate_screen, outline, font, srid, x, y, z, xs, ys, zs, h, ' +
        'p, r, cast_shadow FROM OSG_LABEL INNER JOIN ' +
        'OSG_LOCATION ON OSG_LABEL.osg_location_id=' +
        'OSG_LOCATION.osg_location_id')
    rows = cursor.fetchall()
    for (name, text, red, green, blue, rotatescreen, outline, font, srid, x, y,
         z, xs, ys, zs, h, p, r, castShadow) in rows:
        proto = "labelPrototype"
        uniqueName = utils.codeOSGActiveObjectUniqueName(cursor,
                                                         utils.AO_TYPE_LAB,
                                                         labelName=name)
        if (srid is not None) and (srid == backgroundSRID):
            x, y, z = getOSGPosition(x, y, z, srid)
        else:
            x, y, z = getOSGPosition(x, y, z)
        activeObject = viewer_conf_api.activeObject(
            prototype=proto,
            uniqueName=uniqueName,
            labelText=text,
            labelColorRed=red,
            labelColorGreen=green,
            labelColorBlue=blue,
            labelRotateScreen=rotatescreen,
            outline=outline,
            Font=font)
        setting = viewer_conf_api.setting(x=x,
                                          y=y,
                                          z=z,
                                          xs=xs,
                                          ys=ys,
                                          zs=zs,
                                          h=h,
                                          p=p,
                                          r=r,
                                          castShadow=(1 if castShadow else 0))
        activeObject.set_setting(setting)
        layer.add_activeObject(activeObject)
    activeObjects.add_layer(layer)

    rootObject.set_activeObjects(activeObjects)

    # Create the XML
    rootObject.export(open(opts.output, 'w'), 0)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logger.info(msg)
Code example #14
def run(args):

    # set logging level
    global logger
    global connection
    global cursor

    logname = os.path.basename(__file__) + '.log'
    logger = utils.start_logging(filename=logname, level=args.log)
    localtime = utils.getCurrentTimeAsAscii()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logger.info(msg)

    # start timer
    t0 = time.time()

    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost,
                                           args.dbport)

    if args.itemid == '?':
        utils.listRawDataItems(cursor)
        return
    else:
        for rawDataItemId in args.itemid.split(','):
            # fetch the abs_path
            abs_paths = fetch_abs_path(rawDataItemId)
            msg = 'Abs path fetched: %s' % abs_paths
            print msg
            logger.info(msg)

            # fetch the potree abs_paths
            abs_potree_paths, num_potree = fetch_potree_abs_paths(
                rawDataItemId)
            msg = '%s abs potree paths fetched %s' % (num_potree,
                                                      abs_potree_paths)
            print msg
            logger.info(msg)

            # fetch the nexus abs_paths
            abs_nexus_paths, num_nexus = fetch_nexus_abs_paths(rawDataItemId)
            msg = '%s abs nexus paths fetched %s' % (num_nexus,
                                                     abs_nexus_paths)
            print msg
            logger.info(msg)

            # fetch the OSG abs_paths PC
            abs_osg_pc_paths, num_osg_pc = fetch_osg_abs_paths_pc(
                rawDataItemId)
            msg = '%s abs OSG paths for PC fetched: %s' % (num_osg_pc,
                                                           abs_osg_pc_paths)
            print msg
            logger.info(msg)

            # fetch the OSG abs_paths mesh
            abs_osg_mesh_paths, num_osg_mesh = fetch_osg_abs_paths_mesh(
                rawDataItemId)
            msg = '%s abs OSG paths for meshes fetched: %s' % (
                num_osg_mesh, abs_osg_mesh_paths)
            print msg
            logger.info(msg)

            # fetch the OSG abs_paths picture
            abs_osg_picture_paths, num_osg_picture = fetch_osg_abs_paths_picture(
                rawDataItemId)
            msg = '%s abs OSG paths for pictures fetched: %s' % (
                num_osg_picture, abs_osg_picture_paths)
            print msg
            logger.info(msg)

            # fetch the OSG abs_paths PC BG
            abs_osg_pc_bg_paths, num_osg_pc_bg = fetch_osg_abs_paths_pc_bg(
                rawDataItemId)
            msg = '%s abs OSG paths for PC BG fetched: %s' % (
                num_osg_pc_bg, abs_osg_pc_bg_paths)
            print msg
            logger.info(msg)

            # remove the files related to the above absolute paths
            for abs_paths_to_remove in (abs_paths, abs_potree_paths,
                                        abs_nexus_paths, abs_osg_pc_paths,
                                        abs_osg_mesh_paths,
                                        abs_osg_picture_paths,
                                        abs_osg_pc_bg_paths):
                remove_data(abs_paths_to_remove)

            msg = 'Removed data locations related to raw data item %s (%s)!' % (
                rawDataItemId, abs_paths[0])
            print msg
            logger.info(msg)

    # measure elapsed time
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logger.info(msg)
Code example #15
def run(opts):
    # Define logging and start logging
    logname = os.path.basename(opts.config) + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logging.info(msg)

    # Parse xml configuration file
    data = ET.parse(opts.config).getroot()

    # Database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser, opts.dbpass, opts.dbhost, opts.dbport)

    # get offset and srid of the background defined in the conf file
    (bgOffsetX, bgOffsetY, bgOffsetZ, bgSRID) = getBackgroundOffset(data, cursor)
    bgOffset = (bgOffsetX, bgOffsetY, bgOffsetZ)
    # Process updates
    updateAOS = data.xpath('//*[@status="updated"]')
    # loop over all updates found in the xml config file
    for ao in updateAOS:
        #(aoType, proto, uniqueName, siteId, activeobjectId, objectNumber) = \
        #getDetails(ao)
        uniqueName = ao.get('uniqueName')
        (aoType, itemId, rawDataItemId, objectId, labelName) = utils.decodeOSGActiveObjectUniqueName(cursor, uniqueName)
        if aoType == None:
            msg = 'Ignoring operation on %s. Could not decode uniqueName' % uniqueName
            print msg
            logging.warning(msg)
        else:
            # check if the object is in the DB
            osgLocationId = getOSGLocationId(cursor, aoType, labelName, itemId, objectId, rawDataItemId)
            if osgLocationId != None:
                # update the DB with the information in the xml config file
                if aoType == utils.AO_TYPE_LAB:
                    # Some other params may have changed in the label
                    msg = 'Updating label %s' % labelName
                    print msg
                    logging.info(msg)
                    deleteOSG(cursor, aoType, labelName)
                    osgLocationId = insertOSGLocation(cursor, ao.getchildren()[0], bgSRID, bgOffset)
                    insertDB(cursor, 'OSG_LABEL', ('osg_label_name', 'osg_location_id', 'text', 'red', 'green', 'blue', 'rotate_screen', 'outline', 'font'),
                                 (labelName, osgLocationId, ao.get('labelText'),
                                  ao.get('labelColorRed'), ao.get('labelColorGreen'), ao.get('labelColorBlue'),
                                  ao.get('labelRotateScreen'), ao.get('outline'), ao.get('Font')))
                else:
                    msg = 'Updating OSG location %d from %s' % (osgLocationId, uniqueName)
                    print msg
                    logging.info(msg)
                    updateOSGLocation(cursor, osgLocationId, ao.getchildren()[0], bgSRID, bgOffset)
            else:
                if aoType == utils.AO_TYPE_OBJ:
                    # It is a bounding that has been moved and it is not currently in the DB. Let's insert it!
                    msg = 'Insert missing OSG_ITEM_OBJECT (%s,%s)' % (itemId, objectId)
                    print msg
                    logging.info(msg)
                    osgLocationId = insertOSGLocation(cursor, ao.getchildren()[0], bgSRID, bgOffset)
                    insertDB(cursor, 'OSG_ITEM_OBJECT', ('item_id', 'object_number', 'osg_location_id'), (itemId, objectId, osgLocationId))
                else:
                    # log error if object is not found in DB
                    msg = 'Update not possible. OSG_ITEM_OBJECT from %s not found in DB' % uniqueName
                    print msg
                    logging.error(msg)

    # Process deletes (only possible for site objects)
    deleteAOS = data.xpath('//*[@status="deleted"]')
    # loop over all deletes found in the xml config file
    for ao in deleteAOS:
        uniqueName = ao.get('uniqueName')
        (aoType, itemId, rawDataItemId, objectId, labelName) =  utils.decodeOSGActiveObjectUniqueName(cursor, uniqueName)
        if aoType==None:
            msg = 'Ignoring operation on %s. Could not decode uniqueName' % uniqueName
            print msg
            logging.warning(msg)
        else:
            if aoType in (utils.AO_TYPE_OBJ, utils.AO_TYPE_LAB):
                # check if the object is in the DB
                osgLocationId = getOSGLocationId(cursor, aoType, labelName, itemId, objectId)
                if osgLocationId != None:
                    # Delete the OSG-related entries from the DB
                    msg = 'Deleting OSG related entries for %s' % uniqueName
                    print msg
                    logging.info(msg)
                    deleteOSG(cursor, aoType, labelName, itemId, objectId)
                else:
                    # log error if object is not found in DB
                    msg = 'Not possible to delete. OSG_ITEM_OBJECT from %s not found in DB. Maybe already deleted?' % uniqueName
                    print msg
                    logging.warning(msg)
            else:
                # log error if trying to delete a non-site object
                msg = 'Ignoring delete in %s: Meshes, pictures and PCs can not be deleted' % uniqueName
                print msg
                logging.error(msg)

    # Process new objects (only possible for site objects)
    newAOS = data.xpath('//*[@status="new"]')
    # loop over all new objects found in the xml config file
    for ao in newAOS:
        uniqueName = ao.get('uniqueName')
        (aoType, itemId, rawDataItemId, objectId, labelName) = utils.decodeOSGActiveObjectUniqueName(cursor, uniqueName)
        if aoType==None:
            msg = 'Ignoring operation on %s. Could not decode uniqueName' % uniqueName
            print msg
            logging.warning(msg)
        else:
            if aoType in (utils.AO_TYPE_OBJ, utils.AO_TYPE_LAB):
                # check if the object is in the DB
                osgLocationId = getOSGLocationId(cursor, aoType, labelName, itemId, objectId)
                if osgLocationId != None:
                    # log error if the new object is already in the DB
                    msg = 'OSG_ITEM_OBJECT from %s already in DB. Ignoring add' % uniqueName
                    print msg
                    logging.warning(msg)
                else:
                    osgLocationId = insertOSGLocation(cursor, ao.getchildren()[0], bgSRID, bgOffset)
                    if aoType == utils.AO_TYPE_OBJ:
                        # add object to the DB
                        if objectId == utils.ITEM_OBJECT_NUMBER_ITEM:
                            msg = 'Adding missing ITEM %s' % objectId
                            print msg
                            logging.info(msg)
                            insertDB(cursor, 'ITEM', ('item_id', 'background'), (itemId, False))
                        msg = 'Adding ITEM_OBJECT (%d,%d)' % (itemId, objectId)
                        print msg
                        logging.info(msg)
                        insertDB(cursor, 'ITEM_OBJECT', ('item_id', 'object_number'), (itemId, objectId))
                        insertDB(cursor, 'OSG_ITEM_OBJECT', ('item_id', 'object_number', 'osg_location_id'), (itemId, objectId, osgLocationId))
                    else:                        
                        # add label to the DB
                        msg = 'Adding label %s' % uniqueName
                        print msg
                        logging.info(msg)
                        insertDB(cursor, 'OSG_LABEL', ('osg_label_name', 'osg_location_id', 'text', 'red', 'green', 'blue', 'rotate_screen', 'outline', 'font'), 
                                 (labelName, osgLocationId, ao.get('labelText'), 
                                  ao.get('labelColorRed'), ao.get('labelColorGreen'), ao.get('labelColorBlue'), 
                                  ao.get('labelRotateScreen'), ao.get('outline'), ao.get('Font')))
            else:
                # log error if trying to add a non-site object
                msg = 'Ignoring new in %s: Meshes, pictures and PCs can not be added' % uniqueName
                print msg
                logging.error(msg)

    # Process the cameras (the DEF CAMs are added for all objects and can not be deleted or updated)
    cameras = data.xpath('//camera[not(starts-with(@name,"' + utils.DEFAULT_CAMERA_PREFIX + '"))]')
    # Delete all previous cameras and related entries
    deleteCameras(cursor)
    # add all cameras
    for camera in cameras:
        name = camera.get('name')
        itemId = None
        if name.count(utils.USER_CAMERA):
            try:
                itemId = int(name[name.index(utils.USER_CAMERA) + len(utils.USER_CAMERA):].split('_')[0])
            except:
                msg = 'Incorrect name %s for a ITEM camera' % name
                print msg
                logging.warn(msg)
                itemId = None
        msg = 'Adding camera %s' % name
        print msg
        logging.info(msg)
        osgLocationId = insertOSGLocation(cursor, camera, bgSRID, bgOffset)
        insertDB(cursor, 'OSG_CAMERA', ('osg_camera_name', 'osg_location_id'), (name, osgLocationId))
        if itemId != None:
            insertDB(cursor, 'OSG_ITEM_CAMERA', ('item_id', 'osg_camera_name'), (itemId, name))
    # close DB connection
    utils.closeConnectionDB(connection, cursor)
    
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logging.info(msg)
Code example #16
def run(args):
    global logger
    global offsetX
    global offsetY
    global offsetZ

    logname = os.path.basename(args.output) + '.log'
    logger = utils.start_logging(filename=logname, level=args.log)

    # start logging
    localtime = utils.getCurrentTimeAsAscii()
    msg = __file__ + ' script logging start at %s' % localtime
    print msg
    logger.info(msg)
    t0 = time.time()

    # connect to DB and get a cursor
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost)

    # We assume the osg location is relative
    # We need to make it absolute by adding the offset of the background with srid as provided
    query = """
SELECT C.offset_x, C.offset_y, C.offset_z 
FROM raw_data_item A, raw_data_item_pc B, osg_data_item_pc_background C 
WHERE A.raw_data_item_id = B.raw_data_item_id AND 
      B.raw_data_item_id = C.raw_data_item_id AND 
      A.srid = %s"""
    queryArgs = [
        args.srid,
    ]
    backgroundOffsets, num_backgrounds = utils.fetchDataFromDB(
        cursor, query, queryArgs)
    if num_backgrounds:
        (offsetX, offsetY, offsetZ) = backgroundOffsets[0]

    # get all items
    query = 'SELECT item_id, ST_ASGEOJSON(geom), min_z, max_z FROM item WHERE NOT background ORDER BY item_id'
    sites, num_sites = utils.fetchDataFromDB(cursor, query)

    data = []

    for (itemId, itemGeom, minz, maxz) in sites:
        # Generate the JSON data for this item
        dataSite = {}
        dataSite["id"] = itemId
        if itemGeom != None:
            dataSite["footprint"] = json.loads(itemGeom)['coordinates']
            dataSite["footprint_altitude"] = [minz, maxz]

        addThumbnail(cursor, itemId, dataSite)
        addSiteMetaData(cursor, itemId, dataSite)
        addPointCloud(cursor, itemId, dataSite, args.srid)
        addMeshes(cursor, itemId, dataSite, args.srid)
        addObjectsMetaData(cursor, itemId, dataSite, args.srid)

        data.append(dataSite)

    # close the Db connection
    utils.closeConnectionDB(connection, cursor)

    # save the data into JSON file
    save2JSON(args.output, data)

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logger.info(msg)
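
save2JSON, which writes the collected per-item dictionaries, is not shown. A minimal sketch, assuming the output is a single JSON document listing all sites:

import json

def save2JSON(outputFile, data):
    # Hypothetical sketch: serialize the list of site dictionaries to disk.
    with open(outputFile, 'w') as jsonFile:
        json.dump(data, jsonFile, indent=2)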
Code example #17
File: CreateOSGConfig.py Project: NLeSC/PattyData
def run(opts):
    global logger
    # Define logger and start logging
    #logname = os.path.basename(opts.output) + '.log'
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    logger = utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logger.info(msg)

    if not opts.output.endswith(".conf.xml"):
        logger.error('The output file must end with .conf.xml')
        raise IOError('The output file must end with .conf.xml')

    # Create python postgres connection
    global cursor
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)
    
    # Check that provided background is in DB
    query = """
SELECT OSG_DATA_ITEM_PC_BACKGROUND.abs_path,srid 
FROM OSG_DATA_ITEM_PC_BACKGROUND JOIN RAW_DATA_ITEM USING (raw_data_item_id)"""
    rows, num_rows = utils.fetchDataFromDB(cursor, query)
    backGroundAbsPath = None
    backgroundSRID = None
    for (bgAbsPath,bgSRID) in rows:
        if opts.background == os.path.basename(bgAbsPath):
            backGroundAbsPath = bgAbsPath
            backgroundSRID = bgSRID
    if backGroundAbsPath == None:
        errorMsg = 'Background ' + opts.background + ' is not found'
        logger.error(errorMsg)
        raise Exception(errorMsg)
    
    # Get the root object: the OSG configuration
    rootObject = viewer_conf_api.osgRCconfiguration()
    # set version
    rootObject.set_version("0.2")

    # Add all the different XML of the active objects
    # (we add distinct since the boundings will share XMLs)
    query = """
SELECT DISTINCT xml_abs_path 
FROM OSG_DATA_ITEM ORDER BY xml_abs_path"""
    rows, num_rows = utils.fetchDataFromDB(cursor, query)
    for (xmlPath,) in rows:
        if xmlPath.count(opts.osg) == 0:
            logger.error('Mismatch between given OSG data directory ' +
                         'and DB content')
        rootObject.add_objectLibrary(viewer_conf_api.objectLibrary
                                     (url=os.path.relpath(xmlPath, opts.osg)))

    # Add the object library with the boundings
    rootObject.add_objectLibrary(viewer_conf_api.objectLibrary
                                 (url=utils.BOUNDINGS_XML_RELATIVE))

    # Add the cameras that are in the DB
    cameras = viewer_conf_api.cameras()
    query = """
SELECT osg_camera_name, srid, x, y, z, h, p, r 
FROM OSG_CAMERA JOIN OSG_LOCATION USING (osg_location_id)"""
    rows, num_rows = utils.fetchDataFromDB(cursor, query)
    for (name, srid, x, y, z, h, p, r) in rows:
        if (srid is not None) and (srid == backgroundSRID):
            x, y, z = getOSGPosition(x, y, z, srid)
        else:
            x, y, z = getOSGPosition(x, y, z)
        cameras.add_camera(viewer_conf_api.camera
                           (name=name, x=x, y=y, z=z, h=h, p=p, r=r))
    # Add Default cameras for the items that have no camera in the DB
    query = """
SELECT 
    item_id, ST_SRID(geom), st_x(st_centroid(geom)), st_y(st_centroid(geom)), min_z + ((max_z - min_z) / 2) 
FROM ITEM WHERE NOT background AND geom IS NOT null AND item_id NOT IN (
    SELECT DISTINCT item_id FROM OSG_ITEM_CAMERA
) ORDER BY item_id"""
    rows, numitems = utils.fetchDataFromDB(cursor, query)
    for (itemId, srid, x, y, z) in rows:
        # only call getOSGPosition if [x,y,z] are not None
        # should item_id = -1 be added? 
        if all(position is not None for position in [x,y,z]) and itemId>0:
            if (srid is not None) and (srid == backgroundSRID):
                x, y, z = getOSGPosition(x, y, z, srid)
            else:
                x, y, z = getOSGPosition(x, y, z)
            cameras.add_camera(viewer_conf_api.camera
                               (name=utils.DEFAULT_CAMERA_PREFIX + str(itemId),
                                x=x, y=y, z=z))
    rootObject.set_cameras(cameras)
    
    # Add the XML content of the preferences
    rootObject.set_preferences(viewer_conf_api.parseString(DEFAULT_PREFENCES))
    
    attributes = viewer_conf_api.attributes()
    # Use generic method to fill all properties.
    # We need the name in the XML, the column name in the DB and
    # the table name in the DB
    for property in utils.ATTRIBUTES_ORDER:
        (cName, tName) = utils.ATTRIBUTES[property]
        elements = getattr(viewer_conf_api, property + 's')()
        # We need to call the columns and tables with extra "" because
        # they were created from the Access DB
#        utils.dbExecute(cursor, 'SELECT "' + cName + '" FROM "' + tName + '"')
        utils.dbExecute(cursor, 'SELECT ' + cName + ' FROM ' + tName)

        for (element,) in cursor:
            getattr(elements, 'add_' + property)(getattr(
                viewer_conf_api, property)(name=element))
        getattr(attributes, 'set_' + property + 's')(elements)
    rootObject.set_attributes(attributes)
    # Add all the static objects, i.e. the OSG from the background

    # Add the static object for the background
    staticObjects = viewer_conf_api.staticObjects()
    staticObjects.add_staticObject(viewer_conf_api.staticObject
                                           (url=os.path.relpath(
                                           glob.glob(backGroundAbsPath + '/' + utils.OSG_DATA_PREFIX + '.osgb')[0],
                                           opts.osg)))
    # Add hardcoded DOME
    staticObjects.add_staticObject(viewer_conf_api.staticObject
                                   (url=utils.DOMES_OSG_RELATIVE))
    rootObject.set_staticObjects(staticObjects)

    # Add the 5 different layers of active objects
    activeObjects = viewer_conf_api.activeObjects()
    # First we add points, meshes and pictures which are related to
    # the active_objects_sites
    layersData = [('points', 'OSG_DATA_ITEM_PC_SITE', utils.AO_TYPE_PC),
                  ('photos', 'OSG_DATA_ITEM_PICTURE', utils.AO_TYPE_PIC),
                  ('meshes', 'OSG_DATA_ITEM_MESH', utils.AO_TYPE_MESH)]
    
    for (layerName, tableName, inType) in layersData:
        layer = viewer_conf_api.layer(name=layerName)
        
        query = """
SELECT item_id, raw_data_item_id, OSG_LOCATION.srid, x, y, z, xs, ys, zs, h, p, r, cast_shadow 
FROM """ + tableName + """ JOIN OSG_DATA_ITEM USING (osg_data_item_id) 
                           JOIN OSG_LOCATION  USING (osg_location_id) 
                           JOIN RAW_DATA_ITEM USING (raw_data_item_id) 
ORDER BY item_id"""
        rows, numitems = utils.fetchDataFromDB(cursor, query)
        for (itemId, rawDataItemId, srid, x, y, z, xs, ys, zs, h, p, r, castShadow) in rows:
            # only call getOSGPosition if [x,y,z] are not None            
            if all(position is not None for position in [x,y,z]):
                if (srid is not None) and (srid == backgroundSRID):
                    x, y, z = getOSGPosition(x, y, z, srid)
                else:
                    x, y, z = getOSGPosition(x, y, z)
            uniqueName = utils.codeOSGActiveObjectUniqueName(cursor, inType, rawDataItemId)
            activeObject = viewer_conf_api.activeObject(prototype=uniqueName,
                                                        uniqueName=uniqueName)
            setting = viewer_conf_api.setting(
                x=x, y=y, z=z, xs=xs, ys=ys, zs=zs, h=h, p=p, r=r,
                castShadow=(1 if castShadow else 0))
            activeObject.set_setting(setting)
            layer.add_activeObject(activeObject)
        activeObjects.add_layer(layer)

    # Add the boundings
    layer = viewer_conf_api.layer(name='boundings')
    # We first add the boundings that are currently in the DB
    query = """
SELECT item_id, object_number, x, y, z, xs, ys, zs, h, p, r, OSG_LOCATION.cast_shadow, srid 
FROM OSG_ITEM_OBJECT JOIN OSG_LOCATION USING (osg_location_id) 
ORDER BY item_id,object_number"""
    osgItemObjects, numOsgItemObjects = utils.fetchDataFromDB(cursor, query)
    # osgItemObjects is (itemId, objectNumber, x, y, z, xs, ys, zs, h, p, r, castShadow, srid)
    # Now we add Default OSG data items for the objects that are not in OSG_ITEM_OBJECT table
    query = """
SELECT item_id,object_number 
FROM item_object 
WHERE (item_id,object_number) NOT IN (SELECT item_id,object_number FROM OSG_ITEM_OBJECT)
ORDER BY item_id,object_number"""
    objects, num_objects = utils.fetchDataFromDB(cursor, query)
    for (itemId, objectNumber) in objects:
        srid = None
        (x,y,z) = (0,0,0)
        (xs,ys,zs) = (1,1,1)
        query = """
SELECT ST_SRID(geom), st_x(st_centroid(geom)), st_y(st_centroid(geom)), min_z + ((max_z - min_z) / 2), 
       st_xmax(geom)-st_xmin(geom) as dx, st_ymax(geom)-st_ymin(geom) as dy, (max_z - min_z) as dz 
FROM ITEM 
WHERE item_id = %s and geom is not %s"""
        queryArgs = [itemId, None]
        footprints, num_footprints = utils.fetchDataFromDB(cursor, query, queryArgs)
        if num_footprints:
            (srid, x, y, z, xs, ys, zs) = footprints[0]
            if xs == 0: xs = 1
            if ys == 0: ys = 1
            if zs == 0: zs = 1
        osgItemObjects.append([itemId, objectNumber, x, y, z, xs, ys, zs, 0, 0, 0, False, srid])
    # Now let's add them to the XML
    for (itemId, objectNumber, x, y, z, xs, ys, zs, h, p, r, castShadow, srid) in osgItemObjects:
        # only call getOSGPosition if [x,y,z] are not None
        if all(position is not None for position in [x,y,z]) and itemId>0:        
            if (srid is not None) and (srid == backgroundSRID):
                x, y, z = getOSGPosition(x, y, z, srid)
            else:
                x, y, z = getOSGPosition(x, y, z)                
            uniqueName = utils.codeOSGActiveObjectUniqueName(cursor, utils.AO_TYPE_OBJ, itemId = itemId, objectId = objectNumber)
            proto = "Bounding Box"
            activeObject = viewer_conf_api.activeObject(prototype=proto,
                                                        uniqueName=uniqueName)
            setting = viewer_conf_api.setting(
                x=x, y=y, z=z, xs=xs, ys=ys, zs=zs, h=h, p=p, r=r,
                castShadow=(1 if castShadow else 0))
            activeObject.set_setting(setting)
            layer.add_activeObject(activeObject)
    activeObjects.add_layer(layer)

    # Add the labels
    layer = viewer_conf_api.layer(name='labels')
    utils.dbExecute(cursor, 'SELECT osg_label_name, text, red, green, blue, ' +
                    'rotate_screen, outline, font, srid, x, y, z, xs, ys, zs, h, ' +
                    'p, r, cast_shadow FROM OSG_LABEL INNER JOIN ' +
                    'OSG_LOCATION ON OSG_LABEL.osg_location_id=' +
                    'OSG_LOCATION.osg_location_id')
    rows = cursor.fetchall()
    for (name, text, red, green, blue, rotatescreen, outline, font, srid, x, y, z, xs, ys, zs, h, p, r, castShadow) in rows:
        proto = "labelPrototype"
        uniqueName = utils.codeOSGActiveObjectUniqueName(cursor, utils.AO_TYPE_LAB, labelName = name)
        if (srid is not None) and (srid == backgroundSRID):
            x, y, z = getOSGPosition(x, y, z, srid)
        else:
            x, y, z = getOSGPosition(x, y, z)
        activeObject = viewer_conf_api.activeObject(
            prototype=proto, uniqueName=uniqueName, labelText=text,
            labelColorRed=red, labelColorGreen=green, labelColorBlue=blue,
            labelRotateScreen=rotatescreen, outline=outline, Font=font)
        setting = viewer_conf_api.setting(
            x=x, y=y, z=z, xs=xs, ys=ys, zs=zs, h=h, p=p, r=r,
            castShadow=(1 if castShadow else 0))
        activeObject.set_setting(setting)
        layer.add_activeObject(activeObject)
    activeObjects.add_layer(layer)

    rootObject.set_activeObjects(activeObjects)

    # Create the XML
    rootObject.export(open(opts.output, 'w'), 0)
    
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logger.info(msg)
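
The getOSGPosition helper used throughout the snippet above is defined earlier in the same script and is not shown here. Its role is to put a stored coordinate into the local OSG frame: positions stored in the background's SRID get the background offset subtracted, while positions that are already relative pass through unchanged. A minimal sketch of that idea, assuming global offsetX, offsetY and offsetZ hold the background offset (the real implementation may differ), could look like this:

# Hypothetical sketch, not the script's actual implementation.
# Assumes offsetX, offsetY, offsetZ were filled from
# OSG_DATA_ITEM_PC_BACKGROUND for the matching background.
def getOSGPosition(x, y, z, srid=None):
    if srid is not None:
        # Absolute coordinates in the background CRS: make them relative.
        return x - offsetX, y - offsetY, z - offsetZ
    # Already relative to the background: return unchanged.
    return x, y, z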
Code example #18
def run(args):    
    global logger
    global offsetX
    global offsetY
    global offsetZ
    
    logname = os.path.basename(args.output) + '.log'
    logger = utils.start_logging(filename=logname, level=args.log)

    # start logging    
    localtime = utils.getCurrentTimeAsAscii()
    msg = __file__ + ' script logging starts at %s' % localtime
    print msg
    logger.info(msg)
    t0 = time.time()
       
    # connect to DB and get a cursor   
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser, args.dbpass, args.dbhost)
    
    # We assume the osg location is relative
    # We need to make it absolute by adding the offset of the background with srid as provided
    query = """
SELECT C.offset_x, C.offset_y, C.offset_z 
FROM raw_data_item A, raw_data_item_pc B, osg_data_item_pc_background C 
WHERE A.raw_data_item_id = B.raw_data_item_id AND 
      B.raw_data_item_id = C.raw_data_item_id AND 
      A.srid = %s"""
    queryArgs = [args.srid,]
    backgroundOffsets, num_backgrounds = utils.fetchDataFromDB(cursor, query,  queryArgs)
    if num_backgrounds:
        (offsetX,offsetY,offsetZ) = backgroundOffsets[0]
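    # If no background matches the given SRID, the offsets keep their
    # module-level defaults (presumably zero) and positions stay relative.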
        
    # get all items         
    query = 'SELECT item_id, ST_ASGEOJSON(geom), min_z, max_z FROM item WHERE NOT background ORDER BY item_id'
    sites, num_sites = utils.fetchDataFromDB(cursor, query)
    
    data = []
    
    for (itemId, itemGeom, minz, maxz) in sites:
        # Generate the JSON data for this item
        dataSite = {}
        dataSite["id"] = itemId
        if itemGeom is not None:
            dataSite["footprint"] = json.loads(itemGeom)['coordinates']
            dataSite["footprint_altitude"] = [minz,maxz]
        
        addThumbnail(cursor, itemId, dataSite)
        addSiteMetaData(cursor, itemId, dataSite)
        addPointCloud(cursor, itemId, dataSite, args.srid)
        addMeshes(cursor, itemId, dataSite, args.srid)
        addObjectsMetaData(cursor, itemId, dataSite, args.srid)
        
        data.append(dataSite)
        
    # close the Db connection
    utils.closeConnectionDB(connection, cursor)    

    # save the data into JSON file
    save2JSON(args.output, data)
    
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname)
    print(msg)
    logger.info(msg)
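
The helpers addThumbnail, addSiteMetaData, addPointCloud, addMeshes, addObjectsMetaData and save2JSON live elsewhere in the same module. As an illustration of the final step only, a minimal save2JSON could be sketched as follows; the signature is assumed from the call above and the real helper may format the output differently:

# Hypothetical sketch of the serialization step; the actual helper in the
# module may differ.
import json

def save2JSON(outputFile, data):
    # Dump the list of per-item dictionaries as pretty-printed JSON.
    with open(outputFile, 'w') as jsonFile:
        json.dump(data, jsonFile, indent=4)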
Code example #19
def run(args):
    logname = os.path.basename(args.input) + '.log'
    utils.start_logging(filename=logname, level=args.log)

    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logging.info(msg)

    logging.info('Checking validity of SQL file')
    # Check that beginning of the file does not contain a create database statement
    if os.popen('head -500 ' + args.input +
                ' | grep "CREATE DATABASE"').read().count("CREATE DATABASE"):
        msg = "You must remove CREATE DATABASE statement from the SQL file"
        print msg
        logging.error(msg)
        return
    # Check that there are no DEFAULT values on TIMESTAMP columns that would cause errors
    if os.popen('grep "TIMESTAMP DEFAULT" ' +
                args.input).read().count("TIMESTAMP DEFAULT"):
        msg = "You must remove any DEFAULT value of any TIMESTAMP column"
        print msg
        logging.error(msg)
        return
    # Check that there are no index creations
    if os.popen('grep "INDEX" ' + args.input).read().count("INDEX"):
        msg = "You must remove any INDEX creation"
        print msg
        logging.error(msg)
        return
    if os.popen("""grep '"' """ + args.input).read().count('"'):
        msg = 'You must remove any double quote (")'
        print msg
        logging.error(msg)
        dangerousWords = []
        for line in open(args.input, 'r').read().split('\n'):
            if not line.startswith('--'):
                for word in line.split():
                    if word.count('"') == 1:
                        dangerousWords.append(word)
        if len(dangerousWords):
            msg = 'Also, before removing all ", take care of table and column names that would become incorrect once " is removed.\nIf any of the following is a table or column name, please make sure it does not contain white space: ' + ','.join(
                dangerousWords)
            print msg
            logging.error(msg)
            return
        return

    # Establish connection with DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost,
                                           args.dbport)

    # First we drop all previous attribute tables
    logging.info("Dropping all previous attribute tables")
    for tablename in ('tbl2_site_relation', 'tbl2_object_depression',
                      'tbl2_object_decoration', 'tbl2_object_material',
                      'tbl1_object', 'tbl1_site'):
        cursor.execute('DROP TABLE IF EXISTS ' + tablename + ' CASCADE')
        connection.commit()
    # First we need to drop the previous constraints in tbl1_site and tbl1_object


#    logging.info("Dropping constraints in tbl1_site and tbl1_object tables")
#    for tablename in ('tbl1_site','tbl1_object'):
#        cursor.execute("select constraint_name from information_schema.table_constraints where table_name=%s", [tablename,])
#        constraintNames = cursor.fetchall()
#        for (constraintName, ) in constraintNames:
#            cursor.execute('ALTER TABLE ' + tablename + ' DROP CONSTRAINT %s CASCADE', [constraintName,])
#            connection.commit()

    # This script will drop all attribute tables and create them again
    logging.info('Executing SQL file %s' % args.input)
    #utils.load_sql_file(cursor, args.input)
    connParams = utils.postgresConnectString(args.dbname, args.dbuser,
                                             args.dbpass, args.dbhost,
                                             args.dbport, True)
    # Use a separate log file for the psql output so it does not clobber
    # the script's own log file (logname above uses the same base name)
    logFile = os.path.basename(args.input) + '.psql.log'
    command = 'psql ' + connParams + ' -f ' + args.input + ' &> ' + logFile
    logging.info(command)
    os.system(command)

    # Check for errors
    if os.popen('cat ' + logFile + ' | grep ERROR').read().count("ERROR"):
        msg = 'There were errors in the data loading. Please see log ' + logFile
        print msg
        logging.error(msg)
        return

    # Set select permissions to all new tables
    logging.info('Granting select permissions to all tables')
    cursor.execute(
        "select tablename from  pg_tables where schemaname = 'public'")
    tablesNames = cursor.fetchall()
    for (tableName, ) in tablesNames:
        cursor.execute('GRANT SELECT ON ' + tableName + ' TO public')

    # We check that the added Sites and Objects are also in the Data Management part of the DB
    # All sites in tbl1_site must have an entry in ITEM
    logging.info(
        'Adding items in attribute data that are missing in ITEM table')
    query = 'SELECT site_id from tbl1_site WHERE site_id NOT IN (SELECT item_id FROM item)'
    sites, num_sites = utils.fetchDataFromDB(cursor, query)
    for (siteId, ) in sites:
        utils.dbExecute(
            cursor, "INSERT INTO ITEM (item_id, background) VALUES (%s,%s)",
            [siteId, False])
        utils.dbExecute(
            cursor,
            "INSERT INTO ITEM_OBJECT (item_id, object_number) VALUES (%s,%s)",
            [siteId, utils.ITEM_OBJECT_NUMBER_ITEM])

    # All objects in tbl1_object must also be in ITEM_OBJECT
    logging.info(
        'Adding items objects in attribute data that are missing in ITEM_OBJECT table'
    )
    query = 'SELECT site_id,object_id from tbl1_object WHERE (site_id,object_id) NOT IN (SELECT item_id,object_number FROM item_object)'
    sites_objects, num_sites_objects = utils.fetchDataFromDB(cursor, query)
    for (siteId, objectId) in sites_objects:
        utils.dbExecute(
            cursor,
            "INSERT INTO ITEM_OBJECT (item_id, object_number) VALUES (%s,%s)",
            [siteId, objectId])

    # We re-add the constraints that link management and attribute data
    logging.info('Adding constraints between attribute and items')
    cursor.execute("""ALTER TABLE tbl1_object
ADD FOREIGN KEY (site_id, object_id)
REFERENCES ITEM_OBJECT (item_id, object_number)
ON UPDATE NO ACTION
ON DELETE NO ACTION""")
    connection.commit()
    cursor.execute("""ALTER TABLE tbl1_site
ADD FOREIGN KEY (site_id)
REFERENCES ITEM (item_id)
ON UPDATE NO ACTION
ON DELETE NO ACTION""")
    connection.commit()

    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
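
The SQL validity checks above shell out to head, grep and cat, so they only work on Unix-like systems and re-read the file several times. A portable alternative, sketched here under the assumption that the dump fits comfortably in memory, would scan the file once in Python:

# Hypothetical portable rewrite of the shell-based checks; not part of the
# original script.
def check_sql_file(path):
    # Return a list of problems found in the SQL dump, empty if it is clean.
    problems = []
    with open(path, 'r') as sqlFile:
        content = sqlFile.read()
    if 'CREATE DATABASE' in content:
        problems.append('remove the CREATE DATABASE statement')
    if 'TIMESTAMP DEFAULT' in content:
        problems.append('remove DEFAULT values on TIMESTAMP columns')
    if 'INDEX' in content:
        problems.append('remove INDEX creation statements')
    if '"' in content:
        problems.append('remove double quotes (")')
    return problems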