def run(opts):
    """Entry point: convert pending raw point-cloud data items to POTree.

    opts must provide: log, dbname, dbuser, dbpass, dbhost, dbport,
    potreeDir, and itemid ('?' lists all raw data items, '' converts all
    pending items, '!' only lists pending items, otherwise a
    comma-separated list of raw_data_item_ids to convert).

    Fix: use print() consistently (the original mixed the Python 2
    'print msg' statement with 'print(msg)').
    """
    # Start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)
    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)
    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        # Items with a point cloud but no POTree conversion yet.
        query = """
SELECT raw_data_item_id,abs_path,background
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id)
                   JOIN RAW_DATA_ITEM_PC USING (raw_data_item_id)
WHERE raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM POTREE_DATA_ITEM_PC)"""
        # Get the list of items that are not converted yet (we sort by
        # background to have the background converted first)
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(
            cursor, query)
        for (rawDataItemId, absPath, isBackground) in raw_data_items:
            if opts.itemid == '':
                levels = getNumLevels(opts, isBackground)
                createPOTree(cursor, rawDataItemId, opts.potreeDir, levels)
            else:
                # '!' mode: only report what would be converted
                m = '\t'.join((str(rawDataItemId), absPath))
                print(m)
                logging.info(m)
    else:
        for rawDataItemId in opts.itemid.split(','):
            rows, num_rows = utils.fetchDataFromDB(
                cursor,
                'SELECT background FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) WHERE raw_data_item_id = %s',
                [int(rawDataItemId)])
            if num_rows == 0:
                logging.error('There is not a raw data item with id %d'
                              % int(rawDataItemId))
                return
            isBackground = rows[0][0]
            levels = getNumLevels(opts, isBackground)
            createPOTree(cursor, int(rawDataItemId), opts.potreeDir, levels)
    # close DB connection
    utils.closeConnectionDB(connection, cursor)
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
def addObjectsMetaData(cursor, itemId, jsonSite, srid):
    # Collect per-object meta-data for the given site/item and store it in
    # jsonSite["objects"]: base attributes from tbl1_object, the list of
    # materials from tbl2_object_material, and (when available) the OSG
    # position converted to be relative to the item's background SRID.
    query = 'SELECT object_id, in_situ, ancient, condition, description_object, object_type, object_interpretation, period, date_specific, description_restorations FROM tbl1_object WHERE site_id = %s'
    queryArgs = [ itemId, ]
    site_objects, num_site_objects = utils.fetchDataFromDB(
        cursor, query, queryArgs)
    objectsData = []
    for (object_id, in_situ, ancient, condition, description_object,
         object_type, object_interpretation, period, date_specific,
         description_restorations) in site_objects:
        # Base attributes copied 1:1 from the tbl1_object row
        objectData = {}
        objectData['object_id'] = object_id
        objectData['in_situ'] = in_situ
        objectData['ancient'] = ancient
        objectData['condition'] = condition
        objectData['description_object'] = description_object
        objectData['object_type'] = object_type
        objectData['object_interpretation'] = object_interpretation
        objectData['period'] = period
        objectData['date_specific'] = date_specific
        objectData['description_restorations'] = description_restorations
        # Object materials: one dict per material row, with a 1-based
        # sequential "id" local to this object
        objectsMaterialData = []
        query = "SELECT material_type, material_subtype, material_technique FROM tbl2_object_material WHERE site_id = %s AND object_id = %s"
        queryArgs = [itemId, object_id]
        object_materials, num_object_materials = utils.fetchDataFromDB(
            cursor, query, queryArgs)
        for (material_type, material_subtype,
             material_technique) in object_materials:
            objectMaterialData = {}
            objectMaterialData["id"] = len(objectsMaterialData) + 1
            objectMaterialData["material_type"] = material_type
            objectMaterialData["material_subtype"] = material_subtype
            objectMaterialData["material_technique"] = material_technique
            objectsMaterialData.append(objectMaterialData)
        objectData["object_material"] = objectsMaterialData
        # Object location: joins osg_item_object with osg_location using the
        # object_number; only the first matching location (if any) is used
        query = "SELECT B.srid, B.x, B.y, B.z, B.xs, B.ys, B.zs, B.h, B.p, B.r FROM osg_item_object A, osg_location B WHERE A.item_id = %s AND A.object_number = %s"
        queryArgs = [itemId, object_id]
        object_locations, num_object_locations = utils.fetchDataFromDB(
            cursor,
            query, queryArgs)
        if num_object_locations:
            (locationSrid, x, y, z, xs, ys, zs, h, p, r) = object_locations[0]
            # Re-express the stored position relative to the target srid
            objectData['osg_position'] = getOSGPosition(
                cursor, srid, locationSrid, x, y, z, xs, ys, zs, h, p, r)
        objectsData.append(objectData)
    jsonSite["objects"] = objectsData
def run(opts):
    """Entry point: convert pending raw point-cloud data items to POTree.

    Duplicate of the earlier run(); same contract. opts must provide: log,
    dbname, dbuser, dbpass, dbhost, dbport, potreeDir, and itemid ('?'
    lists all raw data items, '' converts all pending, '!' only lists
    pending, otherwise a comma-separated list of raw_data_item_ids).

    Fix: use print() consistently (the original mixed the Python 2
    'print msg' statement with 'print(msg)').
    """
    # Start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)
    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)
    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        # Items with a point cloud but no POTree conversion yet.
        query = """
SELECT raw_data_item_id,abs_path,background
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id)
                   JOIN RAW_DATA_ITEM_PC USING (raw_data_item_id)
WHERE raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM POTREE_DATA_ITEM_PC)"""
        # Get the list of items that are not converted yet (we sort by
        # background to have the background converted first)
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(cursor,
                                                                   query)
        for (rawDataItemId, absPath, isBackground) in raw_data_items:
            if opts.itemid == '':
                levels = getNumLevels(opts, isBackground)
                createPOTree(cursor, rawDataItemId, opts.potreeDir, levels)
            else:
                # '!' mode: only report what would be converted
                m = '\t'.join((str(rawDataItemId), absPath))
                print(m)
                logging.info(m)
    else:
        for rawDataItemId in opts.itemid.split(','):
            rows, num_rows = utils.fetchDataFromDB(
                cursor,
                'SELECT background FROM RAW_DATA_ITEM JOIN ITEM USING (item_id) WHERE raw_data_item_id = %s',
                [int(rawDataItemId)])
            if num_rows == 0:
                logging.error('There is not a raw data item with id %d'
                              % int(rawDataItemId))
                return
            isBackground = rows[0][0]
            levels = getNumLevels(opts, isBackground)
            createPOTree(cursor, int(rawDataItemId), opts.potreeDir, levels)
    # close DB connection
    utils.closeConnectionDB(connection, cursor)
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
def getOSGPosition(x, y, z, ItemSRID=None):
    """Return (x, y, z) made relative to the associated background offset.

    When ItemSRID is None no background lookup is done and the position is
    returned unchanged (offset 0,0,0). Otherwise the offsets of the
    background with a matching SRID are subtracted.

    Fix: offsets are initialized before the lookup, so a non-None SRID
    with no matching background no longer raises NameError (previously
    offset_x/y/z were referenced without ever being assigned).

    NOTE(review): relies on module-level `cursor`, `utils` and `logger`.
    """
    # Default: no associated background -> no offset
    offset_x, offset_y, offset_z = 0, 0, 0
    if ItemSRID is not None:
        backgroundOffsets, num_backgrounds = utils.fetchDataFromDB(
            cursor, 'SELECT offset_x, offset_y, offset_z, srid FROM ' +
            'OSG_DATA_ITEM_PC_BACKGROUND INNER JOIN RAW_DATA_ITEM ON ' +
            'OSG_DATA_ITEM_PC_BACKGROUND.raw_data_item_id=' +
            'RAW_DATA_ITEM.raw_data_item_id')
        background = [BACK for BACK in backgroundOffsets
                      if BACK[3] == ItemSRID]
        if len(background) == 0:
            logger.warning('No background with the same SRID %s is found'
                           % (ItemSRID))
        #if len(background) > 1:
        #    logger.warning('Multiple backgrounds with the same SRID %s found'
        #                   % (ItemSRID))
        else:
            # found the associated background in the database
            offset_x, offset_y, offset_z, srid = background[0]
    # convert item position to relative to associated background
    x_out = x - offset_x
    y_out = y - offset_y
    z_out = z - offset_z
    return x_out, y_out, z_out
def addMeshes(cursor, itemId, dataSite, srid):
    # Fill dataSite["mesh"] (current meshes) and
    # dataSite["reconstruction_mesh"] (non-current meshes) with entries for
    # every nexus-converted mesh of the given item. Each entry gets a
    # 1-based "id" within its list and a "data_location" URL built from the
    # first *.nxs file found under the nexus abs_path.
    query = """
SELECT C.abs_path, B.current_mesh
FROM raw_data_item A, raw_data_item_mesh B, nexus_data_item_mesh C
WHERE A.raw_data_item_id = B.raw_data_item_id AND
      A.raw_data_item_id = C.raw_data_item_id AND
      A.item_id = %s"""
    queryArgs = [itemId,]
    site_meshes, num_site_meshes = utils.fetchDataFromDB(cursor, query,
                                                         queryArgs)
    meshesData = []
    recMeshesData = []
    if num_site_meshes:
        for (nexusAbsPath, current) in site_meshes:
            mData = {}
            if current:
                mData['id'] = len(meshesData) + 1
                meshesData.append(mData)
            else:
                mData['id'] = len(recMeshesData) + 1
                recMeshesData.append(mData)
            # Rewrite the server-local path into a public URL
            # NOTE(review): assumes exactly one *.nxs exists per folder;
            # glob(...)[0] raises IndexError otherwise — confirm upstream.
            mData["data_location"] = utils.PATTYVIS_DATA_URL_PREFIX + glob.glob(nexusAbsPath + '/*.nxs')[0].replace(utils.PATTYVIS_SERVER_DATA_ROOT,'')
            #mData["mtl_location"] = utils.PATTYVIS_DATA_URL_PREFIX + mtlAbsPath.replace(utils.PATTYVIS_SERVER_DATA_ROOT,'')
            #mData['osg_position'] = getOSGPosition(cursor, srid, meshSrid, x, y, z, xs, ys, zs, h, p, r)
    else:
        logger.warning('No meshes found for item %d SRID %d' % (itemId, srid))
    dataSite["mesh"] = meshesData
    dataSite["reconstruction_mesh"] = recMeshesData
def getOSGPosition(x, y, z, ItemSRID=None):
    """Return (x, y, z) made relative to the associated background offset.

    Duplicate of the earlier getOSGPosition(); same contract and same fix:
    offsets are initialized before the lookup so that a non-None ItemSRID
    with no matching background no longer raises NameError.

    NOTE(review): relies on module-level `cursor`, `utils` and `logger`.
    """
    # Default: no associated background -> no offset
    offset_x, offset_y, offset_z = 0, 0, 0
    if ItemSRID is not None:
        backgroundOffsets, num_backgrounds = utils.fetchDataFromDB(
            cursor, 'SELECT offset_x, offset_y, offset_z, srid FROM ' +
            'OSG_DATA_ITEM_PC_BACKGROUND INNER JOIN RAW_DATA_ITEM ON ' +
            'OSG_DATA_ITEM_PC_BACKGROUND.raw_data_item_id=' +
            'RAW_DATA_ITEM.raw_data_item_id')
        background = [BACK for BACK in backgroundOffsets
                      if BACK[3] == ItemSRID]
        if len(background) == 0:
            logger.warning('No background with the same SRID %s is found'
                           % (ItemSRID))
        #if len(background) > 1:
        #    logger.warning('Multiple backgrounds with the same SRID %s found'
        #                   % (ItemSRID))
        else:
            # found the associated background in the database
            offset_x, offset_y, offset_z, srid = background[0]
    # convert item position to relative to associated background
    x_out = x - offset_x
    y_out = y - offset_y
    z_out = z - offset_z
    return x_out, y_out, z_out
def getOSGLocationId(cursor, aoType, labelName=None, itemId=None,
                     objectId=None, rawDataItemId=None):
    '''Get the OSG location id of an Active-Object-related DB entry.

    aoType selects the lookup: utils.AO_TYPE_OBJ requires itemId and
    objectId; utils.AO_TYPE_LAB requires labelName; any other type
    requires rawDataItemId. Returns the osg_location_id, or None when no
    row matches. Raises Exception when the required arguments are missing.

    Fix: identity comparisons with None use `is None` instead of `== None`.
    '''
    if aoType == utils.AO_TYPE_OBJ:
        if (itemId is None) or (objectId is None):
            raise Exception('Item Object operations require not null itemId and objectId')
        rows, num_rows = utils.fetchDataFromDB(
            cursor,
            'SELECT osg_location_id FROM OSG_ITEM_OBJECT WHERE item_id = %s AND object_number = %s',
            [itemId, objectId])
    elif aoType == utils.AO_TYPE_LAB:
        if labelName is None:
            raise Exception('Label operations require not null labelName')
        rows, num_rows = utils.fetchDataFromDB(
            cursor,
            'SELECT osg_location_id FROM OSG_LABEL WHERE osg_label_name = %s',
            [labelName, ])
    else:
        if rawDataItemId is None:
            raise Exception('Raw data item operations require not null rawDataItemId')
        # UNION over the three OSG data item tables, then join back to
        # OSG_DATA_ITEM to filter by raw_data_item_id
        rows, num_rows = utils.fetchDataFromDB(
            cursor,
            'SELECT osg_location_id FROM ((SELECT * FROM OSG_DATA_ITEM_PC_SITE) UNION (SELECT * FROM OSG_DATA_ITEM_MESH) UNION (SELECT * FROM OSG_DATA_ITEM_PICTURE)) A JOIN OSG_DATA_ITEM USING (osg_data_item_id) WHERE raw_data_item_id = %s',
            [rawDataItemId, ])
    if num_rows == 0:
        return None
    else:
        return rows[0][0]
def fetch_potree_abs_paths(rawDataItemId):
    """Return (dirname_of_abs_path, count) for the potree-converted data
    of the given rawDataItemId. Relies on the module-level `cursor`."""
    sql = ('select abs_path from potree_data_item_pc '
           'natural join raw_data_item_pc where raw_data_item_id = %s')
    paths, count = utils.fetchDataFromDB(cursor, sql, [rawDataItemId, ],
                                         [], False)
    # Go up one dir: potree output lives in SXX/SITE_XX/SITE_XX_levels_Y
    # and the trailing SITE_XX_levels_Y component must be removed.
    return os.path.dirname(paths), count
def fetch_nexus_abs_paths(rawDataItemId):
    """Return (abs_paths, count) for the nexus-converted mesh data of the
    given rawDataItemId. Relies on the module-level `cursor`."""
    sql = ('select abs_path from nexus_data_item_mesh '
           'natural join raw_data_item_mesh where raw_data_item_id = %s')
    paths, count = utils.fetchDataFromDB(cursor, sql, [rawDataItemId, ],
                                         [], False)
    return paths, count
def fetch_osg_abs_paths_picture(rawDataItemId):
    """Return (abs_paths, count) for the OSG picture data of the given
    rawDataItemId. Relies on the module-level `cursor`."""
    sql = ('select abs_path from osg_data_item '
           'natural join osg_data_item_picture where raw_data_item_id = %s')
    paths, count = utils.fetchDataFromDB(cursor, sql, [rawDataItemId, ],
                                         [], False)
    return paths, count
def fetch_osg_abs_paths_pc_bg(rawDataItemId):
    """Return (abs_paths, count) for the OSG point-cloud background data
    of the given rawDataItemId. Relies on the module-level `cursor`."""
    sql = ('select abs_path from osg_data_item_pc_background '
           'where raw_data_item_id = %s')
    paths, count = utils.fetchDataFromDB(cursor, sql, [rawDataItemId, ],
                                         [], False)
    return paths, count
def find_lists(cursor):
    """Return two id lists from the temporary geometry table.

    Returns a tuple (ids_not_in_item, ids_in_item): site_ids present in
    sites_geoms_temp but missing from the item table, and site_ids present
    in both.

    Fix: corrected the typo in the second debug message
    ("and n sites_geoms_temp" -> "and in sites_geoms_temp").
    """
    # find the list of IDs which are in the temporary geometries table,
    # but not in item table
    no_item_well_temp_sql = "SELECT DISTINCT site_id::integer FROM sites_geoms_temp WHERE (site_id NOT IN (SELECT item_id FROM item))"
    no_item_well_temp_ids, num_ids = utils.fetchDataFromDB(
        cursor, no_item_well_temp_sql)
    msg = "The unique item ids not in item table, but in sites_geoms_temp are %s in number" % num_ids
    logger.debug(msg)
    # find the list of IDs which are both in the temporary geometries
    # table and the item table
    both_in_item_and_temp_sql = "SELECT DISTINCT site_id FROM sites_geoms_temp WHERE (site_id IN (SELECT item_id FROM item))"
    both_in_item_and_temp_ids, num_both_ids = utils.fetchDataFromDB(
        cursor, both_in_item_and_temp_sql)
    msg = "The item ids both in item table and in sites_geoms_temp are %s in number" % num_both_ids
    logger.debug(msg)
    return no_item_well_temp_ids, both_in_item_and_temp_ids
def addSiteMetaData(cursor, itemId, dataSite):
    """Add site-level meta-data for itemId into dataSite.

    Sets description_site, site_context and site_interpretation from
    tbl1_site. When the site description is NULL, falls back to the
    description of the first object (by object_id) of the site. Logs a
    warning when the site has no meta-data row at all.

    Fix: None comparison uses `is None` instead of `== None`.
    """
    query = 'SELECT description_site, site_context, site_interpretation FROM tbl1_site WHERE site_id = %s'
    queryArgs = [itemId,]
    site_metadatas, num_site_metadatas = utils.fetchDataFromDB(cursor, query,
                                                               queryArgs)
    if num_site_metadatas:
        (descriptionSite, siteContext, siteInterpretation) = site_metadatas[0]
        if descriptionSite is None:
            # Fall back to the first object description of the site
            query = 'SELECT description_object FROM tbl1_object WHERE site_id = %s ORDER BY object_id'
            queryArgs = [itemId,]
            object_metadatas, num_object_metadatas = utils.fetchDataFromDB(
                cursor, query, queryArgs)
            if num_object_metadatas:
                descriptionSite = object_metadatas[0][0]
        dataSite["description_site"] = descriptionSite
        dataSite["site_context"] = siteContext
        dataSite["site_interpretation"] = siteInterpretation
    else:
        logger.warning('No meta-data found for item %d' % itemId)
def fetch_abs_path(rawDataItemId):
    """Return the absolute data item path for the given rawDataItemId.
    Relies on the module-level `cursor`."""
    sql = 'select abs_path from raw_data_item where raw_data_item_id = %s'
    path, _count = utils.fetchDataFromDB(cursor, sql, [rawDataItemId, ],
                                         [], False)
    return path
def addObjectsMetaData(cursor, itemId, jsonSite, srid):
    # Duplicate of the earlier addObjectsMetaData: collects per-object
    # meta-data (attributes, materials, OSG position) for the given
    # site/item and stores it in jsonSite["objects"].
    query = 'SELECT object_id, in_situ, ancient, condition, description_object, object_type, object_interpretation, period, date_specific, description_restorations FROM tbl1_object WHERE site_id = %s'
    queryArgs = [itemId,]
    site_objects, num_site_objects = utils.fetchDataFromDB(cursor, query,
                                                           queryArgs)
    objectsData = []
    for (object_id, in_situ, ancient, condition, description_object,
         object_type, object_interpretation, period, date_specific,
         description_restorations) in site_objects:
        # Base attributes copied 1:1 from the tbl1_object row
        objectData = {}
        objectData['object_id'] = object_id
        objectData['in_situ'] = in_situ
        objectData['ancient'] = ancient
        objectData['condition'] = condition
        objectData['description_object'] = description_object
        objectData['object_type'] = object_type
        objectData['object_interpretation'] = object_interpretation
        objectData['period'] = period
        objectData['date_specific'] = date_specific
        objectData['description_restorations'] = description_restorations
        # Object materials: one dict per material row, 1-based "id"
        objectsMaterialData= []
        query = "SELECT material_type, material_subtype, material_technique FROM tbl2_object_material WHERE site_id = %s AND object_id = %s"
        queryArgs = [itemId,object_id]
        object_materials, num_object_materials = utils.fetchDataFromDB(
            cursor, query, queryArgs)
        for (material_type, material_subtype,
             material_technique) in object_materials:
            objectMaterialData = {}
            objectMaterialData["id"] = len(objectsMaterialData) + 1
            objectMaterialData["material_type"] = material_type
            objectMaterialData["material_subtype"] = material_subtype
            objectMaterialData["material_technique"] = material_technique
            objectsMaterialData.append(objectMaterialData)
        objectData["object_material"] = objectsMaterialData
        # Object location: first matching osg_location row (if any)
        query = "SELECT B.srid, B.x, B.y, B.z, B.xs, B.ys, B.zs, B.h, B.p, B.r FROM osg_item_object A, osg_location B WHERE A.item_id = %s AND A.object_number = %s"
        queryArgs = [itemId,object_id]
        object_locations, num_object_locations = utils.fetchDataFromDB(
            cursor, query,
            queryArgs)
        if num_object_locations:
            (locationSrid, x, y, z, xs, ys, zs, h, p ,r) = object_locations[0]
            # Re-express the stored position relative to the target srid
            objectData['osg_position'] = getOSGPosition(cursor, srid,
                                                        locationSrid, x, y, z,
                                                        xs, ys, zs, h, p, r)
        objectsData.append(objectData)
    jsonSite["objects"] = objectsData
def fetch_abs_path(rawDataItemId):
    """Return the absolute data item path for the given rawDataItemId.
    Relies on the module-level `cursor`."""
    sql = 'select abs_path from raw_data_item where raw_data_item_id = %s'
    path, _count = utils.fetchDataFromDB(cursor, sql, [rawDataItemId, ],
                                         [], False)
    return path
def fetch_nexus_abs_paths(rawDataItemId):
    """Return (abs_paths, count) for the nexus-converted mesh data of the
    given rawDataItemId. Relies on the module-level `cursor`."""
    sql = ('select abs_path from nexus_data_item_mesh '
           'natural join raw_data_item_mesh where raw_data_item_id = %s')
    paths, count = utils.fetchDataFromDB(cursor, sql, [rawDataItemId, ],
                                         [], False)
    return paths, count
def fetch_osg_abs_paths_picture(rawDataItemId):
    """Return (abs_paths, count) for the OSG picture data of the given
    rawDataItemId. Relies on the module-level `cursor`."""
    sql = ('select abs_path from osg_data_item '
           'natural join osg_data_item_picture where raw_data_item_id = %s')
    paths, count = utils.fetchDataFromDB(cursor, sql, [rawDataItemId, ],
                                         [], False)
    return paths, count
def fetch_osg_abs_paths_pc_bg(rawDataItemId):
    """Return (abs_paths, count) for the OSG point-cloud background data
    of the given rawDataItemId. Relies on the module-level `cursor`."""
    sql = ('select abs_path from osg_data_item_pc_background '
           'where raw_data_item_id = %s')
    paths, count = utils.fetchDataFromDB(cursor, sql, [rawDataItemId, ],
                                         [], False)
    return paths, count
def deleteOSG(cursor, aoType, labelName=None, itemId=None, objectId=None,
              rawDataItemId=None):
    '''Delete an OSG item object or an OSG label, and its related
    OSG location entry.

    aoType selects what is deleted: utils.AO_TYPE_OBJ (requires itemId and
    objectId) or utils.AO_TYPE_LAB (requires labelName). Any other type
    raises an Exception.

    Fixes: (1) `is None` instead of `== None`; (2) when the location
    lookup returns no rows an explicit Exception is raised instead of an
    opaque IndexError on data[0][0]; (3) the unsupported-type error no
    longer concatenates a possibly-None labelName (which raised TypeError).
    '''
    if aoType == utils.AO_TYPE_OBJ:
        if (itemId is None) or (objectId is None):
            raise Exception('Item Object operations require not null itemId and objectId')
        # extract osg_location_id of the object
        data, rows = utils.fetchDataFromDB(
            cursor,
            'SELECT osg_location_id FROM OSG_ITEM_OBJECT WHERE item_id = %s AND object_number = %s',
            [itemId, objectId])
        if not rows:
            raise Exception('No OSG item object found for item %s object %s'
                            % (itemId, objectId))
        utils.dbExecute(
            cursor,
            'DELETE FROM OSG_ITEM_OBJECT WHERE item_id = %s AND object_number = %s',
            [itemId, objectId])
        # delete from OSG_LOCATION
        utils.dbExecute(cursor,
                        'DELETE FROM OSG_LOCATION WHERE osg_location_id = %s',
                        [data[0][0], ])
    elif aoType == utils.AO_TYPE_LAB:
        if labelName is None:
            raise Exception('Label operations require not null labelName')
        # extract osg_location_id of the label
        data, rows = utils.fetchDataFromDB(
            cursor,
            'SELECT osg_location_id FROM OSG_LABEL WHERE osg_label_name = %s',
            [labelName, ])
        if not rows:
            raise Exception('No OSG label found with name %s' % labelName)
        # delete from OSG_LABEL
        utils.dbExecute(cursor,
                        'DELETE FROM OSG_LABEL WHERE osg_label_name = %s',
                        [labelName, ])
        # delete from OSG_LOCATION
        utils.dbExecute(cursor,
                        'DELETE FROM OSG_LOCATION WHERE osg_location_id = %s',
                        [data[0][0], ])
    else:
        raise Exception('Not possible to delete object ' + str(labelName))
def fetch_potree_abs_paths(rawDataItemId):
    """Return (dirname_of_abs_path, count) for the potree-converted data
    of the given rawDataItemId. Relies on the module-level `cursor`."""
    sql = ('select abs_path from potree_data_item_pc '
           'natural join raw_data_item_pc where raw_data_item_id = %s')
    paths, count = utils.fetchDataFromDB(cursor, sql, [rawDataItemId, ],
                                         [], False)
    # Go up one dir: potree output lives in SXX/SITE_XX/SITE_XX_levels_Y
    # and the trailing SITE_XX_levels_Y component must be removed.
    return os.path.dirname(paths), count
def run(args):
    """Entry point: process items in parallel with args.cores workers.

    Collects the item ids to process (all non-background items when
    args.itemid is empty, otherwise the given comma-separated list), then
    fans the work out to runChild() processes over multiprocessing queues.

    Fix: use print() consistently (the original mixed the Python 2
    'print msg' statement with 'print(msg)').
    """
    # start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=utils.DEFAULT_LOG_LEVEL)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)
    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost,
                                           args.dbport)
    itemIds = []
    if args.itemid == '':
        data, num = utils.fetchDataFromDB(
            cursor, 'SELECT item_id FROM ITEM WHERE NOT background')
        for (itemId,) in data:
            itemIds.append(itemId)
    else:
        itemIds = args.itemid.split(',')
    # close the connection to the DB (workers open their own)
    utils.closeConnectionDB(connection, cursor)
    # Create queues
    itemsQueue = multiprocessing.Queue()    # The queue of tasks (queries)
    resultsQueue = multiprocessing.Queue()  # The queue of results
    for itemId in itemIds:
        itemsQueue.put(int(itemId))
    # we add as many None jobs as workers to tell them to terminate
    # (queue is FIFO)
    for i in range(args.cores):
        itemsQueue.put(None)
    procs = []
    # We start args.cores worker processes
    for i in range(args.cores):
        procs.append(multiprocessing.Process(
            target=runChild,
            args=(i, itemsQueue, resultsQueue, args.las, args.dbname,
                  args.dbuser, args.dbpass, args.dbhost, args.dbport)))
        procs[-1].start()
    # Drain one result per queued item so the workers can finish
    for i in range(len(itemIds)):
        [procIndex, itemId] = resultsQueue.get()
    # wait for all workers to finish their execution
    for i in range(args.cores):
        procs[i].join()
    # measure elapsed time
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
def run(opts):
    """Entry point: generate OSG data for pending raw data items.

    opts must provide: log, dbname, dbuser, dbpass, dbhost, dbport,
    osgDir, and itemid ('?' lists all raw data items, '' converts all
    pending non-background items, '!' only lists pending items, otherwise
    a comma-separated list of raw_data_item_ids).

    Fix: use print() consistently (the original mixed the Python 2
    'print msg' statement with 'print(msg)').
    """
    # Start logging
    #logname = os.path.basename(__file__) + '.log'
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)
    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)
    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        # Non-background items without any OSG conversion yet
        query = """
SELECT raw_data_item_id, abs_path
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id)
WHERE NOT background AND raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PC_SITE
          UNION
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_MESH
          UNION
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PICTURE)"""
        # Get the list of items that are not converted yet (we sort by
        # background to have the background converted first)
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(
            cursor, query)
        for (rawDataItemId, absPath) in raw_data_items:
            if opts.itemid == '':
                createOSG(cursor, rawDataItemId, opts.osgDir)
            else:
                # '!' mode: only report what would be converted
                m = '\t'.join((str(rawDataItemId), absPath))
                print(m)
                logging.info(m)
    else:
        for rawDataItemId in opts.itemid.split(','):
            createOSG(cursor, int(rawDataItemId), opts.osgDir)
    # close DB connection
    utils.closeConnectionDB(connection, cursor)
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
def getBackgroundOffset(data, cursor):
    '''Get the offset and srid for the background used in the conf.xml file.

    data is the parsed XML tree; the single staticObject whose url lives
    under a 'PC/BACK/' directory identifies the background, whose
    (offset_x, offset_y, offset_z, srid) row is returned from the DB.

    Fix: the error message claimed "more than 1 background" but the
    condition `len(matching) != 1` also fires for zero matches; the
    message now reports the actual count.
    '''
    staticobj = [os.path.dirname(x.get('url'))
                 for x in data.xpath('//staticObject')]
    matching = [s for s in staticobj if 'PC/BACK/' in s]
    if len(matching) != 1:
        raise Exception('Expected exactly 1 background in xml file, found %d'
                        % len(matching))
    else:
        rows, numitems = utils.fetchDataFromDB(cursor, """
SELECT offset_x, offset_y, offset_z, srid
FROM OSG_DATA_ITEM_PC_BACKGROUND
INNER JOIN RAW_DATA_ITEM USING (raw_data_item_id)
WHERE OSG_DATA_ITEM_PC_BACKGROUND.abs_path=%s""",
            [os.path.join(utils.DEFAULT_DATA_DIR,
                          utils.DEFAULT_OSG_DATA_DIR, matching[0])])
        return rows[0]
def run(opts):
    """Entry point: generate OSG data for pending raw data items.

    Duplicate of the earlier OSG run(); same contract. opts must provide:
    log, dbname, dbuser, dbpass, dbhost, dbport, osgDir and itemid
    ('?' list all, '' convert all pending, '!' list pending, otherwise a
    comma-separated list of raw_data_item_ids).

    Fix: use print() consistently (the original mixed the Python 2
    'print msg' statement with 'print(msg)').
    """
    # Start logging
    #logname = os.path.basename(__file__) + '.log'
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=opts.log)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print(msg)
    logging.info(msg)
    # database connection
    connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser,
                                           opts.dbpass, opts.dbhost,
                                           opts.dbport)
    if opts.itemid == '?':
        utils.listRawDataItems(cursor)
        return
    elif opts.itemid == '' or opts.itemid == '!':
        # Non-background items without any OSG conversion yet
        query = """
SELECT raw_data_item_id, abs_path
FROM RAW_DATA_ITEM JOIN ITEM USING (item_id)
WHERE NOT background AND raw_data_item_id NOT IN (
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PC_SITE
          UNION
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_MESH
          UNION
          SELECT raw_data_item_id FROM OSG_DATA_ITEM_PICTURE)"""
        # Get the list of items that are not converted yet (we sort by
        # background to have the background converted first)
        raw_data_items, num_raw_data_items = utils.fetchDataFromDB(cursor,
                                                                   query)
        for (rawDataItemId, absPath) in raw_data_items:
            if opts.itemid == '':
                createOSG(cursor, rawDataItemId, opts.osgDir)
            else:
                # '!' mode: only report what would be converted
                m = '\t'.join((str(rawDataItemId), absPath))
                print(m)
                logging.info(m)
    else:
        for rawDataItemId in opts.itemid.split(','):
            createOSG(cursor, int(rawDataItemId), opts.osgDir)
    # close DB connection
    utils.closeConnectionDB(connection, cursor)
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
def addSiteMetaData(cursor, itemId, dataSite):
    """Add site-level meta-data for itemId into dataSite.

    Duplicate of the earlier addSiteMetaData(); same contract: sets
    description_site (falling back to the first object description when
    NULL), site_context and site_interpretation, or logs a warning when no
    meta-data exists.

    Fix: None comparison uses `is None` instead of `== None`.
    """
    query = 'SELECT description_site, site_context, site_interpretation FROM tbl1_site WHERE site_id = %s'
    queryArgs = [ itemId, ]
    site_metadatas, num_site_metadatas = utils.fetchDataFromDB(
        cursor, query, queryArgs)
    if num_site_metadatas:
        (descriptionSite, siteContext, siteInterpretation) = site_metadatas[0]
        if descriptionSite is None:
            # Fall back to the first object description of the site
            query = 'SELECT description_object FROM tbl1_object WHERE site_id = %s ORDER BY object_id'
            queryArgs = [ itemId, ]
            object_metadatas, num_object_metadatas = utils.fetchDataFromDB(
                cursor, query, queryArgs)
            if num_object_metadatas:
                descriptionSite = object_metadatas[0][0]
        dataSite["description_site"] = descriptionSite
        dataSite["site_context"] = siteContext
        dataSite["site_interpretation"] = siteInterpretation
    else:
        logger.warning('No meta-data found for item %d' % itemId)
def addPointCloud(cursor, itemId, dataSite, srid):
    # Fill dataSite["pointcloud"] with one entry per potree-converted point
    # cloud of the item: a public cloud.js URL, the OSG position converted
    # to the target srid, and a bounding box. The stored bbox is used when
    # the point cloud already is in the target srid; otherwise the bbox is
    # derived from the OSG position and scale.
    query = """
SELECT C.abs_path, A.srid,
       B.minx, B.miny, B.minz, B.maxx, B.maxy, B.maxz,
       F.srid, F.x, F.y, F.z, F.xs, F.ys, F.zs, F.h, F.p, F.r
FROM raw_data_item A, raw_data_item_pc B, potree_data_item_pc C,
     osg_data_item_pc_site D, osg_data_item E, osg_location F
WHERE A.raw_data_item_id = B.raw_data_item_id AND
      B.raw_data_item_id = C.raw_data_item_id AND
      B.raw_data_item_id = D.raw_data_item_id AND
      D.osg_data_item_id = E.osg_data_item_id AND
      E.osg_location_id = F.osg_location_id AND
      A.item_id = %s"""
    queryArgs = [ itemId, ]
    site_pcs, num_site_pcs = utils.fetchDataFromDB(cursor, query, queryArgs)
    pcData = []
    if num_site_pcs:
        for (pcAbsPath, pcSRID, pcMinx, pcMiny, pcMinz, pcMaxx, pcMaxy,
             pcMaxz, osgSRID, x, y, z, xs, ys, zs, h, p, r) in site_pcs:
            pData = {}
            # 1-based sequential id within this item's point cloud list
            pData["id"] = len(pcData) + 1
            # Rewrite the server-local path into a public cloud.js URL
            pData[
                "dataLocation"] = utils.PATTYVIS_DATA_URL_PREFIX + pcAbsPath.replace(
                    utils.PATTYVIS_SERVER_DATA_ROOT, '') + "/cloud.js"
            osgPosition = getOSGPosition(cursor, srid, osgSRID, x, y, z, xs,
                                         ys, zs, h, p, r)
            pData["osg_position"] = osgPosition
            if pcSRID == srid:
                # Point cloud already in the target SRID: stored bbox is valid
                pData["bbox"] = [
                    pcMinx, pcMiny, pcMinz, pcMaxx, pcMaxy, pcMaxz
                ]
            else:
                # Different SRID: approximate the bbox as a box centered on
                # the OSG position with the OSG scale as its extent
                pData["bbox"] = [
                    osgPosition['x'] - (osgPosition['xs'] / 2.),
                    osgPosition['y'] - (osgPosition['ys'] / 2.),
                    osgPosition['z'] - (osgPosition['zs'] / 2.),
                    osgPosition['x'] + (osgPosition['xs'] / 2.),
                    osgPosition['y'] + (osgPosition['ys'] / 2.),
                    osgPosition['z'] + (osgPosition['zs'] / 2.)
                ]
            pcData.append(pData)
    else:
        logger.warning('No potree point cloud found for item %d SRID %d' %
                       (itemId, srid))
    dataSite["pointcloud"] = pcData
def addThumbnail(cursor, itemId, jsonSite):
    """Add a thumbnail URL for itemId into jsonSite["thumbnail"].

    Prefers a picture flagged as thumbnail; when none is flagged, falls
    back to the first picture of the item. The URL points to the first
    file inside the picture's directory. Logs a warning when the item has
    no pictures at all.

    Fix: None comparison uses `is None` instead of `== None`.
    """
    query = 'SELECT A.abs_path, B.thumbnail FROM raw_data_item A, raw_data_item_picture B WHERE A.raw_data_item_id = B.raw_data_item_id AND A.item_id = %s'
    queryArgs = [itemId,]
    site_images, num_site_images = utils.fetchDataFromDB(cursor, query,
                                                         queryArgs)
    if num_site_images:
        imageAbsPath = None
        # We use the thumbnail if available
        for (absPath, thumbnail) in site_images:
            if thumbnail:
                imageAbsPath = absPath
        # If no thumbnail is available we just use the first image
        if imageAbsPath is None:
            (absPath, thumbnail) = site_images[0]
            imageAbsPath = absPath
        # URL = prefix + server-relative dir + first file in the dir
        jsonSite["thumbnail"] = utils.PATTYVIS_DATA_URL_PREFIX + imageAbsPath.replace(utils.PATTYVIS_SERVER_DATA_ROOT,'') + '/' + os.listdir(imageAbsPath)[0]
    else:
        logger.warning('No image found for item %d' % itemId)
def createNexus(cursor, itemId, nexusDir):
    # Convert the PLY mesh of a raw data item into a Nexus (.nxs) file.
    # Looks up the item's abs_path, finds the first *.ply/*.PLY file in it,
    # runs the external `nxsbuild` tool, and errors out (via the module's
    # error() helper) when no input or no output file is found.
    # extract abspath using raw_data_item_id
    data_items, num_items = utils.fetchDataFromDB(
        cursor, "SELECT abs_path, item_id FROM RAW_DATA_ITEM WHERE " +
        "raw_data_item_id = %s", (itemId,))
    abspath, site_id = data_items[0]
    # Only the first PLY file found is converted
    inputFiles = (glob.glob(abspath + '/*.ply') +
                  glob.glob(abspath + '/*.PLY'))
    if len(inputFiles):
        inputFile = inputFiles[0]
    else:
        error('none PLY file was found.', abspath)
    if not os.path.isfile(inputFile):
        error('none PLY file was found.', abspath)
    inputFileName = os.path.basename(inputFile)
    outputFileName = inputFileName + '.nxs'
    # extract inType & outFolder, create outFolder in non-existent
    inType, inKind, outFolder = extract_inType(abspath, site_id, nexusDir)
    if os.path.isfile(abspath):
        # input was a file -> raise IOError (abs_path must be a directory)
        error('Database key absPath should define a directory, ' +
              'file detected: ' + abspath, outFolder)
        # os.chdir(os.path.dirname(inFile))
    else:  # input is already a directory
        # nxsbuild is run from inside the input directory
        os.chdir(abspath)
    # create the output folder
    os.system('mkdir -p ' + outFolder)
    # Run the nxsbuild in the docker container nxs in the docker-machine
    outputPath = os.path.join(outFolder, outputFileName)
    command = "nxsbuild " + inputFileName + " -o " + outputPath
    logging.info(command)
    os.system(command)
    if not os.path.isfile(outputPath):
        error('none Nexus file was generated (found in ' + outFolder + ').',
              outFolder)
def createPOTree(cursor, itemId, potreeDir, levels):
    """Run the POTree converter for the point cloud of a raw data item.

    Looks up the item's abs_path (which must be a directory), builds the
    converter command with the requested number of levels, runs it, and
    errors out (via the module's error() helper) when no output is found.

    Fix: the original appended " &> logFile" to the command and passed it
    through shlex.split to subprocess.Popen without a shell — the
    redirection tokens were handed verbatim to the converter and nothing
    was ever written to the log file. The log file is now written by
    redirecting the child's stdout/stderr to it directly.
    """
    (mainOsgb, xmlPath, offsets) = (None, None, (0, 0, 0))
    # extract abspath using raw_data_item_id
    data_items, num_items = utils.fetchDataFromDB(
        cursor, "SELECT abs_path, item_id FROM RAW_DATA_ITEM WHERE " +
        "raw_data_item_id = %s", (itemId, ))
    abspath, site_id = data_items[0]
    # extract inType & outFolder, create outFolder in non-existent
    inType, inKind, outFolder = extract_inType(abspath, site_id, potreeDir,
                                               levels)
    inFile = abspath
    if os.path.isfile(inFile):
        # input was a file -> raise IOError
        error('Database key abspath should define a directory, ' +
              'file detected: ' + inFile, outFolder)
        # os.chdir(os.path.dirname(inFile))
    else:  # input is already a directory
        os.chdir(inFile)
    outputPrefix = 'data'
    logFile = os.path.join(outFolder, outputPrefix + '.log')
    command = CONVERTER_COMMAND + ' -o ' + outFolder + ' -l ' + \
        str(levels) + ' --output-format ' + outputFormat + ' --source ' + \
        inFile
    logging.info(command)
    args = shlex.split(command)
    # Redirect converter output to the log file (shell '&>' does not work
    # with shell=False)
    logHandle = open(logFile, 'w')
    try:
        subprocess.Popen(args, stdout=logHandle,
                         stderr=subprocess.STDOUT).communicate()
    finally:
        logHandle.close()
    ofiles = sorted(glob.glob(os.path.join(outFolder, '*')))
    if len(ofiles) == 0:
        error('none POTree file was generated (found in ' + outFolder +
              '). Check log: ' + logFile, outFolder)
def createPOTree(cursor, itemId, potreeDir, levels):
    """Run the POTree converter for the point cloud of a raw data item.

    Duplicate of the earlier createPOTree(); same contract and same fix:
    the original passed shell redirection tokens (" &> logFile") to a
    non-shell subprocess, so the log file was never written — the child's
    stdout/stderr are now redirected to the log file directly.
    """
    (mainOsgb, xmlPath, offsets) = (None, None, (0, 0, 0))
    # extract abspath using raw_data_item_id
    data_items, num_items = utils.fetchDataFromDB(
        cursor, "SELECT abs_path, item_id FROM RAW_DATA_ITEM WHERE " +
        "raw_data_item_id = %s", (itemId,))
    abspath, site_id = data_items[0]
    # extract inType & outFolder, create outFolder in non-existent
    inType, inKind, outFolder = extract_inType(abspath, site_id, potreeDir,
                                               levels)
    inFile = abspath
    if os.path.isfile(inFile):
        # input was a file -> raise IOError
        error('Database key abspath should define a directory, ' +
              'file detected: ' + inFile, outFolder)
        # os.chdir(os.path.dirname(inFile))
    else:  # input is already a directory
        os.chdir(inFile)
    outputPrefix = 'data'
    logFile = os.path.join(outFolder, outputPrefix + '.log')
    command = CONVERTER_COMMAND + ' -o ' + outFolder + ' -l ' + \
        str(levels) + ' --output-format ' + outputFormat + ' --source ' + \
        inFile
    logging.info(command)
    args = shlex.split(command)
    # Redirect converter output to the log file (shell '&>' does not work
    # with shell=False)
    logHandle = open(logFile, 'w')
    try:
        subprocess.Popen(args, stdout=logHandle,
                         stderr=subprocess.STDOUT).communicate()
    finally:
        logHandle.close()
    ofiles = sorted(glob.glob(os.path.join(outFolder, '*')))
    if len(ofiles) == 0:
        error('none POTree file was generated (found in ' + outFolder +
              '). Check log: ' + logFile, outFolder)
def addMeshes(cursor, itemId, dataSite, srid):
    """Attach the Nexus meshes of item *itemId* to the JSON dict *dataSite*.

    Meshes flagged as current go into ``dataSite["mesh"]`` and the others
    into ``dataSite["reconstruction_mesh"]``; each entry carries a
    sequential ``id`` and the public URL of its ``.nxs`` file. Logs a
    warning when the item has no meshes at all.
    """
    query = """
SELECT C.abs_path, B.current_mesh
FROM raw_data_item A, raw_data_item_mesh B, nexus_data_item_mesh C
WHERE A.raw_data_item_id = B.raw_data_item_id AND
      A.raw_data_item_id = C.raw_data_item_id AND
      A.item_id = %s"""
    rows, numRows = utils.fetchDataFromDB(cursor, query, [itemId, ])
    currentMeshes = []
    reconstructionMeshes = []
    if numRows:
        for (nexusAbsPath, isCurrent) in rows:
            # Current meshes and reconstruction meshes are numbered
            # independently, so pick the target list first.
            target = currentMeshes if isCurrent else reconstructionMeshes
            entry = {'id': len(target) + 1}
            target.append(entry)
            # Public URL of the first .nxs file inside the Nexus folder:
            # swap the server data root for the web URL prefix.
            nxsFile = glob.glob(nexusAbsPath + '/*.nxs')[0]
            entry["data_location"] = utils.PATTYVIS_DATA_URL_PREFIX + \
                nxsFile.replace(utils.PATTYVIS_SERVER_DATA_ROOT, '')
            #mData["mtl_location"] = utils.PATTYVIS_DATA_URL_PREFIX + mtlAbsPath.replace(utils.PATTYVIS_SERVER_DATA_ROOT,'')
            #mData['osg_position'] = getOSGPosition(cursor, srid, meshSrid, x, y, z, xs, ys, zs, h, p, r)
    else:
        logger.warning('No meshes found for item %d SRID %d' % (itemId, srid))
    dataSite["mesh"] = currentMeshes
    dataSite["reconstruction_mesh"] = reconstructionMeshes
def update_geometries(list_ids, new):
    """Insert or update the footprint geometries of the given items.

    For every ``(site_id,)`` tuple in *list_ids* the union of the temporary
    footprint geometries (table ``sites_geoms_temp``) is transformed to the
    project SRID and written to the ITEM table.

    Parameters:
        list_ids: list of single-element tuples holding the site/item ids.
        new:      True  -> INSERT new rows into ITEM (plus an ITEM_OBJECT
                           entry for the whole item),
                  False -> UPDATE the existing ITEM rows.

    NOTE(review): relies on module-level ``cursor`` and ``logger`` globals
    rather than parameters — confirm they are set before calling.
    """
    number = 1
    for (sid, ) in list_ids:
        msg = "Processing %s site from %s sites in total" % (number,
                                                             len(list_ids))
        # print msg
        logger.debug(msg)
        # Union all temporary geometries of this site and transform the
        # result to the common SRID; yields one row per site_id.
        fetch_geom_sql = "SELECT site_id AS item_id, ST_Multi(ST_Transform( ST_Union(geom), %s)) AS geom FROM sites_geoms_temp WHERE site_id = %s GROUP BY site_id"
        data, num = utils.fetchDataFromDB(cursor, fetch_geom_sql,
                                          [utils.SRID, sid], [], False)
        item_id = data[0][0]
        background = False
        geometry = data[0][1]
        if new:
            insert_geom_sql = "INSERT INTO item VALUES (%s,%s,%s)"
            utils.dbExecute(cursor, insert_geom_sql,
                            [item_id, background, geometry])
            # We also insert for the new item an entry in item_object
            utils.dbExecute(
                cursor,
                "INSERT INTO ITEM_OBJECT (item_id, object_number) VALUES (%s,%s)",
                [item_id, utils.ITEM_OBJECT_NUMBER_ITEM])
        else:
            update_geom_sql = "UPDATE item SET background=%s,geom=%s WHERE item_id=%s"
            utils.dbExecute(cursor, update_geom_sql,
                            [background, geometry, item_id])
        number = number + 1
    msg = "The geometries have been updated!"
    logger.debug(msg)
def addThumbnail(cursor, itemId, jsonSite):
    """Attach a thumbnail image URL to *jsonSite* for item *itemId*.

    Prefers a picture flagged as thumbnail (the last flagged row wins);
    otherwise falls back to the first picture returned by the query. Logs
    a warning when the item has no pictures at all.
    """
    query = 'SELECT A.abs_path, B.thumbnail FROM raw_data_item A, raw_data_item_picture B WHERE A.raw_data_item_id = B.raw_data_item_id AND A.item_id = %s'
    queryArgs = [itemId, ]
    site_images, num_site_images = utils.fetchDataFromDB(cursor, query,
                                                         queryArgs)
    if num_site_images:
        imageAbsPath = None
        # We use the thumbnail if available
        for (absPath, thumbnail) in site_images:
            if thumbnail:
                imageAbsPath = absPath
        # If no thumbnail is available we just use the first image
        if imageAbsPath is None:  # fixed: identity comparison with None
            (absPath, thumbnail) = site_images[0]
            imageAbsPath = absPath
        # NOTE(review): os.listdir order is arbitrary — if the image folder
        # contains several files, the chosen one is nondeterministic.
        jsonSite["thumbnail"] = utils.PATTYVIS_DATA_URL_PREFIX + \
            imageAbsPath.replace(utils.PATTYVIS_SERVER_DATA_ROOT, '') + \
            '/' + os.listdir(imageAbsPath)[0]
    else:
        logger.warning('No image found for item %d' % itemId)
def addPointCloud(cursor, itemId, dataSite, srid):
    """Add the POTree point clouds of item *itemId* to dataSite["pointcloud"].

    Each entry gets a sequential "id", the public URL of its cloud.js, the
    OSG position and a bounding box. *srid* is the SRID of the active
    background: when a point cloud shares that SRID its stored extent is
    used directly, otherwise the box is derived from the OSG position.
    """
    query = """
SELECT C.abs_path, A.srid, B.minx, B.miny, B.minz, B.maxx, B.maxy, B.maxz, F.srid, F.x, F.y, F.z, F.xs, F.ys, F.zs, F.h, F.p, F.r
FROM raw_data_item A, raw_data_item_pc B, potree_data_item_pc C, osg_data_item_pc_site D, osg_data_item E, osg_location F
WHERE A.raw_data_item_id = B.raw_data_item_id AND
      B.raw_data_item_id = C.raw_data_item_id AND
      B.raw_data_item_id = D.raw_data_item_id AND
      D.osg_data_item_id = E.osg_data_item_id AND
      E.osg_location_id = F.osg_location_id AND
      A.item_id = %s"""
    queryArgs = [itemId, ]
    site_pcs, num_site_pcs = utils.fetchDataFromDB(cursor, query, queryArgs)
    pcData = []
    if num_site_pcs:
        for (pcAbsPath, pcSRID, pcMinx, pcMiny, pcMinz, pcMaxx, pcMaxy,
                pcMaxz, osgSRID, x, y, z, xs, ys, zs, h, p, r) in site_pcs:
            pData = {}
            pData["id"] = len(pcData) + 1
            # Public URL of the POTree cloud descriptor.
            pData["dataLocation"] = utils.PATTYVIS_DATA_URL_PREFIX + \
                pcAbsPath.replace(utils.PATTYVIS_SERVER_DATA_ROOT, '') + \
                "/cloud.js"
            osgPosition = getOSGPosition(cursor, srid, osgSRID, x, y, z,
                                         xs, ys, zs, h, p, r)
            pData["osg_position"] = osgPosition
            if pcSRID == srid:
                # Same SRID as the background: stored extent is valid as-is.
                pData["bbox"] = [pcMinx, pcMiny, pcMinz,
                                 pcMaxx, pcMaxy, pcMaxz]
            else:
                # Different SRID: approximate the box as the OSG position
                # plus/minus half the scale on each axis.
                pData["bbox"] = [
                    osgPosition['x'] - (osgPosition['xs'] / 2.),
                    osgPosition['y'] - (osgPosition['ys'] / 2.),
                    osgPosition['z'] - (osgPosition['zs'] / 2.),
                    osgPosition['x'] + (osgPosition['xs'] / 2.),
                    osgPosition['y'] + (osgPosition['ys'] / 2.),
                    osgPosition['z'] + (osgPosition['zs'] / 2.)]
            pcData.append(pData)
    else:
        logger.warning('No potree point cloud found for item %d SRID %d' %
                       (itemId, srid))
    dataSite["pointcloud"] = pcData
def run(args):
    """Process items in parallel with *args.cores* worker processes.

    Collects the item ids to process (all non-background items, or the
    comma-separated list in args.itemid), pushes them onto a task queue and
    spawns ``runChild`` workers that pull from it; one ``None`` sentinel per
    worker signals termination.
    """
    # start logging
    logname = os.path.splitext(os.path.basename(__file__))[0] + '.log'
    utils.start_logging(filename=logname, level=utils.DEFAULT_LOG_LEVEL)
    localtime = utils.getCurrentTimeAsAscii()
    t0 = time.time()
    msg = os.path.basename(__file__) + ' script starts at %s.' % localtime
    print msg
    logging.info(msg)
    # connect to the DB
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost,
                                           args.dbport)
    itemIds = []
    if args.itemid == '':
        data, num = utils.fetchDataFromDB(
            cursor, 'SELECT item_id FROM ITEM WHERE NOT background')
        for (itemId, ) in data:
            itemIds.append(itemId)
    else:
        itemIds = args.itemid.split(',')
    # close the conection to the DB (workers open their own connections)
    utils.closeConnectionDB(connection, cursor)
    # Create queues
    itemsQueue = multiprocessing.Queue()  # The queue of tasks (queries)
    resultsQueue = multiprocessing.Queue()  # The queue of results
    for itemId in itemIds:
        itemsQueue.put(int(itemId))
    for i in range(args.cores):
        # we add as many None jobs as numUsers to tell them to terminate
        # (queue is FIFO)
        itemsQueue.put(None)
    procs = []
    # We start numUsers users processes
    for i in range(args.cores):
        procs.append(
            multiprocessing.Process(
                target=runChild,
                args=(i, itemsQueue, resultsQueue, args.las, args.dbname,
                      args.dbuser, args.dbpass, args.dbhost, args.dbport)))
        procs[-1].start()
    # Drain one result per submitted item so workers never block on a full
    # results queue.
    for i in range(len(itemIds)):
        [procIndex, itemId] = resultsQueue.get()
    # wait for all users to finish their execution
    for i in range(args.cores):
        procs[i].join()
    # measure elapsed time
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logging.info(msg)
def run(args):
    """Generate the PattyVis JSON description file for all non-background items.

    Connects to the DB, optionally loads the background offset for
    args.srid into the module-level offsetX/Y/Z globals, gathers footprint,
    thumbnail, metadata, point clouds, meshes and object metadata for each
    item and saves everything with save2JSON to args.output.

    NOTE(review): an identical ``run`` is defined again later in this file
    and overrides this one — confirm which copy is intended.
    """
    global logger
    global offsetX
    global offsetY
    global offsetZ
    logname = os.path.basename(args.output) + '.log'
    logger = utils.start_logging(filename=logname, level=args.log)
    # start logging
    localtime = utils.getCurrentTimeAsAscii()
    msg = __file__ + ' script logging start at %s' % localtime
    print msg
    logger.info(msg)
    t0 = time.time()
    # connect to DB and get a cursor
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost)
    # We assume the osg location is relative
    # We need to make it absolute by adding the offset of the background
    # with srid as provided
    query = """
SELECT C.offset_x, C.offset_y, C.offset_z
FROM raw_data_item A, raw_data_item_pc B, osg_data_item_pc_background C
WHERE A.raw_data_item_id = B.raw_data_item_id AND
      B.raw_data_item_id = C.raw_data_item_id AND
      A.srid = %s"""
    queryArgs = [args.srid, ]
    backgroundOffsets, num_backgrounds = utils.fetchDataFromDB(
        cursor, query, queryArgs)
    if num_backgrounds:
        (offsetX, offsetY, offsetZ) = backgroundOffsets[0]
    # get all items
    query = 'SELECT item_id, ST_ASGEOJSON(geom), min_z, max_z FROM item WHERE NOT background ORDER BY item_id'
    sites, num_sites = utils.fetchDataFromDB(cursor, query)
    data = []
    for (itemId, itemGeom, minz, maxz) in sites:
        # Generate the JSON data for this item
        dataSite = {}
        dataSite["id"] = itemId
        if itemGeom != None:
            dataSite["footprint"] = json.loads(itemGeom)['coordinates']
            dataSite["footprint_altitude"] = [minz, maxz]
        addThumbnail(cursor, itemId, dataSite)
        addSiteMetaData(cursor, itemId, dataSite)
        addPointCloud(cursor, itemId, dataSite, args.srid)
        addMeshes(cursor, itemId, dataSite, args.srid)
        addObjectsMetaData(cursor, itemId, dataSite, args.srid)
        data.append(dataSite)
    # close the Db connection
    utils.closeConnectionDB(connection, cursor)
    # save the data into JSON file
    save2JSON(args.output, data)
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logger.info(msg)
def create_cut_out(cursor, inputLAS, output, itemid, buffer, concave):
    """Cut the LAS/LAZ points inside the (buffered) concave hull of an item.

    Queries the DB for the concave hull of the item's footprint (optionally
    buffered by *buffer* meters), runs LAStools' ``lasmerge`` to clip the
    point cloud in *inputLAS* to the hull's bounding box, and extracts
    elevation statistics from the result with ``lasinfo``.

    Parameters:
        cursor:   open DB cursor.
        inputLAS: directory containing the LAS/LAZ files to cut from.
        output:   path of the LAS/LAZ file to generate.
        itemid:   item id whose footprint is used.
        buffer:   buffer (meters) around the concave hull; 0 disables it.
        concave:  target percentage for ST_ConcaveHull.

    Returns:
        (returnOk, vertices, minZ, maxZ, avgZ, numpoints) where returnOk is
        True only when every step succeeded; on failure the values computed
        so far are returned with the rest as None.
    """
    returnOk = False
    vertices = None
    minZ = None
    maxZ = None
    avgZ = None
    numpoints = None
    # Make DB query to extract the bounding box of the buffer (if selected)
    # of the concave hull of the footprint of a item/site.
    # Also the concave hull is extracted
    queryDescr = 'Getting bounding box of '
    queryArgs = []
    aux = 'ch'
    if buffer > 0:
        aux = 'ST_Buffer(ch,%s)'
        queryArgs.append(buffer)
        queryDescr += 'buffer of %.2f meters around ' % buffer
    queryDescr += 'concave hull of footprint of item %s' % itemid
    query = """
SELECT st_astext(ch), st_xmin(ebch), st_xmax(ebch), st_ymin(ebch), st_ymax(ebch) FROM (
    SELECT ch, ST_Envelope(""" + aux + """) as ebch FROM (
        SELECT ST_ConcaveHull(geom, %s) as ch FROM ITEM
        WHERE item_id = %s AND geom is NOT null
    ) A
) B"""
    queryArgs.extend([concave, itemid])
    logging.info(queryDescr)
    rows, num = utils.fetchDataFromDB(cursor, query, queryArgs)
    if num == 0:
        logging.error('Wrong item ID: No item is found with specified ID or the geometry is NULL!')
        return (returnOk, vertices, minZ, maxZ, avgZ, numpoints)
    (concaveHull, minx, maxx, miny, maxy) = rows[0]
    # If it is found we also extract the individual 2D points of the
    # vertices of the concave hull
    logging.info('Extracting 2D points of concave hull of footprint')
    vertices = []
    # BUGFIX: the original pattern "\d+.\d+ \d+.\d+" left the dots
    # unescaped (matching any character) and rejected negative or integer
    # coordinates. Match an optionally-signed number with an optional
    # decimal part instead.
    coordPair = re.compile(r"-?\d+(?:\.\d+)? -?\d+(?:\.\d+)?")
    for e in concaveHull.split(','):
        c = coordPair.findall(e)
        vertices.append(c[0].split(' '))
    # Check there is some LAS/LAZ file in specified directory
    listPCFiles = glob.glob(inputLAS + '/*las') + glob.glob(inputLAS + '/*laz')
    if len(listPCFiles) == 0:
        logging.error('%s does not contain any LAS/LAZ file' % inputLAS)
        return (returnOk, vertices, minZ, maxZ, avgZ, numpoints)
    # Create list of files for lasmerge
    tfilename = output + '.list'
    with open(tfilename, 'w') as tfile:
        for f in listPCFiles:
            tfile.write(f + '\n')
    command = 'lasmerge -lof ' + tfilename + ' -inside ' + str(minx) + ' ' + str(miny) + ' ' + str(maxx) + ' ' + str(maxy) + ' -o ' + output
    logging.info(command)
    os.system(command)
    # portable removal instead of shelling out to 'rm'
    os.remove(tfilename)
    if not os.path.isfile(output):
        logging.error('Output file has not been generated. Is LAStools/lasmerge installed and in PATH?')
        return (returnOk, vertices, minZ, maxZ, avgZ, numpoints)
    logging.info('Getting average elevation and number of points from %s' % output)
    statcommand = "lasinfo -i " + output + " -nv -nmm -histo z 10000000"
    lines = '\n'.join(subprocess.Popen(statcommand, shell=True,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE).communicate()).split('\n')
    # Scrape the statistics we need out of the lasinfo report.
    for line in lines:
        if line.count('average z'):
            avgZ = float(line.split()[-1])
        if line.count('number of point records:'):
            numpoints = int(line.split()[-1])
        if line.count('min x y z:'):
            minZ = float(line.split()[-1])
        if line.count('max x y z:'):
            maxZ = float(line.split()[-1])
    if numpoints is None:
        logging.error("Could not extract average elevation and number of points. Is LAStools/lasinfo installed and in PATH? Check that lasinfo in PATH is from LAStools and not libLAS!")
        return (returnOk, vertices, minZ, maxZ, avgZ, numpoints)
    logging.info('Extracted cutout for item %d has %d points with minimum z = %s, maximum z = %s, average z = %s' % (itemid, numpoints, str(minZ), str(maxZ), str(avgZ)))
    returnOk = True
    return (returnOk, vertices, minZ, maxZ, avgZ, numpoints)
def createOSG(cursor, itemId, osgDir):
    """Convert raw data item *itemId* (point cloud, mesh or picture) to OSG.

    Determines the input type/kind from the DB, selects the matching
    converter mode, runs the external converter in the item's directory and
    moves the produced files into the output folder under *osgDir*. Also
    updates the generated XML description (except for backgrounds) and
    parses the generated ``*offset.txt`` file when present.

    Side effects: changes the process working directory (restored at the
    end via the module-level ``cwd`` global).
    """
    global cwd
    # mainOsgb/xmlPath/offsets hold the conversion results gathered below.
    (mainOsgb, xmlPath, offsets) = (None, None, (0, 0, 0))
    # extract abspath using raw_data_item_id
    data_items, num_items = utils.fetchDataFromDB(
        cursor, "SELECT abs_path, item_id FROM RAW_DATA_ITEM WHERE " +
        "raw_data_item_id = %s", (itemId, ))
    absPath, site_id = data_items[0]
    # extract inType & outFolder, create outFolder in non-existent
    inType, inKind, outFolder = extract_inType(absPath, site_id, osgDir)
    # Get the inputFile for the converter (in the case of Meshes without
    # OBJ we skip them, this method just returns)
    # A PC SITE
    if (inType == utils.PC_FT and inKind == utils.SITE_FT):
        inputFiles = glob.glob(absPath + '/*.las') + glob.glob(absPath +
                                                               '/*.laz')
    # A PC BACKGROUND
    elif (inType == utils.PC_FT and inKind == utils.BG_FT):  # A PC BG
        numLAS = len(glob.glob(absPath + '/*.las'))
        numLAZ = len(glob.glob(absPath + '/*.laz'))
        if (numLAS != 0) and (numLAZ != 0):
            error(
                'Folder %s should contain LAS or LAZ but not both!' % absPath,
                outFolder)
        # For backgrounds a glob pattern (not a file list) is handed to the
        # converter.
        if numLAS:
            inputFiles = [absPath + '/*.las', ]
        elif numLAZ:
            inputFiles = [absPath + '/*.laz', ]
        else:
            error('Folder %s does not contain LAS or LAZ files' % absPath,
                  outFolder)
    # A MESH
    elif inType == utils.MESH_FT:
        inputFiles = glob.glob(absPath + '/*.obj') + glob.glob(absPath +
                                                               '/*.OBJ')
        if len(inputFiles) == 0:
            # Meshes without OBJ are ignores
            logging.warning('Ignoring ' + absPath + ': no OBJ is found')
            shutil.rmtree(outFolder)
            return
    # A PICTURE
    elif inType == utils.PIC_FT:
        inputFiles = glob.glob(absPath + '/*.png') + glob.glob(
            absPath + '/*.jpg') + glob.glob(absPath + '/*.jpeg') + glob.glob(
                absPath + '/*.PNG') + glob.glob(
                    absPath + '/*.JPG') + glob.glob(absPath + '/*.JPEG')
    # Exactly one input file (or pattern) is expected.
    if len(inputFiles) > 1:
        error('Multiple valid files found in %s' % absPath, outFolder)
    elif len(inputFiles) == 0:
        error('None valid files found in %s' % absPath, outFolder)
    inputFileAbsPath = inputFiles[0]
    # Get 8bitcolor information from DB (from either the PC or the MESH row)
    data_items, num_items = utils.fetchDataFromDB(
        cursor, 'SELECT RAW_DATA_ITEM_PC.color_8bit, ' +
        'RAW_DATA_ITEM_MESH.color_8bit FROM RAW_DATA_ITEM LEFT JOIN ' +
        'RAW_DATA_ITEM_PC ON RAW_DATA_ITEM.raw_data_item_id=' +
        'RAW_DATA_ITEM_PC.raw_data_item_id LEFT JOIN RAW_DATA_ITEM_MESH ON ' +
        'RAW_DATA_ITEM.raw_data_item_id=RAW_DATA_ITEM_MESH.raw_data_item_id ' +
        'WHERE ' + 'RAW_DATA_ITEM.raw_data_item_id = %s', (itemId, ))
    try:
        if (True in data_items[0]):
            color8Bit = True
        else:
            color8Bit = False
    except IndexError:
        color8Bit = False  # no 8BC color in database, set to false
    # Get alignment info from DB: the offset of the background that shares
    # this item's SRID (if any).
    data_items, num_items = utils.fetchDataFromDB(
        cursor, 'SELECT offset_x, offset_y, offset_z FROM ' +
        'OSG_DATA_ITEM_PC_BACKGROUND INNER JOIN RAW_DATA_ITEM ON ' +
        'OSG_DATA_ITEM_PC_BACKGROUND.raw_data_item_id=' +
        'RAW_DATA_ITEM.raw_data_item_id WHERE RAW_DATA_ITEM.srid = ' +
        '(SELECT srid from RAW_DATA_ITEM WHERE raw_data_item_id=' +
        '%s )', (itemId, ))
    # Set offset if item is aligned
    aligned = False
    if len(data_items) > 0:
        aligned = True
        (abOffsetX, abOffsetY, abOffsetZ) = data_items[0]
    # Remember where we are so we can chdir back at the end.
    cwd = os.getcwd()
    if os.path.isfile(absPath):
        # input was a file -> raise IOError
        error(
            'Database key abspath should define a directory, ' +
            'file detected: ' + absPath, outFolder)
    else:
        # input is already a directory
        os.chdir(absPath)
    outputPrefix = utils.OSG_DATA_PREFIX
    ofile = getOSGFileFormat(inType)
    # Pick the converter mode for this input type/kind.
    # A PC SITE
    if (inType == utils.PC_FT and inKind == utils.SITE_FT):
        tmode = '--mode lodPoints --reposition'
    # A PC BACKGROUND
    elif (inType == utils.PC_FT and inKind == utils.BG_FT):  # A PC BG
        tmode = '--mode quadtree --reposition'
    # A MESH
    elif inType == utils.MESH_FT:
        tmode = '--mode polyMesh --convert --reposition'
    # A PICTURE
    elif inType == utils.PIC_FT:
        tmode = '--mode picturePlane'
    logFile = os.path.join(outFolder, outputPrefix + '.log')
    # Call CONVERTER_COMMAND for the inputFile
    command = CONVERTER_COMMAND + ' ' + tmode + ' --outputPrefix ' + \
        outputPrefix + ' --files ' + os.path.basename(inputFileAbsPath)
    if color8Bit:
        command += ' --8bitColor '
    if aligned:
        command += ' --translate ' + str(abOffsetX) + ' ' + str(abOffsetY) + \
            ' ' + str(abOffsetZ)
    # NOTE(review): ' &> ' is appended but the command runs via
    # shlex/subprocess WITHOUT a shell, so '&>' and the log path are passed
    # to the converter as literal arguments and the log file is never
    # written — confirm whether shell redirection was intended.
    command += ' &> ' + logFile
    logging.info(command)
    args = shlex.split(command)
    subprocess.Popen(args, stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE).communicate()
    # move files to outFolder; drop outputPrefix from filename
    logging.info("Moving files to " + outFolder)
    outputFiles = glob.glob(outputPrefix + '*')
    for filename in outputFiles:
        shutil.move(os.path.abspath(filename),
                    os.path.join(outFolder, filename))
    ofiles = sorted(glob.glob(os.path.join(outFolder, '*' + ofile)))
    if len(ofiles) == 0:
        error(
            'none OSG file was generated (found in ' + outFolder +
            '). Check log: ' + logFile, outFolder)
    else:
        mainOsgb = ofiles[0]
        if not (inType == utils.PC_FT and inKind == utils.BG_FT):
            # if not a PC BACK: an XML description is expected as well
            xmlfiles = glob.glob(os.path.join(outFolder, '*xml'))
            if len(xmlfiles) == 0:
                error(
                    'none XML file was generated (found in ' + outFolder +
                    '). Check log: ' + logFile, outFolder)
                xmlPath = None
            else:
                xmlPath = xmlfiles[0]
                if len(xmlfiles) > 1:
                    error(
                        'multiple XMLs file were generated (found in ' +
                        outFolder + '). Using ' + xmlPath, outFolder)
            # upate xml file
            updateXMLDescription(xmlPath, cursor, inType, itemId)
    # Parse the offsets written by the converter, e.g. "Offset: x y z".
    txtfiles = glob.glob(os.path.join(outFolder, '*offset.txt'))
    if len(txtfiles):
        txtFile = txtfiles[0]
        offsets = open(txtFile, 'r').read().split('\n')[0] \
            .split(':')[1].split()
        for i in range(len(offsets)):
            offsets[i] = float(offsets[i])
    elif aligned:
        logging.warn('No offset file was found and it was expected!')
    # We move back to the current working directory
    os.chdir(cwd)
def run(args):
    """Generate the PattyVis JSON description file for all non-background items.

    Connects to the DB, optionally loads the background offset for
    args.srid into the module-level offsetX/Y/Z globals, gathers footprint,
    thumbnail, metadata, point clouds, meshes and object metadata for each
    item and saves everything with save2JSON to args.output.

    NOTE(review): this duplicates an identical ``run`` defined earlier in
    this file; this later definition is the one that takes effect.
    """
    global logger
    global offsetX
    global offsetY
    global offsetZ
    logname = os.path.basename(args.output) + '.log'
    logger = utils.start_logging(filename=logname, level=args.log)
    # start logging
    localtime = utils.getCurrentTimeAsAscii()
    msg = __file__ + ' script logging start at %s' % localtime
    print msg
    logger.info(msg)
    t0 = time.time()
    # connect to DB and get a cursor
    connection, cursor = utils.connectToDB(args.dbname, args.dbuser,
                                           args.dbpass, args.dbhost)
    # We assume the osg location is relative
    # We need to make it absolute by adding the offset of the background
    # with srid as provided
    query = """
SELECT C.offset_x, C.offset_y, C.offset_z
FROM raw_data_item A, raw_data_item_pc B, osg_data_item_pc_background C
WHERE A.raw_data_item_id = B.raw_data_item_id AND
      B.raw_data_item_id = C.raw_data_item_id AND
      A.srid = %s"""
    queryArgs = [args.srid, ]
    backgroundOffsets, num_backgrounds = utils.fetchDataFromDB(
        cursor, query, queryArgs)
    if num_backgrounds:
        (offsetX, offsetY, offsetZ) = backgroundOffsets[0]
    # get all items
    query = 'SELECT item_id, ST_ASGEOJSON(geom), min_z, max_z FROM item WHERE NOT background ORDER BY item_id'
    sites, num_sites = utils.fetchDataFromDB(cursor, query)
    data = []
    for (itemId, itemGeom, minz, maxz) in sites:
        # Generate the JSON data for this item
        dataSite = {}
        dataSite["id"] = itemId
        if itemGeom != None:
            dataSite["footprint"] = json.loads(itemGeom)['coordinates']
            dataSite["footprint_altitude"] = [minz, maxz]
        addThumbnail(cursor, itemId, dataSite)
        addSiteMetaData(cursor, itemId, dataSite)
        addPointCloud(cursor, itemId, dataSite, args.srid)
        addMeshes(cursor, itemId, dataSite, args.srid)
        addObjectsMetaData(cursor, itemId, dataSite, args.srid)
        data.append(dataSite)
    # close the Db connection
    utils.closeConnectionDB(connection, cursor)
    # save the data into JSON file
    save2JSON(args.output, data)
    elapsed_time = time.time() - t0
    msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (
        elapsed_time, logname)
    print(msg)
    logger.info(msg)
def run(opts): global logger # Define logger and start logging #logname = os.path.basename(opts.output) + '.log' logname = os.path.splitext(os.path.basename(__file__))[0] + '.log' logger = utils.start_logging(filename=logname, level=opts.log) localtime = utils.getCurrentTimeAsAscii() t0 = time.time() msg = os.path.basename(__file__) + ' script starts at %s.' % localtime print msg logger.info(msg) if not opts.output.endswith(".conf.xml"): logger.error('The output file must end with .conf.xml') raise IOError('The output file must end with .conf.xml') # Create python postgres connection global cursor connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser, opts.dbpass, opts.dbhost, opts.dbport) # Check that provided background is in DB query = """ SELECT OSG_DATA_ITEM_PC_BACKGROUND.abs_path,srid FROM OSG_DATA_ITEM_PC_BACKGROUND JOIN RAW_DATA_ITEM USING (raw_data_item_id)""" rows, num_rows = utils.fetchDataFromDB(cursor, query) backGroundAbsPath = None backgroundSRID = None for (bgAbsPath, bgSRID) in rows: if opts.background == os.path.basename(bgAbsPath): backGroundAbsPath = bgAbsPath backgroundSRID = bgSRID if backGroundAbsPath == None: errorMsg = 'Background ' + opts.background + ' is not found' logger.error(errorMsg) raise Exception(errorMsg) # Get the root object: the OSG configuration rootObject = viewer_conf_api.osgRCconfiguration() # set version rootObject.set_version("0.2") # Add all the different XML of the active objects # (we add distinct since the boundings will share XMLs) query = """ SELECT DISTINCT xml_abs_path FROM OSG_DATA_ITEM ORDER BY xml_abs_path""" rows, num_rows = utils.fetchDataFromDB(cursor, query) for (xmlPath, ) in rows: if xmlPath.count(opts.osg) == 0: logger.error('Mismatch between given OSG data directory ' + 'and DB content') rootObject.add_objectLibrary( viewer_conf_api.objectLibrary( url=os.path.relpath(xmlPath, opts.osg))) # Add the object library with the boundings rootObject.add_objectLibrary( 
viewer_conf_api.objectLibrary(url=utils.BOUNDINGS_XML_RELATIVE)) # Add the cameras that are in the DB cameras = viewer_conf_api.cameras() query = """ SELECT osg_camera_name, srid, x, y, z, h, p, r FROM OSG_CAMERA JOIN OSG_LOCATION USING (osg_location_id)""" rows, num_rows = utils.fetchDataFromDB(cursor, query) for (name, srid, x, y, z, h, p, r) in rows: if (srid is not None) and (srid == bgSRID): x, y, z = getOSGPosition(x, y, z, srid) else: x, y, z = getOSGPosition(x, y, z) cameras.add_camera( viewer_conf_api.camera(name=name, x=x, y=y, z=z, h=h, p=p, r=r)) # Add Default cameras for the items that have no camera in the DB query = """ SELECT item_id, ST_SRID(geom), st_x(st_centroid(geom)), st_y(st_centroid(geom)), min_z + ((max_z - min_z) / 2) FROM ITEM WHERE NOT background AND geom IS NOT null AND item_id NOT IN ( SELECT DISTINCT item_id FROM OSG_ITEM_CAMERA ) ORDER BY item_id""" rows, numitems = utils.fetchDataFromDB(cursor, query) for (itemId, srid, x, y, z) in rows: # only call getOSGPosition if [x,y,z] are not None # should item_id = -1 be added? if all(position is not None for position in [x, y, z]) and itemId > 0: if (srid is not None) and (srid == bgSRID): x, y, z = getOSGPosition(x, y, z, srid) else: x, y, z = getOSGPosition(x, y, z) cameras.add_camera( viewer_conf_api.camera(name=utils.DEFAULT_CAMERA_PREFIX + str(itemId), x=x, y=y, z=z)) rootObject.set_cameras(cameras) # Add the XML content of the preferences rootObject.set_preferences(viewer_conf_api.parseString(DEFAULT_PREFENCES)) attributes = viewer_conf_api.attributes() # Use generic method to fill all properties. 
# We need the name in the XML, the column name in the DB and # the table name in the DB for property in utils.ATTRIBUTES_ORDER: (cName, tName) = utils.ATTRIBUTES[property] elements = getattr(viewer_conf_api, property + 's')() # We need to call the columns and tables with extra "" because # they were created from the Access DB # utils.dbExecute(cursor, 'SELECT "' + cName + '" FROM "' + tName + '"') utils.dbExecute(cursor, 'SELECT ' + cName + ' FROM ' + tName) for (element, ) in cursor: getattr(elements, 'add_' + property)(getattr(viewer_conf_api, property)(name=element)) getattr(attributes, 'set_' + property + 's')(elements) rootObject.set_attributes(attributes) # Add all the static objects, i.e. the OSG from the background # Add the static object for the background staticObjects = viewer_conf_api.staticObjects() staticObjects.add_staticObject( viewer_conf_api.staticObject(url=os.path.relpath( glob.glob(backGroundAbsPath + '/' + utils.OSG_DATA_PREFIX + '.osgb')[0], opts.osg))) # Add hardcode DOME staticObjects.add_staticObject( viewer_conf_api.staticObject(url=utils.DOMES_OSG_RELATIVE)) rootObject.set_staticObjects(staticObjects) # Add the 5 different layers of active objects activeObjects = viewer_conf_api.activeObjects() # First we add points, meshes and pcitures which are related to # the active_objects_sites layersData = [('points', 'OSG_DATA_ITEM_PC_SITE', utils.AO_TYPE_PC), ('photos', 'OSG_DATA_ITEM_PICTURE', utils.AO_TYPE_PIC), ('meshes', 'OSG_DATA_ITEM_MESH', utils.AO_TYPE_MESH)] for (layerName, tableName, inType) in layersData: layer = viewer_conf_api.layer(name=layerName) query = """ SELECT item_id, raw_data_item_id, OSG_LOCATION.srid, x, y, z, xs, ys, zs, h, p, r, cast_shadow FROM """ + tableName + """ JOIN OSG_DATA_ITEM USING (osg_data_item_id) JOIN OSG_LOCATION USING (osg_location_id) JOIN RAW_DATA_ITEM USING (raw_data_item_id) ORDER BY item_id""" rows, numitems = utils.fetchDataFromDB(cursor, query) for (itemId, rawDataItemId, srid, x, y, z, xs, ys, 
zs, h, p, r, castShadow) in rows: # only call getOSGPosition if [x,y,z] are not None if all(position is not None for position in [x, y, z]): if (srid is not None) and (srid == bgSRID): x, y, z = getOSGPosition(x, y, z, srid) else: x, y, z = getOSGPosition(x, y, z) uniqueName = utils.codeOSGActiveObjectUniqueName( cursor, inType, rawDataItemId) activeObject = viewer_conf_api.activeObject(prototype=uniqueName, uniqueName=uniqueName) setting = viewer_conf_api.setting( x=x, y=y, z=z, xs=xs, ys=ys, zs=zs, h=h, p=p, r=r, castShadow=(1 if castShadow else 0)) activeObject.set_setting(setting) layer.add_activeObject(activeObject) activeObjects.add_layer(layer) # Add the boundings layer = viewer_conf_api.layer(name='boundings') # We first add the boundings that are currently in the DB query = """ SELECT item_id, object_number, x, y, z, xs, ys, zs, h, p, r, OSG_LOCATION.cast_shadow, srid FROM OSG_ITEM_OBJECT JOIN OSG_LOCATION USING (osg_location_id) ORDER BY item_id,object_number""" osgItemObjects, numOsgItemObjects = utils.fetchDataFromDB(cursor, query) # osgItemObjects is (itemId, objectNumber, x, y, z, xs, ys, zs, h, p, r, castShadow, srid) # Now we add Default OSG data items for the objects that are not in OSG_ITEM_OBJECT table query = """ SELECT item_id,object_number FROM item_object WHERE (item_id,object_number) NOT IN (SELECT item_id,object_number FROM OSG_ITEM_OBJECT) ORDER BY item_id,object_number""" objects, num_objects = utils.fetchDataFromDB(cursor, query) for (itemId, objectNumber) in objects: srid = None (x, y, z) = (0, 0, 0) (xs, ys, zs) = (1, 1, 1) query = """ SELECT ST_SRID(geom), st_x(st_centroid(geom)), st_y(st_centroid(geom)), min_z + ((max_z - min_z) / 2), st_xmax(geom)-st_xmin(geom) as dx, st_ymax(geom)-st_ymin(geom) as dy, (max_z - min_z) as dz FROM ITEM WHERE item_id = %s and geom is not %s""" queryArgs = [itemId, None] footprints, num_footprints = utils.fetchDataFromDB( cursor, query, queryArgs) if num_footprints: (srid, x, y, z, xs, ys, zs) = 
footprints[0] if xs == 0: xs = 1 if ys == 0: ys = 1 if zs == 0: zs = 1 osgItemObjects.append( [itemId, objectNumber, x, y, z, xs, ys, zs, 0, 0, 0, False, srid]) # Now let's add them to the XML for (itemId, objectNumber, x, y, z, xs, ys, zs, h, p, r, castShadow, srid) in osgItemObjects: # only call getOSGPosition if [x,y,z] are not None if all(position is not None for position in [x, y, z]) and itemId > 0: if (srid is not None) and (srid == bgSRID): x, y, z = getOSGPosition(x, y, z, srid) else: x, y, z = getOSGPosition(x, y, z) uniqueName = utils.codeOSGActiveObjectUniqueName( cursor, utils.AO_TYPE_OBJ, itemId=itemId, objectId=objectNumber) proto = "Bounding Box" activeObject = viewer_conf_api.activeObject(prototype=proto, uniqueName=uniqueName) setting = viewer_conf_api.setting( x=x, y=y, z=z, xs=xs, ys=ys, zs=zs, h=h, p=p, r=r, castShadow=(1 if castShadow else 0)) activeObject.set_setting(setting) layer.add_activeObject(activeObject) activeObjects.add_layer(layer) # Add the labels layer = viewer_conf_api.layer(name='labels') utils.dbExecute( cursor, 'SELECT osg_label_name, text, red, green, blue, ' + 'rotate_screen, outline, font, srid, x, y, z, xs, ys, zs, h, ' + 'p, r, cast_shadow FROM OSG_LABEL INNER JOIN ' + 'OSG_LOCATION ON OSG_LABEL.osg_location_id=' + 'OSG_LOCATION.osg_location_id') rows = cursor.fetchall() for (name, text, red, green, blue, rotatescreen, outline, font, srid, x, y, z, xs, ys, zs, h, p, r, castShadow) in rows: proto = "labelPrototype" uniqueName = utils.codeOSGActiveObjectUniqueName(cursor, utils.AO_TYPE_LAB, labelName=name) if (srid is not None) and (srid == bgSRID): x, y, z = getOSGPosition(x, y, z, srid) else: x, y, z = getOSGPosition(x, y, z) activeObject = viewer_conf_api.activeObject( prototype=proto, uniqueName=uniqueName, labelText=text, labelColorRed=red, labelColorGreen=green, labelColorBlue=blue, labelRotateScreen=rotatescreen, outline=outline, Font=font) setting = viewer_conf_api.setting(x=x, y=y, z=z, xs=xs, ys=ys, zs=zs, h=h, 
p=p, r=r, castShadow=(1 if castShadow else 0)) activeObject.set_setting(setting) layer.add_activeObject(activeObject) activeObjects.add_layer(layer) rootObject.set_activeObjects(activeObjects) # Create the XML rootObject.export(open(opts.output, 'w'), 0) elapsed_time = time.time() - t0 msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % ( elapsed_time, logname) print(msg) logger.info(msg)
def run(opts): global logger # Define logger and start logging #logname = os.path.basename(opts.output) + '.log' logname = os.path.splitext(os.path.basename(__file__))[0] + '.log' logger = utils.start_logging(filename=logname, level=opts.log) localtime = utils.getCurrentTimeAsAscii() t0 = time.time() msg = os.path.basename(__file__) + ' script starts at %s.' % localtime print msg logger.info(msg) if not opts.output.endswith(".conf.xml"): logger.error('The output file must end with .conf.xml') raise IOError('The output file must end with .conf.xml') # Create python postgres connection global cursor connection, cursor = utils.connectToDB(opts.dbname, opts.dbuser, opts.dbpass, opts.dbhost, opts.dbport) # Check that provided background is in DB query = """ SELECT OSG_DATA_ITEM_PC_BACKGROUND.abs_path,srid FROM OSG_DATA_ITEM_PC_BACKGROUND JOIN RAW_DATA_ITEM USING (raw_data_item_id)""" rows, num_rows = utils.fetchDataFromDB(cursor, query) backGroundAbsPath = None backgroundSRID = None for (bgAbsPath,bgSRID) in rows: if opts.background == os.path.basename(bgAbsPath): backGroundAbsPath = bgAbsPath backgroundSRID = bgSRID if backGroundAbsPath == None: errorMsg = 'Background ' + opts.background + ' is not found' logger.error(errorMsg) raise Exception(errorMsg) # Get the root object: the OSG configuration rootObject = viewer_conf_api.osgRCconfiguration() # set version rootObject.set_version("0.2") # Add all the different XML of the active objects # (we add distinct since the boundings will share XMLs) query = """ SELECT DISTINCT xml_abs_path FROM OSG_DATA_ITEM ORDER BY xml_abs_path""" rows, num_rows = utils.fetchDataFromDB(cursor, query) for (xmlPath,) in rows: if xmlPath.count(opts.osg) == 0: logger.error('Mismatch between given OSG data directory ' + 'and DB content') rootObject.add_objectLibrary(viewer_conf_api.objectLibrary (url=os.path.relpath(xmlPath, opts.osg))) # Add the object library with the boundings rootObject.add_objectLibrary(viewer_conf_api.objectLibrary 
(url=utils.BOUNDINGS_XML_RELATIVE)) # Add the cameras that are in the DB cameras = viewer_conf_api.cameras() query = """ SELECT osg_camera_name, srid, x, y, z, h, p, r FROM OSG_CAMERA JOIN OSG_LOCATION USING (osg_location_id)""" rows, num_rows = utils.fetchDataFromDB(cursor, query) for (name, srid, x, y, z, h, p, r) in rows: if (srid is not None) and (srid == bgSRID): x, y, z = getOSGPosition(x, y, z, srid) else: x, y, z = getOSGPosition(x, y, z) cameras.add_camera(viewer_conf_api.camera (name=name, x=x, y=y, z=z, h=h, p=p, r=r)) # Add Default cameras for the items that have no camera in the DB query = """ SELECT item_id, ST_SRID(geom), st_x(st_centroid(geom)), st_y(st_centroid(geom)), min_z + ((max_z - min_z) / 2) FROM ITEM WHERE NOT background AND geom IS NOT null AND item_id NOT IN ( SELECT DISTINCT item_id FROM OSG_ITEM_CAMERA ) ORDER BY item_id""" rows, numitems = utils.fetchDataFromDB(cursor, query) for (itemId, srid, x, y, z) in rows: # only call getOSGPosition if [x,y,z] are not None # should item_id = -1 be added? if all(position is not None for position in [x,y,z]) and itemId>0: if (srid is not None) and (srid == bgSRID): x, y, z = getOSGPosition(x, y, z, srid) else: x, y, z = getOSGPosition(x, y, z) cameras.add_camera(viewer_conf_api.camera (name=utils.DEFAULT_CAMERA_PREFIX + str(itemId), x=x, y=y, z=z)) rootObject.set_cameras(cameras) # Add the XML content of the preferences rootObject.set_preferences(viewer_conf_api.parseString(DEFAULT_PREFENCES)) attributes = viewer_conf_api.attributes() # Use generic method to fill all properties. 
# We need the name in the XML, the column name in the DB and # the table name in the DB for property in utils.ATTRIBUTES_ORDER: (cName, tName) = utils.ATTRIBUTES[property] elements = getattr(viewer_conf_api, property + 's')() # We need to call the columns and tables with extra "" because # they were created from the Access DB # utils.dbExecute(cursor, 'SELECT "' + cName + '" FROM "' + tName + '"') utils.dbExecute(cursor, 'SELECT ' + cName + ' FROM ' + tName) for (element,) in cursor: getattr(elements, 'add_' + property)(getattr( viewer_conf_api, property)(name=element)) getattr(attributes, 'set_' + property + 's')(elements) rootObject.set_attributes(attributes) # Add all the static objects, i.e. the OSG from the background # Add the static object for the background staticObjects = viewer_conf_api.staticObjects() staticObjects.add_staticObject(viewer_conf_api.staticObject (url=os.path.relpath( glob.glob(backGroundAbsPath + '/' + utils.OSG_DATA_PREFIX + '.osgb')[0], opts.osg))) # Add hardcode DOME staticObjects.add_staticObject(viewer_conf_api.staticObject (url=utils.DOMES_OSG_RELATIVE)) rootObject.set_staticObjects(staticObjects) # Add the 5 different layers of active objects activeObjects = viewer_conf_api.activeObjects() # First we add points, meshes and pcitures which are related to # the active_objects_sites layersData = [('points', 'OSG_DATA_ITEM_PC_SITE', utils.AO_TYPE_PC), ('photos', 'OSG_DATA_ITEM_PICTURE', utils.AO_TYPE_PIC), ('meshes', 'OSG_DATA_ITEM_MESH', utils.AO_TYPE_MESH)] for (layerName, tableName, inType) in layersData: layer = viewer_conf_api.layer(name=layerName) query = """ SELECT item_id, raw_data_item_id, OSG_LOCATION.srid, x, y, z, xs, ys, zs, h, p, r, cast_shadow FROM """ + tableName + """ JOIN OSG_DATA_ITEM USING (osg_data_item_id) JOIN OSG_LOCATION USING (osg_location_id) JOIN RAW_DATA_ITEM USING (raw_data_item_id) ORDER BY item_id""" rows, numitems = utils.fetchDataFromDB(cursor, query) for (itemId, rawDataItemId, srid, x, y, z, xs, ys, 
zs, h, p, r, castShadow) in rows: # only call getOSGPosition if [x,y,z] are not None if all(position is not None for position in [x,y,z]): if (srid is not None) and (srid == bgSRID): x, y, z = getOSGPosition(x, y, z, srid) else: x, y, z = getOSGPosition(x, y, z) uniqueName = utils.codeOSGActiveObjectUniqueName(cursor, inType, rawDataItemId) activeObject = viewer_conf_api.activeObject(prototype=uniqueName, uniqueName=uniqueName) setting = viewer_conf_api.setting( x=x, y=y, z=z, xs=xs, ys=ys, zs=zs, h=h, p=p, r=r, castShadow=(1 if castShadow else 0)) activeObject.set_setting(setting) layer.add_activeObject(activeObject) activeObjects.add_layer(layer) # Add the boundings layer = viewer_conf_api.layer(name='boundings') # We first add the boundings that are currently in the DB query = """ SELECT item_id, object_number, x, y, z, xs, ys, zs, h, p, r, OSG_LOCATION.cast_shadow, srid FROM OSG_ITEM_OBJECT JOIN OSG_LOCATION USING (osg_location_id) ORDER BY item_id,object_number""" osgItemObjects, numOsgItemObjects = utils.fetchDataFromDB(cursor, query) # osgItemObjects is (itemId, objectNumber, x, y, z, xs, ys, zs, h, p, r, castShadow, srid) # Now we add Default OSG data items for the objects that are not in OSG_ITEM_OBJECT table query = """ SELECT item_id,object_number FROM item_object WHERE (item_id,object_number) NOT IN (SELECT item_id,object_number FROM OSG_ITEM_OBJECT) ORDER BY item_id,object_number""" objects, num_objects = utils.fetchDataFromDB(cursor, query) for (itemId, objectNumber) in objects: srid = None (x,y,z) = (0,0,0) (xs,ys,zs) = (1,1,1) query = """ SELECT ST_SRID(geom), st_x(st_centroid(geom)), st_y(st_centroid(geom)), min_z + ((max_z - min_z) / 2), st_xmax(geom)-st_xmin(geom) as dx, st_ymax(geom)-st_ymin(geom) as dy, (max_z - min_z) as dz FROM ITEM WHERE item_id = %s and geom is not %s""" queryArgs = [itemId, None] footprints, num_footprints = utils.fetchDataFromDB(cursor, query, queryArgs) if num_footprints: (srid, x, y, z, xs, ys, zs) = footprints[0] if xs 
== 0: xs = 1 if ys == 0: ys = 1 if zs == 0: zs = 1 osgItemObjects.append([itemId, objectNumber, x, y, z, xs, ys, zs, 0, 0, 0, False, srid]) # Now let's add them to the XML for (itemId, objectNumber, x, y, z, xs, ys, zs, h, p, r, castShadow, srid) in osgItemObjects: # only call getOSGPosition if [x,y,z] are not None if all(position is not None for position in [x,y,z]) and itemId>0: if (srid is not None) and (srid == bgSRID): x, y, z = getOSGPosition(x, y, z, srid) else: x, y, z = getOSGPosition(x, y, z) uniqueName = utils.codeOSGActiveObjectUniqueName(cursor, utils.AO_TYPE_OBJ, itemId = itemId, objectId = objectNumber) proto = "Bounding Box" activeObject = viewer_conf_api.activeObject(prototype=proto, uniqueName=uniqueName) setting = viewer_conf_api.setting( x=x, y=y, z=z, xs=xs, ys=ys, zs=zs, h=h, p=p, r=r, castShadow=(1 if castShadow else 0)) activeObject.set_setting(setting) layer.add_activeObject(activeObject) activeObjects.add_layer(layer) # Add the labels layer = viewer_conf_api.layer(name='labels') utils.dbExecute(cursor, 'SELECT osg_label_name, text, red, green, blue, ' + 'rotate_screen, outline, font, srid, x, y, z, xs, ys, zs, h, ' + 'p, r, cast_shadow FROM OSG_LABEL INNER JOIN ' + 'OSG_LOCATION ON OSG_LABEL.osg_location_id=' + 'OSG_LOCATION.osg_location_id') rows = cursor.fetchall() for (name, text, red, green, blue, rotatescreen, outline, font, srid, x, y, z, xs, ys, zs, h, p, r, castShadow) in rows: proto = "labelPrototype" uniqueName = utils.codeOSGActiveObjectUniqueName(cursor, utils.AO_TYPE_LAB, labelName = name) if (srid is not None) and (srid == bgSRID): x, y, z = getOSGPosition(x, y, z, srid) else: x, y, z = getOSGPosition(x, y, z) activeObject = viewer_conf_api.activeObject( prototype=proto, uniqueName=uniqueName, labelText=text, labelColorRed=red, labelColorGreen=green, labelColorBlue=blue, labelRotateScreen=rotatescreen, outline=outline, Font=font) setting = viewer_conf_api.setting( x=x, y=y, z=z, xs=xs, ys=ys, zs=zs, h=h, p=p, r=r, 
castShadow=(1 if castShadow else 0)) activeObject.set_setting(setting) layer.add_activeObject(activeObject) activeObjects.add_layer(layer) rootObject.set_activeObjects(activeObjects) # Create the XML rootObject.export(open(opts.output, 'w'), 0) elapsed_time = time.time() - t0 msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % (elapsed_time, logname) print(msg) logger.info(msg)
def createOSG(cursor, itemId, osgDir):
    """Convert the raw data item with id itemId to the OSG format.

    Looks up the item's source folder in the DB, runs the external OSG
    converter on the LAS/LAZ, OBJ or picture files found there, moves the
    generated files below osgDir and post-processes the converter output
    (XML description update, offsets file parsing).

    :param cursor: open DB cursor
    :param itemId: raw_data_item_id of the item to convert
    :param osgDir: root directory for the generated OSG data
    NOTE(review): mainOsgb, xmlPath and offsets are filled but not returned
    in the visible code - confirm against the full file.
    """
    global cwd
    (mainOsgb, xmlPath, offsets) = (None, None, (0, 0, 0))
    # extract abspath using raw_data_item_id
    data_items, num_items = utils.fetchDataFromDB(
        cursor, "SELECT abs_path, item_id FROM RAW_DATA_ITEM WHERE " +
        "raw_data_item_id = %s", (itemId,))
    absPath, site_id = data_items[0]
    # extract inType & outFolder, create outFolder in non-existent
    inType, inKind, outFolder = extract_inType(absPath, site_id, osgDir)
    # Get the inputFile for the converter (in the case of Meshes without OBJ
    # we skip them, this method just returns)
    if (inType == utils.PC_FT and inKind == utils.SITE_FT):
        # A PC SITE
        inputFiles = glob.glob(absPath + '/*.las') + \
            glob.glob(absPath + '/*.laz')
    elif (inType == utils.PC_FT and inKind == utils.BG_FT):
        # A PC BACKGROUND: converted from a glob pattern, LAS or LAZ but
        # not a mix of both
        numLAS = len(glob.glob(absPath + '/*.las'))
        numLAZ = len(glob.glob(absPath + '/*.laz'))
        if (numLAS != 0) and (numLAZ != 0):
            error('Folder %s should contain LAS or LAZ but not both!'
                  % absPath, outFolder)
        if numLAS:
            inputFiles = [absPath + '/*.las', ]
        elif numLAZ:
            inputFiles = [absPath + '/*.laz', ]
        else:
            error('Folder %s does not contain LAS or LAZ files' % absPath,
                  outFolder)
    elif inType == utils.MESH_FT:
        # A MESH
        inputFiles = glob.glob(absPath + '/*.obj') + \
            glob.glob(absPath + '/*.OBJ')
        if len(inputFiles) == 0:
            # Meshes without OBJ are ignored
            logging.warning('Ignoring ' + absPath + ': no OBJ is found')
            shutil.rmtree(outFolder)
            return
    elif inType == utils.PIC_FT:
        # A PICTURE
        inputFiles = glob.glob(absPath + '/*.png') + \
            glob.glob(absPath + '/*.jpg') + \
            glob.glob(absPath + '/*.jpeg') + \
            glob.glob(absPath + '/*.PNG') + \
            glob.glob(absPath + '/*.JPG') + \
            glob.glob(absPath + '/*.JPEG')
    if len(inputFiles) > 1:
        error('Multiple valid files found in %s' % absPath, outFolder)
    elif len(inputFiles) == 0:
        error('None valid files found in %s' % absPath, outFolder)
    inputFileAbsPath = inputFiles[0]

    # Get 8bitcolor information from DB
    data_items, num_items = utils.fetchDataFromDB(
        cursor, 'SELECT RAW_DATA_ITEM_PC.color_8bit, ' +
        'RAW_DATA_ITEM_MESH.color_8bit FROM RAW_DATA_ITEM LEFT JOIN ' +
        'RAW_DATA_ITEM_PC ON RAW_DATA_ITEM.raw_data_item_id=' +
        'RAW_DATA_ITEM_PC.raw_data_item_id LEFT JOIN RAW_DATA_ITEM_MESH ON ' +
        'RAW_DATA_ITEM.raw_data_item_id=RAW_DATA_ITEM_MESH.raw_data_item_id ' +
        'WHERE ' + 'RAW_DATA_ITEM.raw_data_item_id = %s', (itemId,))
    try:
        # 8-bit color if either the PC or the MESH row is flagged
        color8Bit = (True in data_items[0])
    except IndexError:
        color8Bit = False  # no 8BC color in database, set to false

    # Get alignment info from DB
    data_items, num_items = utils.fetchDataFromDB(
        cursor, 'SELECT offset_x, offset_y, offset_z FROM ' +
        'OSG_DATA_ITEM_PC_BACKGROUND INNER JOIN RAW_DATA_ITEM ON ' +
        'OSG_DATA_ITEM_PC_BACKGROUND.raw_data_item_id=' +
        'RAW_DATA_ITEM.raw_data_item_id WHERE RAW_DATA_ITEM.srid = ' +
        '(SELECT srid from RAW_DATA_ITEM WHERE raw_data_item_id=' +
        '%s )', (itemId,))
    # Set offset if item is aligned
    aligned = False
    if len(data_items) > 0:
        aligned = True
        (abOffsetX, abOffsetY, abOffsetZ) = data_items[0]

    cwd = os.getcwd()
    if os.path.isfile(absPath):
        # input was a file -> raise IOError
        error('Database key abspath should define a directory, ' +
              'file detected: ' + absPath, outFolder)
    else:
        # input is already a directory
        os.chdir(absPath)

    outputPrefix = utils.OSG_DATA_PREFIX
    ofile = getOSGFileFormat(inType)
    # Select converter mode per data type
    if (inType == utils.PC_FT and inKind == utils.SITE_FT):
        # A PC SITE
        tmode = '--mode lodPoints --reposition'
    elif (inType == utils.PC_FT and inKind == utils.BG_FT):
        # A PC BACKGROUND
        tmode = '--mode quadtree --reposition'
    elif inType == utils.MESH_FT:
        # A MESH
        tmode = '--mode polyMesh --convert --reposition'
    elif inType == utils.PIC_FT:
        # A PICTURE
        tmode = '--mode picturePlane'

    logFile = os.path.join(outFolder, outputPrefix + '.log')
    # Call CONVERTER_COMMAND for the inputFile
    command = CONVERTER_COMMAND + ' ' + tmode + ' --outputPrefix ' + \
        outputPrefix + ' --files ' + os.path.basename(inputFileAbsPath)
    if color8Bit:
        command += ' --8bitColor '
    if aligned:
        command += ' --translate ' + str(abOffsetX) + ' ' + \
            str(abOffsetY) + ' ' + str(abOffsetZ)
    logging.info(command)
    # BUGFIX: the old code appended ' &> ' + logFile to the command, but
    # '&>' is shell redirection syntax and subprocess.Popen runs WITHOUT a
    # shell here, so the converter received '&>' and the log path as bogus
    # extra arguments and the log file (referenced by the error messages
    # below) was never written. Capture stdout/stderr and write the log
    # ourselves instead.
    args = shlex.split(command)
    (out, err) = subprocess.Popen(args, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE).communicate()
    with open(logFile, 'w') as logFileDesc:
        logFileDesc.write(out)
        logFileDesc.write(err)

    # move files to outFolder; drop outputPrefix from filename
    logging.info("Moving files to " + outFolder)
    outputFiles = glob.glob(outputPrefix + '*')
    for filename in outputFiles:
        shutil.move(os.path.abspath(filename),
                    os.path.join(outFolder, filename))

    ofiles = sorted(glob.glob(os.path.join(outFolder, '*' + ofile)))
    if len(ofiles) == 0:
        error('none OSG file was generated (found in ' + outFolder +
              '). Check log: ' + logFile, outFolder)
    else:
        mainOsgb = ofiles[0]
        if not (inType == utils.PC_FT and inKind == utils.BG_FT):
            # if not a PC BACK
            xmlfiles = glob.glob(os.path.join(outFolder, '*xml'))
            if len(xmlfiles) == 0:
                error('none XML file was generated (found in ' + outFolder +
                      '). Check log: ' + logFile, outFolder)
                xmlPath = None
            else:
                xmlPath = xmlfiles[0]
                if len(xmlfiles) > 1:
                    error('multiple XMLs file were generated (found in ' +
                          outFolder + '). Using ' + xmlPath, outFolder)
            # upate xml file
            updateXMLDescription(xmlPath, cursor, inType, itemId)

    txtfiles = glob.glob(os.path.join(outFolder, '*offset.txt'))
    if len(txtfiles):
        txtFile = txtfiles[0]
        # BUGFIX: close the offsets file (the old code leaked the handle)
        with open(txtFile, 'r') as offsetsFile:
            offsets = offsetsFile.read().split('\n')[0] \
                .split(':')[1].split()
        for i in range(len(offsets)):
            offsets[i] = float(offsets[i])
    elif aligned:
        # logging.warn is a deprecated alias of logging.warning
        logging.warning('No offset file was found and it was expected!')
    # We move back to the current working directory
    os.chdir(cwd)
def run(args): logname = os.path.basename(args.input) + '.log' utils.start_logging(filename=logname, level=args.log) localtime = utils.getCurrentTimeAsAscii() t0 = time.time() msg = os.path.basename(__file__) + ' script starts at %s.' % localtime print msg logging.info(msg) logging.info('Checking validity of SQL file') # Check that beginning of the file does not contain a create database statement if os.popen('head -500 ' + args.input + ' | grep "CREATE DATABASE"').read().count("CREATE DATABASE"): msg = "You must remove CREATE DATABASE statement from the SQL file" print msg logging.error(msg) return # Check that ther are not defaults in TIMESTAMPS that would cause errors if os.popen('grep "TIMESTAMP DEFAULT" ' + args.input).read().count("TIMESTAMP DEFAULT"): msg = "You must remove any DEFAULT value of any TIMESTAMP column" print msg logging.error(msg) return # Check that ther are not index creations if os.popen('grep "INDEX" ' + args.input).read().count("INDEX"): msg = "You must remove any INDEX creation" print msg logging.error(msg) return if os.popen("""grep '"' """ + args.input).read().count('"'): msg = 'You must remove any double quote (")' print msg logging.error(msg) dangerousWords = [] for line in open(args.input, 'r').read().split('\n'): if not line.startswith('--'): for word in line.split(): if word.count('"') == 1: dangerousWords.append(word) if len(dangerousWords): msg = 'Also, before removing all ", take care of table and column names that would be incorrect when removing ".\n If any of the following is a table or column name please be sure that it does not have white spaces: ' + ','.join( dangerousWords) print msg logging.error(msg) return return # Establish connection with DB connection, cursor = utils.connectToDB(args.dbname, args.dbuser, args.dbpass, args.dbhost, args.dbport) # First we drop all tables in attribute logging.info("Dropping all previous attribute tables") for tablename in ('tbl2_site_relation', 'tbl2_object_depression', 
'tbl2_object_decoration', 'tbl2_object_material', 'tbl1_object', 'tbl1_site'): cursor.execute('DROP TABLE IF EXISTS ' + tablename + ' CASCADE') connection.commit() # First we need to drop the previous constraints in tbl1_site and tbl1_object # logging.info("Dropping constraints in tbl1_site and tbl1_object tables") # for tablename in ('tbl1_site','tbl1_object'): # cursor.execute("select constraint_name from information_schema.table_constraints where table_name=%s", [tablename,]) # constraintNames = cursor.fetchall() # for (constraintName, ) in constraintNames: # cursor.execute('ALTER TABLE ' + tablename + ' DROP CONSTRAINT %s CASCADE', [constraintName,]) # connection.commit() # This script will drop all attribute tables and create them again logging.info('Executing SQL file %s' % args.input) #utils.load_sql_file(cursor, args.input) connParams = utils.postgresConnectString(args.dbname, args.dbuser, args.dbpass, args.dbhost, args.dbport, True) logFile = os.path.basename(args.input) + '.log' command = 'psql ' + connParams + ' -f ' + args.input + ' &> ' + logFile logging.info(command) os.system(command) #Check errors if os.popen('cat ' + logFile + ' | grep ERROR').read().count("ERROR"): msg = 'There was some errors in the data loading. 
Please see log ' + logFile print msg logging.error(msg) return # Set select permissions to all new tables logging.info('Granting select permissions to all tables') cursor.execute( "select tablename from pg_tables where schemaname = 'public'") tablesNames = cursor.fetchall() for (tableName, ) in tablesNames: cursor.execute('GRANT SELECT ON ' + tableName + ' TO public') # We check that the added Sites and Objects are also in Data Management part of the DB # All sites in tbl1_site must have an entry in ITEM logging.info( 'Adding items in attribute data that are missing in ITEM table') query = 'SELECT site_id from tbl1_site WHERE site_id NOT IN (SELECT item_id FROM item)' sites, num_sites = utils.fetchDataFromDB(cursor, query) for (siteId, ) in sites: utils.dbExecute( cursor, "INSERT INTO ITEM (item_id, background) VALUES (%s,%s)", [siteId, False]) utils.dbExecute( cursor, "INSERT INTO ITEM_OBJECT (item_id, object_number) VALUES (%s,%s)", [siteId, utils.ITEM_OBJECT_NUMBER_ITEM]) # All objects in tbl1_object must also be in ITEM_OBJECT logging.info( 'Adding items objects in attribute data that are missing in ITEM_OBJECT table' ) query = 'SELECT site_id,object_id from tbl1_object WHERE (site_id,object_id) NOT IN (SELECT item_id,object_number FROM item_object)' sites_objects, num_sites_objects = utils.fetchDataFromDB(cursor, query) for (siteId, objectId) in sites_objects: utils.dbExecute( cursor, "INSERT INTO ITEM_OBJECT (item_id, object_number) VALUES (%s,%s)", [siteId, objectId]) #We add again the constraints that link management and attribute data logging.info('Adding constraints between attribute and items') cursor.execute("""ALTER TABLE tbl1_object ADD FOREIGN KEY (site_id, object_id) REFERENCES ITEM_OBJECT (item_id, object_number) ON UPDATE NO ACTION ON DELETE NO ACTION""") connection.commit() cursor.execute("""ALTER TABLE tbl1_site ADD FOREIGN KEY (site_id) REFERENCES ITEM (item_id) ON UPDATE NO ACTION ON DELETE NO ACTION""") connection.commit() elapsed_time = 
time.time() - t0 msg = 'Finished. Total elapsed time: %.02f seconds. See %s' % ( elapsed_time, logname) print(msg) logging.info(msg)