def gnmetaedges_fetch_api():
    """Fetch meta nodes+edges via the search service and wrap them as JSON."""
    # Pull the optional search query out of the request, stripping any
    # surrounding quote characters before logging it.
    srchqry = ''
    if 'srchqry' in request.args:
        srchqry = dequote(request.args['srchqry'])
        gn_log('GNPAppServer: search qry for metanodes : ' + srchqry)
    srchfilter = ""
    res = gngraph_search_client.gnsrch_metaqry_request(srchfilter)
    gn_log('GnAppSrv: metanodes and edges with filter ' + srchfilter + ' SUCCESS ')
    return {"status": "SUCCESS", "gndata": res}
def __init__(self, dbserver, dbport, dbuser, dbpasswd, dbname, dbtype):
    """Connect to Postgres via SQLAlchemy/psycopg2 and record connection state.

    On success self.connected is 1 and self.dbConnp holds the live
    connection; on failure self.dbConnp is None and self.connected is 0
    (callers test these flags rather than catching exceptions).
    """
    try:
        # Create an engine instance; pool_recycle avoids stale pooled conns.
        pgres_connstr = ('postgresql+psycopg2://' + dbuser + ':' + dbpasswd +
                         '@' + dbserver + ':' + dbport + '/' + dbname)
        gn_log('PgresDBOps: setting up connection ')
        alchemyEngine = create_engine(pgres_connstr, pool_recycle=3600)
        # Connect to PostgreSQL server
        self.dbEngine = alchemyEngine
        self.dbConnp = alchemyEngine.connect()
        self.__gdb_dbuser = dbuser
        self.__gdb_dbpasswd = dbpasswd
        self.__gdb_dbserver = dbserver
        self.__gdb_dbport = dbport
        self.__gdb_dbname = dbname
        self.connected = 1
        self.gnnode_table = "gnnodes"
        self.gnbizrules_table = "gnbizrules"
        self.__gdb_metadb_schema = "gnmeta"
        self.__gdb_metanodes_tbl = "gnnodes"
        self.__gdb_metaedges_tbl = "gnedges"
    # Bug fix: OperationalError subclasses SQLAlchemyError, so it must be
    # caught FIRST -- in the original order this handler was unreachable.
    except exc.OperationalError as err:
        self.dbConnp = None
        self.connected = 0
        gn_log('GnIngPgresDBOps: unable to connect pgres Operational Error ')
        gn_log(err)
    except exc.SQLAlchemyError as err:
        self.dbConnp = None
        self.connected = 0
        gn_log('GnIngPgresDBOps: unable to connect pgres DB Error ')
        gn_log(err)
def gngraph_db_datanode_create_table(self, dnodename, schema):
    """Create the per-datanode table <schema>.<dnodename> if it does not exist."""
    gn_log('GNGrphDBMgmtOps: create table for datanode ' + dnodename)
    tbl_str = schema + "." + dnodename.lower()
    dnode_tablestr = "CREATE TABLE IF NOT EXISTS " + tbl_str + " (gnnodeid bigint NOT NULL PRIMARY KEY, gnnodetype text, gnmetanodeid bigint, gndatanodeprop json, gndatanodeobj json, uptmstmp timestamp);"
    # Consistency fix: log through gn_log like every other statement in this
    # class instead of a bare print().
    gn_log('gnMgmtOps table_str ' + dnode_tablestr)
    self.db_create_table(dnode_tablestr)
    gn_log('GNGrphDBMgmtOps: table ' + tbl_str + ' is created ')
def gngraph_search_check_service(gnRootDir):
    """Return 1 when the gnsearch service is running, else 0 (and log it)."""
    appsrvid, srchpid = GnCheckServices(gnRootDir)
    if srchpid == 0:
        gn_log('GnSrchOps: search service is not running ')
        return 0
    return 1
def db_create_database(self, newdbname):
    """Create database *newdbname* on the configured server.

    Returns 0 on success, -1 on any psycopg2 error.
    """
    con = None
    try:
        con = psycopg2.connect(dbname='postgres',
                               user=self.dbuser,
                               host=self.dbserver,
                               port=self.dbport,
                               password=self.dbpasswd)
        # CREATE DATABASE cannot run inside a transaction block, so the
        # connection must be switched to autocommit first.
        con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cur = con.cursor()
        # Use the psycopg2.sql module instead of string concatenation
        # in order to avoid sql injection attacks.
        # Psql does not support IF NOT EXISTS for database.
        cur.execute(
            sql.SQL("CREATE DATABASE {}").format(sql.Identifier(newdbname)))
        cur.close()
        return 0
    except psycopg2.OperationalError as err:
        gn_log(" CREATE DATABASE ERROR ")
        gn_log(err)
        return -1
    except psycopg2.Error as err:
        gn_log('CREATE DATABASE ERROR ')
        gn_log(err)
        return -1
    finally:
        # Bug fix: the connection leaked when connect/execute raised; always
        # release it here (guard: con stays None if connect() itself failed).
        if con is not None:
            con.close()
def metadb_metanode_id_get(self, name):
    """Return the gnnodeid of the metanode named *name*, or -1 when absent.

    When several rows match, the last one wins (same as the original loop).
    """
    metaDF = pds.read_json(self.gnmetanode_filepath)
    gn_log("GnGrphSfOps: getting metanode: " + self.gnmetanode_filepath)
    # Fixes: removed the no-op metaDF.head(2); replaced the string-built
    # .query() expression (which breaks or is altered by names containing
    # quotes) with a plain boolean mask.
    rDF = metaDF[metaDF['gnnodename'] == name]
    if rDF.shape[0] > 0:
        # Last match, matching the original "loop and keep the last" logic.
        return rDF["gnnodeid"].iloc[-1]
    return -1
def ingest_node_intodb():
    """Ingest a previously uploaded node file into the graph database."""
    nodename = request.args.get('node', '')
    if nodename == '':
        print('GnAppSrv: Ingest node with: Invalid args ')
        return jsonify({
            "status": "FAIL",
            "statusmsg": "Invalid node name ",
        })
    print('GnAppSrv: Ingesting node ' + nodename + ' into graphdb ')
    fres = gndd_filedb_fileinfo_bynode(nodename, app.config["gnCfgDBFolder"])
    if not bool(fres):
        print('GnAppSrv: Ingest node with: Invalid file ')
        return jsonify({
            "status": "FAIL",
            "statusmsg": "Invalid node file ",
        })
    print('Filelog ')
    print(fres)
    gngraph_ingest_file_api(fres["filename"], fres["filetype"], fres["filedelim"],
                            nodename, fres["bizdomain"],
                            app.config["gnDataFolder"],
                            app.config["gnGraphDBCredsFolder"],
                            app.config["gnCfgSettings"])
    print('GnAppSrv: Node ' + nodename + ' is ingested ')
    # Mark the file as ingested, then ask the search service to remap.
    gndd_filedb_filestate_set(nodename, "INGESTED", app.config["gnCfgDBFolder"])
    gn_log('GnAppSrv: Uploaded new node. Remap metarepo ')
    res = gngraph_search_client.gnsrch_meta_remap_request()
    return {
        "status": "SUCCESS",
        "statusmsg": "Ingested node into database ",
        "node": nodename
    }
def datadb_nodes_write(self, dataDF, tgt_schema, tgt_table):
    """Append *dataDF* into <tgt_schema>.<tgt_table>.

    Returns 0 on success, -1 when not connected or on a write error.
    """
    gn_log("GnIngPgresDBOps: copying datanode "+tgt_schema+"."+tgt_table+" to db ")
    if not self.connected:
        gn_log("GnIngPgressDBOps: database is not connected. Failing the operation ")
        return -1
    gn_log("GnIngPgresDBOps: copying datanode for "+tgt_schema+"."+tgt_table+" to db "+self.__gdb_dbname)
    try:
        dataDF.to_sql(tgt_table, con=self.dbConnp, schema=tgt_schema,
                      if_exists='append', index=False, chunksize=5000,
                      method='multi')
    except (psycopg2.DatabaseError, exc.SQLAlchemyError) as e:
        # Fix: to_sql goes through SQLAlchemy, which wraps DBAPI errors in
        # its own exception types -- catching psycopg2 alone missed them.
        gn_log("GnIngPgresDBOps: database write error ")
        gn_log(e)
        return -1
    return 0
def datadb_load_metanode_df(self, bizdomain, nodename):
    """Load <nodename>.json for *bizdomain* into a DataFrame.

    Returns None when the domain directory or the node file is missing.
    """
    bizdomain_dir = self.gndata_graph_data_folder + "/" + bizdomain
    if not os.path.exists(bizdomain_dir):
        gn_log("GnGrphSfOps: biz domain "+bizdomain+" directory is not present")
        return None
    dnode_dir = bizdomain_dir + "/" + nodename
    dnode_file = dnode_dir + "/" + nodename + ".json"
    gn_log("GnGrphSfOps: reading datanode file " + dnode_file)
    # Consistency fix: use os.path.exists throughout (the original mixed
    # os.path.exists with a bare path.exists for the same check).
    if os.path.exists(dnode_file):
        return pds.read_json(dnode_file)
    return None
def gngrph_search_client_sendreq(t):
    """Send request dict *t* to the search service and return the JSON reply string.

    On socket failure, returns a synthesized ERROR response for t["cmd"]
    with empty node/edge lists.
    """
    s = socket.socket()
    try:
        s.connect((HOST, GPORT))
        gn_log("GnSrchOps: connected to srch service:" + HOST)
        tJstr = json.dumps(t)
        gn_log("GnSrchOps: send request: " + str(tJstr))
        s.send(tJstr.encode())
        # Read until the server closes the connection.
        resp = ''
        while True:
            bytes_read = s.recv(BUFFER_SIZE).decode()
            if not bytes_read:
                break
            resp += bytes_read
        rJ = json.loads(resp)
        gn_log("GnSrchOps: recv resp cmd " + rJ["cmd"] + " status " + rJ["status"])
    except socket.error as err:
        gn_log('GnSrchOps: internal socket error ' + str(err))
        # Build a well-formed error response so callers can json.loads() it.
        rJ = {}
        rJ["nodes"] = []
        rJ["edges"] = []
        rJ["nodelen"] = 0
        rJ["edgelen"] = 0
        rJ["status"] = "ERROR"
        rJ["errmsg"] = "Internal Server Error"
        resp_j = {}
        resp_j["cmd"] = t["cmd"]
        resp_j["status"] = "ERROR"
        resp_j["data"] = rJ
        resp = json.dumps(resp_j)
        # Bug fix: the error payload dict has no "cmd" key, so the original
        # rJ["cmd"] here raised KeyError; log the command from the request.
        gn_log('GnSrchOps: cmd ' + t["cmd"] + ' failed to execute ')
    finally:
        # Bug fix: the socket leaked on the error path; always release it.
        s.close()
    return resp
def gngrph_meta_remap_request():
    """Ask the search service to rebuild its meta repository mapping."""
    # nodemode 2 == nodes + edges (see gnsrch_api for the mode legend).
    req = {"cmd": "metaremap", "args": "", "nodemode": 2}
    resp = gngrph_search_client_sendreq(req)
    rJData = json.loads(resp)
    rdata = rJData["data"]
    gn_log('GnSrchOps: metarepo remap request response ')
    gn_log(rJData)
    return rJData
def gnmetanodes_fetch_api():
    """Fetch all metanodes through the search client and wrap them as JSON."""
    gn_log('GnAppSrv: request received for meta nodes search')
    # Optional query string; only used for logging here.
    srchqry = ''
    if 'srchqry' in request.args:
        srchqry = dequote(request.args['srchqry'])
        gn_log('GnAppSrv: search qry for metanodes : ' + srchqry)
    srchfilter = ""
    res = gngraph_search_client.gnsrch_metanodes_request()
    gn_log('GnAppSrv: metanode search with filter ' + srchfilter + ' SUCCESS : ')
    status = "SUCCESS" if res["status"] == "SUCCESS" else "ERROR"
    return {"status": status, "gndata": res["data"]}
def __init__(self, fileargs, gdbargs):
    """Load the uploaded file into a DataFrame and set up graph-db state.

    fileargs: dict with at least "ftype", "fpath", "fdelim", "nodename".
    gdbargs:  dict with "gdbflag", "staticfiles", "gndatafolder" flags/paths.
    """
    self.__fargs = fileargs
    self.__gdbargs = gdbargs
    # Parse the upload into self.__nodeDF. Parse errors are only printed
    # (best effort) -- NOTE(review): self.__nodeDF stays unset on failure,
    # and the success log still fires; confirm intended.
    if (self.__fargs["ftype"] == "csv"):
        try:
            self.__nodeDF = pds.read_csv(self.__fargs["fpath"], delimiter=self.__fargs["fdelim"])
        except Exception as err:
            print(err)
        gn_log('GnIngPdOps: csv file '+self.__fargs["fpath"]+' is parsed and created dataframe')
    if (self.__fargs["ftype"] == "json"):
        try:
            self.__nodeDF = pds.read_json(self.__fargs["fpath"])
        except Exception as err:
            print(err)
        gn_log('GnIngPdOps: json file '+self.__fargs["fpath"]+' is parsed and created dataframe')
    ### Establish metadb conn and datadb conn
    if (self.__gdbargs["gdbflag"]):
        self.gdb_conn_setup()
    if (self.__gdbargs["staticfiles"]):
        gngraph_folder = self.__gdbargs["gndatafolder"]+"/gngraph";
        self.__gngrp_sfops = GNGraphStaticFileOps(gngraph_folder)
    ## get config class (tracks max node/edge ids)
    if (self.__gdbargs["gndatafolder"]):
        id_cfg_path = self.__gdbargs["gndatafolder"]+"/gngraph/config"
        self.__gngrp_cfg = GNGraphConfig(id_cfg_path)
    ### set metanode columns for pgresdb and static files
    self.__metanode_columns=["gnnodeid", "gnnodename", "gnnodetype", "gnnodeprop", "uptmstmp"]
    self.__metaedge_columns=["gnedgeid", "gnedgename", "gnedgetype", "gnsrcnodeid", "gntgtnodeid", "gnedgeprop", "uptmstmp"]
    # -1 marks "no parent metanode yet"; assigned in create_node_metanodes_edges().
    self.gnnodeparentid = -1
    self.gnnode_parent_name = self.__fargs["nodename"]
def grphdb_create_table(self, nodename, schema):
    """Create the quoted data-node table <schema>."<nodename>" if absent."""
    gn_log('GnIngPgresDBOps: create table for node ' + nodename)
    # Quote the node name so mixed-case table names survive in Postgres.
    qualified = schema + '.' + '"' + nodename + '"'
    ddl = 'CREATE TABLE IF NOT EXISTS ' + qualified + ' (gnnodeid bigint NOT NULL PRIMARY KEY, gnnodetype text, gnmetanodeid bigint, gndatanodeprop json, gndatanodeobj json, uptmstmp timestamp);'
    gn_log("GnGrphDBOps: table_str " + ddl)
    self.db_create_table(ddl)
    gn_log("GnIngPgresDBOps: table " + qualified + " is created ")
def gdb_config_settings_page():
    """Render and persist the graph-db mode settings (static-file vs db mode)."""
    gdbcfg = GNGraphConfigModel(app.config["gnGraphDBCredsFolder"])
    gdbcfg_settings = gdbcfg.get_op()
    if gdbcfg_settings is None:
        # Defaults: static-file mode on, database mode off.
        gdbcfg_settings = {}
        gdbcfg_settings['sfmode'] = 1
        gdbcfg_settings['dbmode'] = 0
    gn_log('Current GnDBmode settings ')
    gn_log(gdbcfg_settings)
    pgres_conf = gn_pgresdb_getconfiguration(app.config["gnGraphDBCredsFolder"])
    gn_log(' GnDB Config settings ')
    gn_log(pgres_conf)
    if (request.method == 'POST'):
        print('GB Config POST')
        in_vars = request.form.to_dict()
        print(in_vars)
        cfg_setting = gdbcfg_settings
        if ('sfmode' in in_vars):
            print('GNGraphDB: sfmode ' + in_vars['sfmode'])
            if (in_vars['sfmode'] == 'on'):
                cfg_setting['sfmode'] = 1
            else:
                cfg_setting['sfmode'] = 0
        if ('dbmode' in in_vars):
            # Bug fix: this branch logged in_vars['sfmode'], which raises
            # KeyError when the sfmode checkbox is unchecked (absent key).
            print('GNGraphDB: dbmode ' + in_vars['dbmode'])
            if (in_vars['dbmode'] == 'on'):
                cfg_setting['dbmode'] = 1
            else:
                cfg_setting['dbmode'] = 0
        gdbcfg.upsert_op(cfg_setting)
        return redirect(url_for('gn_home'))
    return render_template('gdb_config.html',
                           title='Gnana Graph Server Config',
                           cfg=gdbcfg_settings,
                           pgres_conf=pgres_conf)
def verify_user_pwhash(self, user_login, passwhash, acctid):
    """Verify credentials against the auth server; return 1 on success, -1 otherwise."""
    # Build the login endpoint URL, honoring an optional port.
    if self.auth_server_port is None:
        server_url = "http://" + self.auth_server
    else:
        server_url = "http://" + self.auth_server + ":" + self.auth_server_port
    server_url = server_url + "/gnaccount/login"
    user_params = {'email': user_login, 'password': passwhash}
    gn_log("UserAuth: sending url " + server_url)
    resp = requests.get(url=server_url, params=user_params)
    if resp.status_code != 200:
        gn_log('UserAuth: Error in getting response status_code:' +
               str(resp.status_code))
        return -1
    user_resp = resp.json()
    if user_resp['status'] != "SUCCESS":
        return -1
    self.gn_user = user_resp["user"]
    # The account id returned by the server must match the registered one.
    if self.gn_user["id"] != acctid:
        gn_log(
            'UserAuth: User credentials didnt match with account registered'
        )
        return -1
    self.username = user_login
    self.password = passwhash
    gn_log('UserAuth: User credentials verified ')
    return 1
def gngraph_search_service_init(gnRootDir):
    """Ensure the gnsearch service is running; return its pid, or -1 on failure."""
    servname = "gnsearch"
    # Reuse an already-running service instance when there is one.
    srchpid = gngraph_search_check_service(gnRootDir)
    if srchpid != 0:
        gn_log('GnSrchOps: Search service is already runnig pid ' + str(srchpid))
        return srchpid
    srchpid = GnServiceStart(gnRootDir, servname)
    if srchpid == -1:
        gn_log('GnSrchOps: Search service failed to start ')
        return -1
    gn_log('GnSrchOps: Search service is running pid ' + str(srchpid))
    return srchpid
def gnsrch_api():
    """Search data nodes for the query passed in the request args."""
    gn_log('GnAppSrv: request for datanodes searching received')
    if 'srchqry' not in request.args:
        return jsonify({'status': 'ERROR', 'errmsg': "No Search query provided "})
    srchqry = request.args['srchqry']
    # Optional limits/mode; note lnodes defaults to int 10 but comes back
    # as a string when supplied in the URL (same as the original).
    lnodes = request.args['lnodes'] if 'lnodes' in request.args else 10
    nodemode = int(request.args['nodemode']) if 'nodemode' in request.args else 1
    # Strip surrounding quote characters from the raw query.
    srchqry_filtered = dequote(srchqry)
    slen = len(srchqry_filtered)
    gn_log('GnAppSrv: searching datanode with qry : ' + srchqry_filtered)
    # Nodemode: 1 Nodes only, 2 Nodes+Edges, 3 Nodes+Edges+Derived nodes
    res = gngraph_search_client.gnsrch_dataqry_request(
        srchqry_filtered, nodemode, lnodes)
    gn_log('GnAppSrv: fetched datanodes with filter ' + srchqry_filtered + ' SUCCESS')
    return {"status": "SUCCESS", "gndata": res}
def create_node_metanodes_edges(self):
    """Build the metanode for this file's node plus one metanode+edge per column.

    Allocates fresh ids from the config counters, builds self.__metanodeDF /
    self.__metaedgeDF, writes them to the db and/or static files depending
    on gdbargs flags, then persists the new max ids.
    """
    gn_log("GnIngPdOps: "+self.__fargs["nodename"]+" creating meta nodes and edges ")
    ##### Add new metanode and metanode attributes to metaDF
    metanodeprop = {"gnlabel": self.__fargs["nodename"], "bizdomain": self.__fargs["bizdomain"]}
    metanodepropstr = json.dumps(metanodeprop)
    utmstmp = pds.Timestamp.now()
    # Parent metanode gets the next free node id.
    gn_nodeid_max_c = self.__gngrp_cfg.get_nodeid_max()+1
    metanode_e = [gn_nodeid_max_c, self.__fargs["nodename"], "GNMetaNode", metanodepropstr, utmstmp]
    gn_log(metanode_e)
    self.__metanodeDF = pds.DataFrame([metanode_e], columns=self.__metanode_columns)
    #### Now update other meta node attributes (columns)
    self.gnnodeparentid = gn_nodeid_max_c
    self.gnnode_parent_name = self.__fargs["nodename"]
    nodedf_collist = self.__nodeDF.columns
    utmstmp = pds.Timestamp.now()
    attr_rel_name="HAS_ATTR"
    attr_edge_type="GNMetaNodeEdge"
    mnodeattr_arr=[]
    medgeattr_arr=[]
    nodename = self.__fargs["nodename"]
    #### Let us get latest id
    gn_nodeid_c = gn_nodeid_max_c
    gn_edgeid_c = self.__gngrp_cfg.get_edgeid_max()
    # One GNMetaNodeAttr node and one HAS_ATTR edge per dataframe column.
    for c in nodedf_collist:
        gn_nodeid_c=gn_nodeid_c+1
        gn_edgeid_c=gn_edgeid_c+1
        metanodeprop = {"gnlabel":c, "gnnodeparent":nodename, "bizdomain": self.__fargs["bizdomain"]}
        metaedgeprop = {"gntgtlabel":c, "gnsrclabel": nodename, "gnsrcnodeloc": "gnnodes", "gnsrcdomain": "gnmeta", "gntgtnodeloc": "gnnodes", "gntgtdomain":"gnmeta"}
        metanodepropstr = json.dumps(metanodeprop)
        metaedgepropstr = json.dumps(metaedgeprop)
        metanode_e = [gn_nodeid_c, c, "GNMetaNodeAttr", metanodepropstr, utmstmp]
        metaedge_e = [gn_edgeid_c, attr_rel_name, attr_edge_type, self.gnnodeparentid, gn_nodeid_c, metaedgepropstr, utmstmp]
        mnodeattr_arr.append(metanode_e)
        medgeattr_arr.append(metaedge_e)
    metaColDF = pds.DataFrame(mnodeattr_arr, columns=self.__metanode_columns)
    # NOTE(review): DataFrame.append is deprecated (removed in pandas 2.0);
    # pds.concat would be the forward-compatible equivalent.
    self.__metanodeDF = self.__metanodeDF.append(metaColDF, ignore_index=True)
    self.__metaedgeDF = pds.DataFrame(medgeattr_arr, columns=self.__metaedge_columns)
    ### Now write metanodes and edges to db and static files
    if (self.__gdbargs["dbmode"] == 1):
        self.__gdbDBConnp.metadb_nodes_write(self.__metanodeDF)
        self.__gdbDBConnp.metadb_edges_write(self.__metaedgeDF)
    if (self.__gdbargs["sfmode"] == 1):
        print("gnGraphIngest: write nodes and edges to static files ")
        # Timestamps must be stringified to stay JSON serializable on disk.
        self.__metanodeDF["uptmstmp"] = self.__metanodeDF["uptmstmp"].astype(str)
        self.__metaedgeDF["uptmstmp"] = self.__metaedgeDF["uptmstmp"].astype(str)
        self.__gngrp_sfops.metadb_nodes_append_write(self.__metanodeDF)
        self.__gngrp_sfops.metadb_edges_append_write(self.__metaedgeDF)
    #### Update nodeidmax and edgeidmax
    self.__gngrp_cfg.save_nodeid_max(gn_nodeid_c)
    self.__gngrp_cfg.save_edgeid_max(gn_edgeid_c)
    gn_log("GnIngPdOps: "+self.__fargs["nodename"]+" meta nodes and edges created successfully ")
def __gn_graph_init(gnp_thread_flag, gnRootDir):
    """Initialize the graph/search layer; returns 1 once initialization is done.

    With gnp_thread_flag == 1 the search runs as a separate service; otherwise
    a local Spark session is created and search ops are initialized in-process.
    """
    global gnsrch_thread_flag
    global gnp_spark
    global gnsrch_ops
    # Bug fix: gngraph_init_flag is both read and assigned in this function
    # but was never declared global, so the in-process path raised
    # UnboundLocalError on the first read below.
    global gngraph_init_flag
    if (gnp_thread_flag == 1):
        # we are spawning GnSearch Spark Application
        gngraph_search_service_init(gnRootDir)
        gngraph_init_flag = 1
        gnsrch_thread_flag = 1
        return gngraph_init_flag
    gn_log('GnSrchOps: Initializing Spark Session for Search ' + str(gngraph_init_flag))
    if (gngraph_init_flag == 1):
        gn_log('GnAppSrv: Graph is already initialized ')
        return gngraph_init_flag
    app_name = "gngraph_spk"
    gn_log('GnAppSrv: Initializing Spark Session ')
    conf = SparkConf()
    conf.set('spark.executor.memory', '4g')
    conf.set('spark.driver.memory', '4g')
    gnp_spark = SparkSession \
        .builder \
        .appName("gnp") \
        .config("spark.some.config.option", "some-value") \
        .getOrCreate()
    gn_log('GnAppSrv: Spark session acquired ')
    gnp_spark.sparkContext.setLogLevel("WARN")
    ### Initialize GNGraph Sessions
    gn_log('GnAppSrv: Initializing Search Ops ')
    gnsrch_ops = gngrph_search_init(gnp_spark, app.config["gnDataFolder"],
                                    app.config["gnGraphDBCredsFolder"],
                                    app.config["gnCfgSettings"])
    gngraph_init_flag = 1
    gn_log('GnAppSrv: gngraph Init COMPLETE SUCCESS ' + str(gngraph_init_flag))
    return gngraph_init_flag
def create_node_datanodes_edges(self):
    """Create one GNDataNode per dataframe row plus IS-edges back to the metanode.

    Allocates node/edge ids from the config counters, writes the data nodes
    to the per-domain table and/or static files, mirrors them into the meta
    node table, then builds and writes the edge rows and persists max ids.
    """
    gn_log("GnIngPdOps: "+self.__fargs["nodename"]+" creating data nodes and edges ")
    gn_node_parent_id = self.get_metanode_parent_id()
    gn_nodeid_max_c = self.__gngrp_cfg.get_nodeid_max()
    gn_edgeid_max_c = self.__gngrp_cfg.get_edgeid_max()
    # Round-trip each row through JSON so gndatanodeobj holds the full
    # original record as a string.
    jstr = self.__nodeDF.to_json(orient='records')
    jobj = json.loads(jstr)
    # convert json elements to strings and then load to df.
    jDF = pds.DataFrame([json.dumps(e) for e in jobj], columns=["gndatanodeobj"])
    self.__nodeDF["gndatanodeobj"] = [json.dumps(e) for e in jobj]
    gn_nodeid_c = gn_nodeid_max_c+1
    gn_edgeid_c = gn_edgeid_max_c+1
    # Sequential node ids starting just past the current max.
    self.__nodeDF['gnnodeid'] = pds.RangeIndex(stop=self.__nodeDF.shape[0])+gn_nodeid_c
    self.__nodeDF['gnnodetype']="GNDataNode"
    self.__nodeDF['gnmetanodeid']= gn_node_parent_id
    gn_nodeid_max_n = self.__nodeDF.shape[0]+gn_nodeid_c-1
    # Each data node's label is parent name + its node id.
    self.__nodeDF["gndatanodeprop"] = self.__nodeDF["gnnodeid"].apply(lambda x: json.dumps({'gnlabel':self.gnnode_parent_name+str(x)}))
    #### Update utimestamp
    utmstmp = pds.Timestamp.now()
    self.__nodeDF['uptmstmp'] = utmstmp
    self.__nodeEdgeDF = self.__nodeDF.copy()
    self.__gndatanodeDF = self.__nodeDF[["gnnodeid","gnnodetype","gnmetanodeid","gndatanodeprop","gndatanodeobj", "uptmstmp"]]
    self.__gnmdatanodeDF = self.__gndatanodeDF
    # Create the destination table/dirs first, then write the data nodes.
    if (self.__gdbargs["dbmode"] == 1):
        self.__gdbDBConnp.grphdb_create_table(self.__fargs["nodename"], self.__fargs["bizdomain"])
    if (self.__gdbargs["sfmode"] == 1):
        self.__gngrp_sfops.create_gndata_datadirs(self.__fargs["bizdomain"], self.__fargs["nodename"])
    ### Now write datanodes to domain schema table
    if (self.__gdbargs["dbmode"] == 1):
        self.__gdbDBConnp.datadb_nodes_write(self.__gndatanodeDF, self.__fargs["bizdomain"], self.__fargs["nodename"])
    if (self.__gdbargs["sfmode"] == 1):
        # for static files timestamp has to be JSON serializable
        self.__gndatanodeDF["uptmstmp"] = self.__gndatanodeDF["uptmstmp"].astype(str)
        self.__gngrp_sfops.datadb_nodes_write(self.__gndatanodeDF, self.__fargs["bizdomain"], self.__fargs["nodename"])
    ### save node id
    self.__gngrp_cfg.save_nodeid_max(gn_nodeid_max_n)
    ### Copy datanodes to gnnodes: pull gnlabel out of the prop JSON.
    # NOTE(review): pds.io.json.json_normalize is a deprecated alias of
    # pandas.json_normalize.
    cdf=pds.io.json.json_normalize(self.__gnmdatanodeDF["gndatanodeprop"].apply(json.loads).apply(lambda x: x))
    self.__gnmdatanodeDF["gnnodename"]=cdf['gnlabel']
    ### set gnnodeprop
    self.__gnmdatanodeDF["gnnodeprop"] = self.__gnmdatanodeDF["gnnodeid"].apply(lambda x: json.dumps({"gnnodeparent":self.gnnode_parent_name, "bizdomain": self.__fargs["bizdomain"]}))
    self.__gnMetaDatanodeDF = self.__gnmdatanodeDF[["gnnodeid", "gnnodename", "gnnodetype", "gnnodeprop", "uptmstmp"]]
    ###################### Write datanodes to metatable
    if (self.__gdbargs["dbmode"] == 1):
        self.__gdbDBConnp.metadb_nodes_write(self.__gnMetaDatanodeDF)
    if (self.__gdbargs["sfmode"] == 1):
        self.__gngrp_sfops.metadb_nodes_append_write(self.__gnMetaDatanodeDF)
    ## Rename column gnmetanodeid to gnsrcnodeid (edge: metanode -> data node)
    self.__nodeEdgeDF.rename( columns=({'gnnodeid':'gntgtnodeid', 'gnmetanodeid':'gnsrcnodeid'}), inplace=True)
    ## Add gnedgeid
    self.__nodeEdgeDF['gnedgeid'] = pds.RangeIndex(stop=self.__nodeEdgeDF.shape[0])+gn_edgeid_c
    #### Add gnedgename IS
    relname="IS"
    self.__nodeEdgeDF["gnedgename"] = relname
    ### Add gnedgetype
    edgetype="GNDataNodeEdge"
    self.__nodeEdgeDF["gnedgetype"] = edgetype
    #### Add gnedgeprop
    self.__nodeEdgeDF["gnedgeprop"] = self.__nodeEdgeDF["gntgtnodeid"].apply(lambda x: json.dumps({'gntgtlabel':self.gnnode_parent_name+str(x), 'gnsrclabel':self.gnnode_parent_name, 'gnsrcnodeloc': "gnnodes", "gnsrcdomain": "gnmeta", "gntgtnodeloc": self.__fargs["nodename"], "gntgtdomain": self.__fargs["bizdomain"] }))
    #### Update utimestamp
    utmstmp = pds.Timestamp.now()
    self.__nodeEdgeDF['uptmstmp'] = utmstmp
    ##### Select edgenode columns and prepare for write
    self.__gndatanodeEdgeDF = self.__nodeEdgeDF[["gnedgeid","gnedgename","gnedgetype","gnsrcnodeid","gntgtnodeid","gnedgeprop", "uptmstmp"]]
    ## write edges to database
    if (self.__gdbargs["dbmode"] == 1):
        self.__gdbDBConnp.metadb_edges_write(self.__gndatanodeEdgeDF)
    if (self.__gdbargs["sfmode"] == 1):
        self.__gndatanodeEdgeDF["uptmstmp"] = self.__gndatanodeEdgeDF["uptmstmp"].astype(str)
        self.__gngrp_sfops.metadb_edges_append_write(self.__gndatanodeEdgeDF)
    # save edgeid max
    gn_edgeid_max_n = self.__gndatanodeEdgeDF.shape[0]+gn_edgeid_c-1
    self.__gngrp_cfg.save_edgeid_max(gn_edgeid_max_n)
    gn_log("GnIngPdOps: "+self.__fargs["nodename"]+" data nodes and edges created succesfully")
def gn_config_init(app):
    """Populate app.config path entries under gnRootDir and create the directories.

    Also loads the saved graph/db configuration models into
    app.config["gnCfgSettings"] (with the db settings nested under
    "gnDBCfgSettings").
    """
    gn_log('GnCfg: Initializing Config directories ')
    # Derive all working folders from gnRootDir.
    app.config["gnDataFolder"] = app.config["gnRootDir"] + "/gndata"
    app.config["gnDBFolder"] = app.config["gnRootDir"] + "/gndb"
    app.config["gnCfgDBFolder"] = app.config["gnRootDir"] + "/gnconfigdb"
    app.config["gnUploadsFolder"] = app.config["gnDataFolder"] + "/uploads"
    app.config[
        "gnDiscoveryFolder"] = app.config["gnDataFolder"] + "/datadiscovery"
    app.config[
        "gnProfileFolder"] = app.config["gnDiscoveryFolder"] + "/profile"
    app.config["gnGraphFolder"] = app.config["gnDataFolder"] + "/gngraph"
    app.config[
        "gnGraphDBCredsFolder"] = app.config["gnRootDir"] + "/creds/gngraph"
    # Create each folder on first run only.
    if not os.path.isdir(app.config["gnDataFolder"]):
        os.mkdir(app.config["gnDataFolder"])
        gn_log('GnCfg: ' + app.config["gnDataFolder"] + " is created")
    if not os.path.isdir(app.config["gnUploadsFolder"]):
        os.mkdir(app.config["gnUploadsFolder"])
        gn_log('GnCfg: ' + app.config["gnUploadsFolder"] + " is created")
    if not os.path.isdir(app.config["gnCfgDBFolder"]):
        os.mkdir(app.config["gnCfgDBFolder"])
        gn_log('GnCfg: ' + app.config["gnCfgDBFolder"] + " is created")
    if not os.path.isdir(app.config["gnDiscoveryFolder"]):
        os.mkdir(app.config["gnDiscoveryFolder"])
        gn_log('GnCfg: ' + app.config["gnDiscoveryFolder"] + " is created ")
    if not os.path.isdir(app.config["gnProfileFolder"]):
        os.mkdir(app.config["gnProfileFolder"])
        gn_log('GnCfg: ' + app.config["gnProfileFolder"] + " is created")
    if not os.path.isdir(app.config["gnGraphFolder"]):
        os.mkdir(app.config["gnGraphFolder"])
    # check if config and data directory are part of gngraph
    cfg_dir = app.config["gnGraphFolder"] + "/config"
    data_dir = app.config["gnGraphFolder"] + "/data"
    if not os.path.isdir(cfg_dir):
        os.mkdir(cfg_dir)
    if not os.path.isdir(data_dir):
        os.mkdir(data_dir)
    gn_log('GnCfg: ' + app.config["gnGraphFolder"] + " and subdirs are created")
    app.config["gnLogDir"] = app.config["gnRootDir"] + "/gnlog"
    app.config["gnLogFile"] = "gnpath.log"
    app.config["gnLogFilePath"] = app.config["gnLogDir"] + "/" + app.config[
        "gnLogFile"]
    ### Read Config settings
    gncfg = GNGraphConfigModel(app.config["gnGraphDBCredsFolder"])
    gncfg_settings = gncfg.get_op()
    gndb_cfg = GNGraphDBConfigModel(app.config['gnGraphDBCredsFolder'])
    gndb_cfg_settings = gndb_cfg.get_op()
    # Nest the db settings inside the general settings dict.
    gncfg_settings["gnDBCfgSettings"] = gndb_cfg_settings
    app.config["gnCfgSettings"] = gncfg_settings
def gn_pgresdb_getconfiguration(credfpath):
    """Load and return the saved Postgres DB configuration from *credfpath*."""
    cfg_model = GNGraphDBConfigModel(credfpath)
    gn_log('GnDBCfg: getting postgres config ')
    return cfg_model.get_op()
def testdb_conn():
    """Test a Postgres connection using params from the request args; JSON result."""
    dbIP = request.args.get('dbIP', '')
    dbPort = request.args.get('dbPort', '')
    dbUser = request.args.get('dbUser', '')
    dbPasswd = request.args.get('dbPasswd', '')
    dbname = request.args.get('dbname', '')
    # newdb == 1 means "validate server creds for creating a new database".
    newdb = 0
    if 'newdbchk' in request.args and request.args['newdbchk'] == 'true':
        newdb = 1
    gn_log('new db val ' + str(newdb))
    gn_log('Test DB Connection dbIP ' + dbIP + " port " + dbPort)
    if dbIP == '' or dbPort == '' or dbUser == '' or dbPasswd == '':
        gn_log('GNPAppSrv: DB TestConn: Invalid args ')
        return jsonify({
            "status": "FAIL",
            "connect_status": 0,
            "statusmsg": "Invalid Input Arguments"
        })
    if newdb == 1:
        # For a new database, validate against the maintenance db "postgres".
        pgdb_cls = GNGraphPgresDBOps(dbIP, dbPort, dbUser, dbPasswd,
                                     "postgres", "")
    elif dbname:
        pgdb_cls = GNGraphPgresDBOps(dbIP, dbPort, dbUser, dbPasswd, dbname,
                                     "")
    else:
        gn_log('TestConn: dbname not provided for testing ')
        return jsonify({
            "status": "FAIL",
            "connect_status": 0,
            "statusmsg": "Database name for testing not provided"
        })
    return jsonify({"status": "SUCCESS",
                    "connect_status": pgdb_cls._isconnected()})
def gnview_cola_api():
    """Render the cola graph-view page; show search only with a live session."""
    disp = bool(check_server_session())
    gn_log(' gnview cola is initiated')
    return render_template('gnview/gnsrchview.html', disp_srch=disp)
def pgresdb_config():
    """GET: show the saved Postgres connection form; POST: save (and optionally
    initialize) the graph database from the submitted settings."""
    form = ConnectServerForm()
    pgres_conf = GNGraphDBConfigModel(app.config["gnGraphDBCredsFolder"])
    conf_settings = GNGraphConfigModel(app.config["gnGraphDBCredsFolder"])
    conn = {}
    if (request.method == 'GET'):
        conn = pgres_conf.get_op()
        print('app srv: ')
        print(conn)
        if conn:
            # Pre-fill the form from the saved configuration.
            srv_ip_encode = base64.urlsafe_b64encode(
                conn['serverIP'].encode("utf-8"))
            srv_encode_str = str(srv_ip_encode, "utf-8")
            print('pgresConfForm: session ')
            form.serverIP.data = conn['serverIP']
            form.serverPort.data = conn['serverPort']
            form.username.data = conn['username']
            form.dbname.data = conn['dbname']
        return render_template('pgres_db_setup.html',
                               title='Connect Graph Server',
                               form=form,
                               disp_srch=False,
                               cfg=conn)
    if (request.method == 'POST'):
        print('pgres_db_conf: POST method input vars')
        in_vars = request.form.to_dict()
        print(in_vars)
        # newdbchk == 'y' means "create and initialize a brand new database".
        if 'newdbchk' in in_vars:
            newdbchk = in_vars['newdbchk']
            if (newdbchk == 'y'):
                print('new db check ' + newdbchk)
        else:
            newdbchk = 'n'
        # NOTE(review): "if 1 or ..." short-circuits so WTForms validation
        # never runs and the else branch below is dead -- confirm intended.
        if 1 or form.validate_on_submit():
            result = request.form.to_dict()
            req_dict = pgres_conf.req_fields_json(result)
            print('DBConfig: POST ')
            print(req_dict)
            if (newdbchk == 'y'):
                pgresdb_ops = GNGraphPgresDBMgmtOps(req_dict['serverIP'],
                                                    req_dict['serverPort'],
                                                    req_dict['username'],
                                                    req_dict['password'],
                                                    req_dict['dbname'], '')
                iscreatedb = 1
                res = pgresdb_ops.gngraph_db_initialize(
                    req_dict['dbname'], iscreatedb)
                if (res < 0):
                    gn_log('GNPAppSrv: GNGraph DB Initialization failed ')
                    flash(
                        f'Error Initializing GN Graph Database Please look into log file for errors',
                        'danger')
                    return render_template('pgres_db_setup.html',
                                           title='Graph DB Settings',
                                           form=form,
                                           disp_srch=False)
                gn_log('GNPAppSrv: GNGraph database initialized SUCCESS')
            # Persist the submitted settings.
            pgres_conf.upsert_op(req_dict)
            res = "Success"
            if res == "Error":
                flash(f'Error connecting to db server {form.serverIP.data}',
                      'danger')
                pgres_conf.delete_op(req_dict)
                return render_template('pgres_db_setup.html',
                                       title='Graph DB Settings',
                                       form=form,
                                       disp_srch=False)
            else:
                session['serverIP'] = form.serverIP.data
                session['serverPort'] = form.serverPort.data
                flash(f'GNGraph database settings has been saved', 'success')
                return redirect(
                    url_for('gdb_config_settings_page', disp_srch=True))
        else:
            print('Pgres Conf: Error in form submit')
            flash(f'Error on submit ', form.errors)
            return render_template('pgres_db_setup.html',
                                   title='Connect Graph Server',
                                   form=form,
                                   disp_srch=False)
def gngraph_db_initialize(self, newdbname, iscreatedb):
    """Create (optionally) and initialize the graph database *newdbname*.

    Creates the gnmeta schema with gnnodes/gnedges/gnbizrules tables plus
    the default business-domain schemas. Returns 0 on success, negative on
    failure.
    """
    gn_log('GNGraphDBInit: Initializing Graph on database ' + newdbname)
    ## First check connecting to postgres (maintenance database)
    self.dbname = "postgres"
    ## first create gngraph database when requested
    if (iscreatedb == 1):
        res = self.db_create_database(newdbname)
        if (res < 0):
            gn_log(
                'GNGraphDBInit: ERROR Failed to create new database : ' +
                newdbname)
            return res
        gn_log('GNGraphDBInit: database ' + newdbname + ' is created ')
    ## Now connect using new database
    self.dbname = newdbname
    connected = self.db_connect()
    if (connected == 0):
        gn_log('GNGraphDBInit: Failed to connect new database:' + newdbname)
        return -1
    ### create the metadata schema and its three tables
    self.db_create_schema(newdbname, "gnmeta")
    gn_log('GNGraphDBInit: gnmeta schema created ')
    gnnode_tablestr = "CREATE TABLE IF NOT EXISTS gnmeta.gnnodes (gnnodeid bigint NOT NULL PRIMARY KEY, gnnodename text, gnnodetype text, gnnodeprop json, uptmstmp timestamp);"
    self.db_create_table(gnnode_tablestr)
    gn_log('GNGraphDBInit: table gnnodes is created ')
    gnedge_tablestr = "CREATE TABLE IF NOT EXISTS gnmeta.gnedges ( gnedgeid bigint NOT NULL PRIMARY KEY, gnedgename text, gnedgetype text, gnsrcnodeid bigint, gntgtnodeid bigint, gnedgeprop json, uptmstmp timestamp);"
    self.db_create_table(gnedge_tablestr)
    gn_log('GNGraphDBInit: table gnedges is created ')
    gnbizrules_tablestr = "CREATE TABLE IF NOT EXISTS gnmeta.gnbizrules ( gnrelid bigint NOT NULL PRIMARY KEY, gnrelname text, gnreltype text, gnsrcnodeid bigint, gntgtnodeid bigint, gnmatchnodeid bigint, gnrelprop json, gnrelobj json, state text, freq text, uptmstmp timestamp);"
    self.db_create_table(gnbizrules_tablestr)
    gn_log('GNGraphDBInit: table gnbizrules is created ')
    ## Create default Business Domains
    self.db_create_schema(newdbname, "customer")
    self.db_create_schema(newdbname, "product")
    self.db_create_schema(newdbname, "sales")
    gn_log('GNGraphDBInit: schemas customer, product, sales are created ')
    gn_log('GNGraphDBInit: GNGraph Intialization Successful ')
    return 0
def upload_file():
    """Upload page: GET lists uploaded files, POST stores (and optionally
    ingests) a new CSV/JSON file."""
    _srch = True if check_server_session() else False
    if request.method == 'GET':
        fres = gndd_filedb_filelist_get(app.config["gnCfgDBFolder"])
        flen = len(fres)
        return render_template('upload.html',
                               disp_srch=_srch,
                               file_res=fres,
                               flen=flen)
    if request.method == 'POST':
        if 'fd' not in request.files:
            flash('No file part', 'danger')
            return redirect(request.url)
        files = request.files["fd"]
        fdesc = request.form["fdesc"]
        ftype = request.form["ftype"]
        fdelim = request.form["fdelim"]
        fbizdomain = request.form["bizdomain"]
        fnodename = request.form["nnameid"]
        # Checkbox only appears in the form data when checked.
        if 'ingest_mode' in request.form:
            fingest = request.form['ingest_mode']
        else:
            fingest = 'off'
        if not allowed_file(files.filename):
            flash('Please upload CSV or JSON file', 'danger')
            return redirect(request.url)
        elif files and allowed_file(files.filename):
            fname = secure_filename(files.filename)
            # Bug fix: split on the LAST dot only -- names like "a.b.csv"
            # made the original two-target split(".") raise ValueError.
            file_name, file_ext = fname.rsplit(".", 1)
            # Strip non-word chars from the stem to get a safe filename.
            filename = re.sub(r'\W+', '', file_name) + f".{file_ext}"
            print('GnAppServ: file name ' + filename + " filenode: " + file_name)
            files.save(os.path.join(app.config['gnUploadsFolder'], filename))
            gn_log("GnAppServ: uploaded file " + filename + " successfully ")
            # Fall back to the file stem / "other" domain when not supplied.
            if fnodename == "":
                fnodename = file_name
            if fbizdomain == "select":
                fbizdomain = "other"
            fp = Path(app.config['gnUploadsFolder'] + "/" + filename)
            fsize = fp.stat().st_size
            gndd_filedb_insert_file_api(filename, fsize, ftype, fbizdomain,
                                        fdesc, fdelim, fnodename,
                                        app.config["gnCfgDBFolder"])
            fres = gndd_filedb_filelist_get(app.config["gnCfgDBFolder"])
            flen = len(fres)
            if (fingest == 'on'):
                gn_log('GnAppServ: File ingestion is on ')
                gn_log('GNAppSrv: Ingest file ' + fnodename +
                       ' Business Domain ' + fbizdomain)
                gngraph_ingest_file_api(filename, ftype, fdelim, fnodename,
                                        fbizdomain,
                                        app.config["gnDataFolder"],
                                        app.config["gnGraphDBCredsFolder"],
                                        app.config["gnCfgSettings"])
                gn_log('GnAppServ: Uploaded new file. Remap metarepo ')
                res = gngraph_search_client.gnsrch_meta_remap_request()
            flash(f'File (unknown) successfully uploaded', 'success')
            return render_template('upload.html',
                                   disp_srch=_srch,
                                   file_res=fres,
                                   flen=flen)
app.config["gnGraphDBCredsFolder"], app.config["gnCfgSettings"]) print('GnAppSrv: Node ' + nodename + ' is ingested ') ### Set the state gndd_filedb_filestate_set(nodename, "INGESTED", app.config["gnCfgDBFolder"]) gn_log('GnAppSrv: Uploaded new node. Remap metarepo ') res = gngraph_search_client.gnsrch_meta_remap_request() rjson = { "status": "SUCCESS", "statusmsg": "Ingested node into database ", "node": nodename } return rjson if __name__ == '__main__': gn_log('GnAppSrv: Starting GnSearch thread ') gngraph_search_client.__gn_graph_init(gnp_thread_flag, app.config["gnRootDir"]) gn_log('GnAppSrv: Running Flask App ') if (gnp_thread_flag == 0): __gn_graph_init() app.run(host='0.0.0.0', port=5050, debug=True) gn_log('GnAppSrv: Started Flask App ')