def check():
    """Mark NEW FileUpload rows COMPLETE once a provenance DROP event exists.

    For every FileUpload with status "NEW", scan the Accumulo
    "provenanceIndex" table for the upload's UUID, expand the matching UID
    protobuf entries into shard ranges, and scan the "provenance" table for
    an EVENTTYPE field whose value is DROP.  When found, the row is saved
    with status "COMPLETE".  Returns silently when no AccumuloCluster row
    is configured.
    """
    # Hoisted out of the per-object loop: the import, configuration and
    # cluster lookup are invariant (the original re-ran them per object and
    # printed the "Checking" line twice).
    import pysharkbite

    model = apps.get_model(app_label='query', model_name='FileUpload')
    objs = model.objects.filter(status="NEW")

    cluster_model = apps.get_model(app_label='query',
                                   model_name='AccumuloCluster')
    accumulo_cluster = cluster_model.objects.first()
    if accumulo_cluster is None:
        return

    conf = pysharkbite.Configuration()
    conf.set("FILE_SYSTEM_ROOT", "/accumulo")

    for obj in objs:
        # No need to re-check obj.status: the queryset already filters NEW.
        print("Checking " + str(obj.uuid))
        zk = pysharkbite.ZookeeperInstance(accumulo_cluster.instance,
                                           accumulo_cluster.zookeeper,
                                           1000, conf)
        # NOTE(review): hard-coded root credentials — confirm intended.
        user = pysharkbite.AuthInfo("root", "secret", zk.getInstanceId())
        connector = pysharkbite.AccumuloConnector(user, zk)

        index_table_ops = connector.tableOps("provenanceIndex")
        auths = pysharkbite.Authorizations()
        auths.addAuthorization("PROV")

        index_scanner = index_table_ops.createScanner(auths, 2)
        index_scanner.addRange(pysharkbite.Range(str(obj.uuid)))
        index_set = index_scanner.getResultSet()

        provops = connector.tableOps("provenance")

        for index_key_value in index_set:
            # Index values are serialized Uid_pb2.List protobufs.
            protobuf = Uid_pb2.List()
            protobuf.ParseFromString(index_key_value.getValue().get().encode())
            for uidvalue in protobuf.UID:
                # Column qualifier layout: shard \x00 datatype
                cq = index_key_value.getKey().getColumnQualifier()
                shard = cq.split("\u0000")[0]
                datatype = cq.split("\u0000")[1]

                start_key = pysharkbite.Key()
                stop_key = pysharkbite.Key()
                start_key.setRow(shard)
                stop_key.setRow(shard)
                start_key.setColumnFamily(datatype + "\x00" + uidvalue)
                stop_key.setColumnFamily(datatype + "\x00" + uidvalue + "\xff")

                scanner = provops.createScanner(auths, 10)
                scanner.addRange(
                    pysharkbite.Range(start_key, True, stop_key, False))
                for keyvalue in scanner.getResultSet():
                    key = keyvalue.getKey()
                    # Column qualifier layout: fieldname \x00 fieldvalue
                    fieldname = key.getColumnQualifier().split("\u0000")[0]
                    fieldvalue = key.getColumnQualifier().split("\u0000")[1]
                    if fieldname == "EVENTTYPE" and fieldvalue == "DROP":
                        obj.status = "COMPLETE"
                        obj.save()
                        break
                scanner.close()
        index_scanner.close()
def run_edge_query(query_id):
    """Run the EdgeQuery rows matching *query_id* against the Accumulo
    "graph" table, writing hits as Result rows attached to the query's
    ScanResult.

    Marks every matching EdgeQuery row running, scans at most 1001 graph
    entries, decodes each value as an EdgeData_pb2.EdgeValue protobuf, and
    finally flags the query finished.  Returns early when there is no
    matching EdgeQuery, no AccumuloCluster, or no ScanResult.
    """
    model = apps.get_model(app_label='query', model_name='EdgeQuery')
    objs = model.objects.filter(query_id=query_id)
    # BUG FIX: with an empty queryset the original code fell through and
    # raised NameError on the unbound loop variable 'obj' below.
    if not objs:
        return
    for obj in objs:
        obj.running = True
        obj.save()
    # NOTE: 'obj' deliberately refers to the LAST row after this loop,
    # matching the original behavior — confirm that is intended.

    import pysharkbite
    conf = pysharkbite.Configuration()
    conf.set("FILE_SYSTEM_ROOT", "/accumulo")
    cluster_model = apps.get_model(app_label='query',
                                   model_name='AccumuloCluster')
    accumulo_cluster = cluster_model.objects.first()
    if accumulo_cluster is None:
        return

    zk = pysharkbite.ZookeeperInstance(accumulo_cluster.instance,
                                       accumulo_cluster.zookeeper, 1000, conf)
    user = pysharkbite.AuthInfo("root", "secret", zk.getInstanceId())
    connector = pysharkbite.AccumuloConnector(user, zk)

    auths = pysharkbite.Authorizations()
    if obj.auths:
        for auth in obj.auths.split(","):
            auths.addAuthorization(auth)

    sres_model = apps.get_model(app_label='query', model_name='ScanResult')
    res_model = apps.get_model(app_label='query', model_name='Result')
    sr = sres_model.objects.filter(query_id=obj.query_id).first()
    if not sr:
        print("No scan result, returning")
        return
    print("here")

    graph_table_ops = connector.tableOps("graph")
    scanner = graph_table_ops.createScanner(auths, 10)
    # Renamed from 'range', which shadowed the builtin.
    scan_range = pysharkbite.Range(obj.query, True,
                                   obj.query + "\uffff" + "\uffff",
                                   False)  ## for now the range should be this
    scanner.addRange(scan_range)
    resultset = scanner.getResultSet()
    count = 0
    try:
        for index_key_value in resultset:
            value = "0"
            ## row will be the to; direction will be the cf
            to_value = ""
            direction = "one"
            try:
                protobuf = EdgeData_pb2.EdgeValue()
                protobuf.ParseFromString(
                    index_key_value.getValue().get_bytes())
                value = str(protobuf.count) + "/" + protobuf.uuid_string
                to_value = index_key_value.getKey().getRow().split(
                    "\u0000")[1]
                direction = index_key_value.getKey().getColumnFamily().split(
                    "/")[1]
                direction_split = direction.split("-")
                # Skip self-edges and malformed direction labels.
                if (len(direction_split) != 2
                        or direction_split[0] == direction_split[1]):
                    continue
            except Exception as e:
                # Narrowed from bare except: skip undecodable entries.
                print(e)
                continue
            scanresult = res_model.objects.create(
                scanResult=sr,
                value=value,
                row=to_value,
                cf=direction,
                cq=index_key_value.getKey().getColumnQualifier())
            scanresult.save()
            count = count + 1
            if count > 1000:  # cap result volume
                break
        sr.is_finished = True
        sr.save()
        scanner.close()
    except Exception as e:
        # Narrowed from bare except; e.g. user does not have PROV.
        print(e)

    obj.running = False
    obj.finished = True
    obj.save()
def post(self, request, *args, **kwargs):
    """Apply edited field values from the POST form to Accumulo.

    For every field whose new value differs from its ``original.<name>``
    counterpart, writes a delete+put pair to the data table and index
    mutations (add new value, tombstone old value via REMOVEDUID) to the
    index table, then redirects back to the search page.
    """
    import datetime  # hoisted: the original imported this inside the loop

    query = request.POST.get('query')
    shard = request.POST.get('shard')
    authstring = request.POST.get('auths')
    datatype = request.POST.get('datatype')
    uid = request.POST.get('uid')

    # Partition the form into original values ("original.<field>") and
    # newly submitted values (everything else except the bookkeeping keys).
    originals = {}
    news = {}
    for key, value in request.POST.items():
        if key == "query":
            query = value
        elif key.startswith("original"):
            split = key.split(".")
            originals[split[1]] = value
        elif key in ("shard", "datatype", "uid", "auths",
                     "csrfmiddlewaretoken"):
            pass
        else:
            news[key] = value

    cluster = AccumuloCluster.objects.first()
    user = pysharkbite.AuthInfo(cluster.user, cluster.password,
                                ZkInstance().get().getInstanceId())
    connector = pysharkbite.AccumuloConnector(user, ZkInstance().get())
    auths = pysharkbite.Authorizations()
    if authstring:
        auths.addAuthorization(authstring)

    table = cluster.dataTable
    index_table = cluster.indexTable
    table_operations = connector.tableOps(table)
    index_table_ops = connector.tableOps(index_table)
    writer = table_operations.createWriter(auths, 10)
    index_writer = index_table_ops.createWriter(auths, 5)

    mutation = pysharkbite.Mutation(shard)
    diff = 0
    for key in news:
        # BUG FIX: the original indexed originals[key] directly and raised
        # KeyError for a field with no "original.<key>" counterpart.
        if key not in originals or news[key] == originals[key]:
            continue
        # Delete the old cell at ts, write the new one 100ms later so the
        # put sorts after the delete.
        ts = int(datetime.datetime.now().timestamp()) * 1000
        mutation.putDelete(datatype + "\x00" + uid,
                           key + "\x00" + originals[key], authstring, ts)
        ts = int(datetime.datetime.now().timestamp()) * 1000 + 100
        mutation.put(datatype + "\x00" + uid,
                     key + "\x00" + news[key], authstring, ts)

        # Index entry for the new value.
        index_mutation = pysharkbite.Mutation(news[key].lower())
        protobuf = Uid_pb2.List()
        protobuf.COUNT = 1
        protobuf.IGNORE = False
        protobuf.UID.append(uid)
        index_mutation.put(key, shard + "\x00" + datatype, authstring, ts,
                           protobuf.SerializeToString())
        index_writer.addMutation(index_mutation)

        # Tombstone the index entry for the old value.
        original_index_mutation = pysharkbite.Mutation(originals[key].lower())
        originalprotobuf = Uid_pb2.List()
        originalprotobuf.COUNT = 1
        originalprotobuf.IGNORE = False
        originalprotobuf.REMOVEDUID.append(uid)
        original_index_mutation.put(key, shard + "\x00" + datatype,
                                    authstring, ts,
                                    originalprotobuf.SerializeToString())
        index_writer.addMutation(original_index_mutation)
        diff = diff + 1

    if diff > 0:
        writer.addMutation(mutation)
    index_writer.close()
    writer.close()

    url = "/search/?q=" + query
    # BUG FIX: authstring may be None (see the guard above); the original
    # called authstring.split unconditionally and raised AttributeError.
    if authstring:
        for auth in authstring.split("|"):
            url = url + "&auths=" + auth
    return HttpResponseRedirect(url)
password = input() if not table: table = "blahblahd" import pysharkbite configuration = pysharkbite.Configuration() zk = pysharkbite.ZookeeperInstance(args.instance, args.zookeepers, 1000, configuration) user = pysharkbite.AuthInfo(args.username, password, zk.getInstanceId()) try: connector = pysharkbite.AccumuloConnector(user, zk) table_operations = connector.tableOps(table) if not table_operations.exists(False): print("Creating table " + table) table_operations.create(False) else: print(table + " already exists, so not creating it") auths = pysharkbite.Authorizations() """ Add authorizations """ """ mutation.put("cf","cq","cv",1569786960) """ writer = table_operations.createWriter(auths, 10)
def get(self, request, *args, **kwargs):
    """Execute a search query against Accumulo and render the results.

    Parses the ``q`` query string with the project's Lucene-ish parser,
    looks up matching documents via the index/shard tables (switching to
    the provenance tables when PROV is the only selected auth), and
    renders ``search_results.html`` with the collected documents, header
    fields and the user's stored authorizations.
    """
    # Hoisted: the original queried AccumuloCluster.objects.first() four
    # separate times.
    cluster = AccumuloCluster.objects.first()
    user = pysharkbite.AuthInfo(cluster.user, cluster.password,
                                ZkInstance().get().getInstanceId())
    connector = pysharkbite.AccumuloConnector(user, ZkInstance().get())

    entry = request.GET.get('q')
    selectedauths = request.GET.getlist('auths')
    field = request.GET.get('f')

    table = cluster.dataTable
    index_table = cluster.indexTable

    is_prov = False
    authlist = list()
    auths = pysharkbite.Authorizations()
    for auth in selectedauths:
        if len(auth) > 0:
            if auth == "PROV":
                is_prov = True
            authlist.append(auth)
            auths.addAuthorization(auth)
    # PROV-only queries are routed to the provenance tables instead.
    if is_prov is True and len(authlist) == 1:
        table = "provenance"
        index_table = "provenanceIndex"

    table_operations = connector.tableOps(table)
    index_table_ops = connector.tableOps(index_table)

    start = time.time()
    index_lookup_information = LookupInformation(index_table, auths,
                                                 index_table_ops)
    shard_lookup_information = LookupInformation(table, auths,
                                                 table_operations)

    wanted_items = list()
    tree = parser.parse(entry)
    tree = resolver(tree)
    visitor = IndexLookup()
    iterator = visitor.visit(tree)
    # A bare range lookup is wrapped so lookup() always receives an
    # OR-iterator.
    if isinstance(iterator, RangeLookup):
        rng = iterator
        iterator = OrIterator()
        iterator.addRange(rng)
    docs = queue.SimpleQueue()
    lookup(index_lookup_information, shard_lookup_information, iterator,
           docs)

    counts = 0
    header = set()
    # Internal bookkeeping fields that should not become table columns.
    metadata_keys = ("ORIG_FILE", "TERM_COUNT", "RAW_FILE", "shard",
                     "datatype", "uid")
    while not docs.empty():
        jsondoc = docs.get()
        for key in jsondoc.keys():
            if key not in metadata_keys:
                header.add(key)
        wanted_items.append(jsondoc)
        counts = counts + 1

    nxt = ""
    prv = ""
    user_auths_set = set()
    try:
        # Renamed from 'auths', which shadowed the pysharkbite
        # Authorizations object above.
        user_auth_record = UserAuths.objects.get(name=request.user)
        user_auths = user_auth_record.authorizations.all()
        if user_auths is not None:
            for authset in user_auths:
                user_auths_set.add(authset)
    except Exception:
        # Narrowed from bare except: user simply has no stored auths.
        pass

    authy = "|".join(authlist)
    context = {'header': header, 'authstring': authy,
               'selectedauths': selectedauths, 'results': wanted_items,
               'time': (time.time() - start), 'prv': prv, 'nxt': nxt,
               'field': field, "admin": request.user.is_superuser,
               "authenticated": True, 'userAuths': user_auths_set,
               'query': entry}
    return render(request, 'search_results.html', context)