def deepSearch():
    """Daemon: join the deep-search multicast group and service search requests.

    Reads the multicast group/port from ``network_cfg.json`` and the shared
    secret from the local ``pwf`` file, then loops forever decrypting
    incoming datagrams.  Any message carrying the ``LogDSearch:::`` tag
    triggers an on-disk log search via ``SearchLocal.searchDisk``.

    Runs forever; never returns.
    """
    print("Starting Deep Search Daemon")
    with open('network_cfg.json', 'r') as nwc:
        nw = json.load(nwc)
    DEEP_SCH_GRP = nw['DEEP_SCH_GRP']
    DEEP_SCH_PORT = nw['DEEP_SCH_PORT']
    with open('pwf', 'r') as p:
        password = p.read().rstrip()

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # use DEEP_SCH_GRP instead of '' to listen only to DEEP_SCH_GRP,
    # not all groups on DEEP_SCH_PORT
    sock.bind(('', DEEP_SCH_PORT))
    mreq = struct.pack("4sl", socket.inet_aton(DEEP_SCH_GRP), socket.INADDR_ANY)
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)

    deep_search_tag = 'LogDSearch:::'
    while True:
        rx_msg = sock.recv(2048)
        # BUG FIX: the old decode('utf-8') -> bytes(..., 'ascii') round-trip
        # was a no-op for the (ASCII-safe) ciphertext but raised
        # UnicodeEncodeError on any non-ASCII datagram; DecryptMsg already
        # receives bytes, so pass the raw payload straight through.
        dcdmsg = cryp.DecryptMsg(rx_msg, password)
        try:
            if deep_search_tag in dcdmsg:
                print('deep search!')
                SearchLocal.searchDisk(dcdmsg, password, 'off')
        except TypeError:
            # DecryptMsg presumably returns None on failure / empty logs,
            # which makes the 'in' test raise — keep the daemon alive.
            print("None Type not iterable. Probably no logs to search.")
def logMonitor_Rx(password, params):
    """Daemon: listen for encrypted log messages and maintain the message log.

    Joins the LOGMSG multicast group, decrypts each datagram and either
    services a quick-search request (``LogQSearch:::``) via ``sl.searchMem``
    or appends the parsed message to an in-memory list.  That list is
    checkpointed to ``msglog_temp.json`` every 10 messages and rotated into
    a fresh timestamped ``msglog_<ts>.json`` (then cleared) every 105
    messages.

    Args:
        password: shared secret handed to ``cryp.DecryptMsg``.
        params: dict with ``node_num``/``role``/``cluster_id``/``localnode``
            keys; used to build the tag that filters out this node's own
            messages.

    Runs forever; never returns.
    """
    print("Starting Rx Process...\n")
    with open('network_cfg.json', 'r') as nwc:
        nw = json.load(nwc)
    LOGMSG_GRP = nw['LOGMSG_GRP']
    LOGMSG_PORT = nw['LOGMSG_PORT']

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # use LOGMSG_GRP instead of '' to listen only to LOGMSG_GRP,
    # not all groups on LOGMSG_PORT
    sock.bind(('', LOGMSG_PORT))
    mreq = struct.pack("4sl", socket.inet_aton(LOGMSG_GRP), socket.INADDR_ANY)
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)

    # messages originating from this node carry this tag and are skipped
    filter_tag = '%(node_num)s:%(role)s:%(cluster_id)s:%(localnode)s' % params
    print(filter_tag)

    quick_search_tag = 'LogQSearch:::'
    i = 0
    search_list = []
    while True:
        try:
            search = False
            rx_msg = sock.recv(2048)
            dcdmsg = bytes(rx_msg.decode("utf-8"), 'ascii')
            dcdmsg = cryp.DecryptMsg(dcdmsg, password)

            if quick_search_tag in dcdmsg:
                search = True
                print('quick search!')
                sl.searchMem(search_list, dcdmsg, password, 'off')

            if filter_tag not in dcdmsg and not search:
                jlm = json.loads(dcdmsg)
                search_list.append({
                    "source_time": jlm["source_time"],
                    'sending_node': jlm['sending_node'],
                    'sending_hostname': jlm['sending_hostname'],
                    "cluster": params["cluster_id"],
                    'orig_message': jlm['orig_message'],
                    'orig_addr': jlm['orig_addr'],
                })
                i += 1
                # BUG FIX: the rotation check (every 105 msgs) must run before
                # the checkpoint check (every 10 msgs).  The old order hit
                # 'continue' on the 10-multiple branch first, silently
                # skipping rotation whenever i was a multiple of both
                # (210, 420, ...).
                if i % 105 == 0:
                    log_name = 'msglog_' + str(time.time()) + '.json'
                    with open(log_name, 'w') as log:
                        json.dump(search_list, log)
                    search_list = []
                elif i % 10 == 0:
                    with open('msglog_temp.json', 'w') as log:
                        json.dump(search_list, log)
        except Exception as exc:
            # BUG FIX: was a bare 'except:' that also trapped
            # KeyboardInterrupt/SystemExit and hid the error entirely.
            print('Rx Process Exception: %s' % exc)
def apiReq(search_list_cl=None, search_oper_cl='AND', search_type_cl='D',
           time_min=0, time_max=30000000000,
           search_list_ca=None, search_oper_ca='OR'):
    """Broadcast an API search request to the cluster and return the result.

    Sequence: zero the remote disk ('ZD') and memory ('ZM') caches, fan out
    the primary search, wait for per-node size reports (for type 'D'), then
    ask nodes to write ('W') and consolidate ('C') results into a JSON file
    under ``APIREQ/`` which is loaded and returned.

    Args:
        search_list_cl: primary search terms (default ``['PIM', 'DR']``).
        search_oper_cl: 'AND'/'OR' combinator for the primary search.
        search_type_cl: search type; 'D' waits for node size reports.
        time_min, time_max: epoch-seconds window forwarded to the nodes.
        search_list_ca: cache-consolidation terms (default ``['']``).
        search_oper_ca: combinator for the consolidation search.

    Returns:
        Parsed JSON result list/dict, or an error payload (dict or JSON
        string) when the search is too large or the result file is missing.
    """
    # BUG FIX: the defaults were mutable lists ([...]) shared across every
    # call; use None sentinels and build a fresh list per invocation.
    if search_list_cl is None:
        search_list_cl = ['PIM', 'DR']
    if search_list_ca is None:
        search_list_ca = ['']

    if 'APIREQ' not in glob.glob('*'):
        os.mkdir('APIREQ')
    with open('network_cfg.json', 'r') as nwc:
        nw = json.load(nwc)
    API_GRP = nw['API_GRP']
    API_PORT = nw['API_PORT']
    with open('pwf', 'r') as p:
        password = p.read().rstrip()

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.settimeout(2)  # bounds each recv below
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(('', API_PORT))
    mreq = struct.pack("4sl", socket.inet_aton(API_GRP), socket.INADDR_ANY)
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)

    ts = time.time()
    api_file = "APIREQ/APIREQ" + str(ts) + ".json"

    # Zero remote caches, then fan out the primary search.
    CS.searchSend('api', None, None, 'ZD', None, None, None)
    time.sleep(.2)
    CS.searchSend('api', None, None, 'ZM', None, None, None)
    time.sleep(.2)
    CS.searchSend('api', search_list_cl, search_oper_cl, search_type_cl,
                  None, time_min, time_max)
    time.sleep(.2)

    responses = 1  # grows to the reported node count after the first reply
    i = 0
    search_len_list = []
    search_max = 0
    if search_type_cl == "D":
        while i < responses:
            try:
                print("Listening for 2 seconds.")
                rx_msg = sock.recv(2048)
                dcdmsg = bytes(rx_msg.decode("utf-8"), 'ascii')
                dcdmsg = cryp.DecryptMsg(dcdmsg, password)
                js_dcdmsg = json.loads(dcdmsg)
                responses = js_dcdmsg['node_len']
                search_len_list.append(js_dcdmsg['search_len'])
                search_max = max(search_len_list)
                i += 1
                print(str(responses) + " TxRx nodes to respond.")
                print(str(i) + " TxRx nodes have responded.")
            except Exception:
                # BUG FIX: was a bare 'except:' which also trapped
                # KeyboardInterrupt.  Typically a socket.timeout here.
                search_max = 0
                print("Socket Timeout Reached")
                break
        print(str(search_max) + " is the largest response from a single node.")
        if search_max > 3000:
            return {
                'Error': 'Search Results exceed 3000 from a single logging node, please narrow search criteria.'
            }
        # Scale the wait with node count and the largest per-node result.
        response_size_timer = (.01 + .001 * responses) * search_max
        print("\nWait time for responses is: " + str(response_size_timer) + "\n")
        time.sleep(response_size_timer)
    else:
        time.sleep(.5)

    # Ask nodes to write their caches, then consolidate into api_file.
    CS.searchSend('api', None, None, 'W', None, None, None)
    time.sleep(.2)
    CS.searchSend('api', search_list_ca, search_oper_ca, 'C',
                  api_file, None, None)
    time.sleep(1.5)
    try:
        with open(api_file, 'r') as f:
            return json.load(f)
    except (OSError, ValueError):
        # File never materialized (or isn't valid JSON) — report it.
        return json.dumps("Can't find %s" % (api_file))
def searchRx(output_type='console'):
    """Daemon: collect search results from the cluster and manage caches.

    Listens on the SCH multicast group and reacts to control tags embedded
    in decrypted messages:

    - ``!WRITECACHE!``       sort and persist the in-memory cache to
                             ``search_cache.json``.
    - ``!DELETEDISKCACHE!``  truncate ``search_cache.json``.
    - ``!DELETEMEMCACHE!``   clear all in-memory state.
    - ``!SEARCHCACHE!``      run an AND/OR field search over the disk cache
                             and write the hits to a result file (or to the
                             caller-supplied ``api_file``).

    Any other non-empty message is treated as a JSON list of result dicts,
    which are deduplicated (via their JSON serialization, since dicts are
    unhashable) into the in-memory cache.

    Args:
        output_type: accepted for interface compatibility; not used here.

    Runs forever; never returns.
    """
    print("starting searchRx")
    with open('pwf', 'r') as p:
        password = p.read().rstrip()
    with open('network_cfg.json', 'r') as nwc:
        nw = json.load(nwc)
    SCH_GRP = nw['SCH_GRP']
    SCH_PORT = nw['SCH_PORT']

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # use SCH_GRP instead of '' to listen only to SCH_GRP,
    # not all groups on SCH_PORT
    sock.bind(('', SCH_PORT))
    mreq = struct.pack("4sl", socket.inet_aton(SCH_GRP), socket.INADDR_ANY)
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)

    print("\n\nListening for search results:\n")
    dedup_set = set()           # JSON-serialized messages already seen
    cache_message_list = []     # unique messages pending a !WRITECACHE!
    while True:
        rx_msg = sock.recv(100000)
        dcdmsg = bytes(rx_msg.decode("utf-8"), 'ascii')
        dcdmsg = cryp.DecryptMsg(dcdmsg, password)

        if "!WRITECACHE!" in dcdmsg:
            # Sort via a JSON round-trip so the (unhashable, unorderable)
            # dicts order deterministically by their serialized form.
            cache_message_list = [json.dumps(m) for m in cache_message_list]
            cache_message_list.sort()
            cache_message_list = [json.loads(m) for m in cache_message_list]
            with open('search_cache.json', 'w') as sc:
                json.dump(cache_message_list, sc)
            dcdmsg = ''

        if '!DELETEDISKCACHE!' in dcdmsg:
            # Truncate the on-disk cache.
            open('search_cache.json', 'w').close()
            dcdmsg = ''

        if '!DELETEMEMCACHE!' in dcdmsg:
            dedup_set = set()
            cache_message_list = []
            dcdmsg = ''

        if '!SEARCHCACHE!' in dcdmsg:
            js_dcdmsg = json.loads(dcdmsg)
            so = js_dcdmsg['search_oper']
            sv = js_dcdmsg['search_var']
            sf = js_dcdmsg['search_field']
            af = js_dcdmsg['api_file']
            with open('search_cache.json', 'r') as f:
                jsl = json.load(f)
            # BUG FIX: dropped the dead 'if so == None: pass else: pass'.
            search_result = []
            if so == 'OR':
                search_result = [j for j in jsl
                                 if any(x in j[sf] for x in sv)]
            if so == 'AND':
                search_result = [j for j in jsl
                                 if all(x in j[sf] for x in sv)]

            stamp = str(time.time())
            # Sanitize filesystem-hostile characters in one translate pass
            # instead of nine chained .replace() calls.
            bad_chars = '/?:*><|\\ '
            result_filename = (str(sv) + str(so) + stamp + ".json").translate(
                str.maketrans(bad_chars, '-' * len(bad_chars)))
            if af is None:
                with open(result_filename, 'w') as rf:
                    json.dump(search_result, rf)
            else:
                with open(af, 'w') as f:
                    json.dump(search_result, f)

            time_addr_list = [{m['source_time']: m['orig_addr']}
                              for m in search_result]
            print("Result total occurences: " + str(len(time_addr_list)) + "\n")
            dcdmsg = ''
        elif dcdmsg != '':
            # Plain result batch: dedup each dict via its JSON form
            # (dicts are unhashable and can't go in a set directly).
            for msg in json.loads(dcdmsg):
                before = len(dedup_set)
                dedup_set.add(json.dumps(msg))
                if len(dedup_set) > before:
                    # BUG FIX: the old dedup_list grew without bound but was
                    # only ever read as dedup_list[-1] — i.e. 'msg' itself.
                    cache_message_list.append(msg)