def ingest(self, data):
    """Validate one incoming wire event and append it to its feed's log.

    ``data`` is a CBOR-encoded TRANSFER.  Events must arrive in strict
    sequence order (current max + 1); out-of-order events are dropped
    with a console notice.
    """
    transfer = gg.TRANSFER()
    transfer.from_cbor(data)
    feed, seq = transfer.event.feed, transfer.event.seq
    writer = pcap.PCAP()
    # First event of an unknown feed: allocate a fresh numbered log file.
    if feed not in self.db:
        self.max_fn_number += 1
        self.fn[feed] = os.path.join(self.dirname,
                                     str(self.max_fn_number) + '.pcap')
        writer.open(self.fn[feed], 'w')
        writer.close()
        self.db[feed] = {}
        self.max[feed] = 0
    # print(f"-- ingesting {seq} into {self.fn[feed]}")
    expected = self.max[feed] + 1
    if seq != expected:
        # TODO: should also check prev field
        print("-- mismatch:", seq, expected, ", ignored")
        return
    self.db[feed][seq] = data
    self.max[feed] += 1
    writer.open(self.fn[feed], 'a')
    writer.write(data)
    writer.close()
    print(
        f"-- ingested event {base64.b64encode(feed).decode('utf8')}:{seq}")
def my_log_append(log_fn, body):
    """Append *body* (JSON-serialisable) to *log_fn* as a new signed event.

    Scans the existing log to find the last event's hash and sequence
    number, then writes a freshly signed successor event.
    """
    handle = log.PCAP()
    handle.open(log_fn, 'r')
    prev, feed, seq = None, None, 0
    transfer = gg.TRANSFER()
    # Walk to the end of the log so prev/seq reflect the final event.
    for block in handle:
        transfer.from_cbor(block)
        prev = transfer.event.prev
        feed = transfer.event.feed
        seq = transfer.event.seq
    handle.close()
    handle.open(log_fn, 'a')
    new_event = gg.EVENT(prev=prev,
                         feed=keypair.public,
                         seq=seq + 1,
                         time=int(time.time()),
                         content=bytes(json.dumps(body), 'utf-8'),
                         content_enc=gg.GG_CONTENT_ENCODING_JSON)
    new_event.signature = keypair.sign(new_event.event_to_cbor())
    transfer = gg.TRANSFER(new_event)
    handle.write(transfer.to_cbor())
    handle.close()
def read_request():
    '''
    Scan the ISP feed for 'request' events and handle them in ID order.

    Since the server can also close a connection on its own, the client
    has to read and process pending requests from the shared log.
    Advances the module-global next_request_ID as requests are handled.
    :return: None
    '''
    global next_request_ID
    p = pcap.PCAP(isp_log)
    p.open('r')
    for w in p:
        e = cbor2.loads(w)
        # decode the header and content parts of the packet
        e[0] = cbor2.loads(e[0])
        e[0] = pcap.base64ify(e[0])
        if e[2] is not None:  # idiom fix: was `!= None`
            e[2] = cbor2.loads(e[2])
        if isinstance(e[2], dict) and e[2]['type'] == 'request':
            request_ID = e[2]["ID"]
            logging.debug(f'req_id:{request_ID},next:{next_request_ID}')
            # only handle the request we are waiting for, strictly in order
            if request_ID == next_request_ID:
                logging.info(f'Handling request from server')
                next_request_ID += 1
                handle_request(e[2])
    p.close()
def __init__(self, fname, fid=None, signer=None, create_if_notexisting=False):
    # Open an append-only pcap-backed event log and restore its tail state:
    # feed ID, highest sequence number, and the hash of the last event's
    # metadata (used as the prev-link for future appends).
    #
    # fname: path of the pcap log file
    # fid:   expected feed ID; on mismatch the object is invalidated
    #        (self.pcap is set to None)
    # signer: signing key object, stored for later appends
    # create_if_notexisting: create an empty log when opening fails
    self.fname = fname
    self.fid = fid
    self.signer = signer
    self.cine = create_if_notexisting
    self.seq = 0
    self.pcap = pcap.PCAP(fname)
    self.hprev = None  # hash of the most recent event's metadata
    try:
        self.pcap.open('r')
        # find highest seq number:
        w = self.pcap.read_backwards(True)
        e = event.EVENT()
        e.from_wire(w)
        if fid != None and e.fid != fid:
            print("feed ID mismatch:", e.fid, "instead of", fid)
            self.pcap.close()
            self.pcap = None  # signal "unusable" to callers
            return
        self.fid, self.seq = e.fid, e.seq
        self.hprev = event.get_hash(e.metabits)
        self.pcap.close()
    except Exception as e:
        # open/read failed — presumably the file is missing or empty
        if not self.cine:
            self.pcap = None
            print(e)
            print(f"error opening file {fname}")
        else:
            # create an empty log so later appends can succeed
            self.pcap.open('w')
            self.pcap.close()
def read_c_result(ID, server: cServer):
    """Search the server-to-client feed for the result event with this ID.

    On a hit the matching awaited entry is cleared, handle_result() is
    invoked and True is returned; otherwise False.  The pcap reader is
    now closed on every exit path (the original leaked the handle when
    returning True from inside the loop).
    """
    global result_ID_list
    p = pcap.PCAP(server.s_c_feed)
    p.open('r')
    try:
        for w in p:
            # here we apply our knowledge about the event/pkt's internal struct
            try:
                e = cbor2.loads(w)
            except Exception:  # was a bare except; narrowed
                logging.critical('cbor2 loader failed - skipping logentry')
                continue
            href = hashlib.sha256(e[0]).digest()
            e[0] = cbor2.loads(e[0])
            # rewrite the packet's byte arrays for pretty printing:
            e[0] = pcap.base64ify(e[0])
            fid = e[0][0]
            seq = e[0][1]
            if e[2] is not None:
                e[2] = cbor2.loads(e[2])
            e[1] = pcap.base64ify(e[1])
            if isinstance(e[2], dict) and e[2]['type'] == 'result':
                if e[2]['ID'] == ID:
                    logging.debug(f'from read_result ID={e[2]["ID"]}')
                    logging.debug(f"** fid={fid}, seq={seq}, ${len(w)} bytes")
                    logging.debug(f" hashref={href.hex()}")
                    logging.debug(f" content={e[2]}")
                    if ID in result_ID_list:  # idiom: was __contains__()
                        clear_await(ID)
                    handle_result(e)
                    return True  # reader closed by the finally block
    finally:
        p.close()
    return False
def load(self, dirname):
    """Read every pcap log under *dirname* into the in-memory store.

    Updates self.fn / self.max / self.db per feed and self.max_fn_number
    for file naming.  Returns the running event count (self.cnt), or 0
    when the directory does not exist.
    """
    if not os.path.isdir(dirname):
        return 0
    self.dirname = dirname
    reader = pcap.PCAP()
    transfer = gg.TRANSFER()
    for name in os.listdir(dirname):
        # remember highest file number, if we have to create a new file
        number = int(name.split('.')[0])
        if self.max_fn_number < number:
            self.max_fn_number = number
        path = os.path.join(dirname, name)
        reader.open(path, 'r')
        for block in reader:
            self.cnt += 1
            transfer.from_cbor(block)
            feed = transfer.event.feed
            if feed not in self.fn:
                # first sighting of this feed: remember which file holds it
                self.fn[feed] = path
                self.max[feed] = -1
                self.db[feed] = {}
            seq = transfer.event.seq
            self.max[feed] = max(self.max[feed], seq)
            self.db[feed][seq] = block
        reader.close()
    return self.cnt
def feed_get_display_name(log_fn):
    """Return a <feedID, display_name> tuple extracted from one feed log.

    The feed ID comes from the first event; the name from the latest
    'feed/about' event carrying a 'display_name' field (None if absent).
    """
    feed, name = None, None
    reader = log.PCAP()
    reader.open(log_fn, 'r')
    transfer = gg.TRANSFER()
    for block in reader:
        transfer.from_cbor(block)
        if not feed:
            feed = transfer.event.feed
        content = transfer.event.content
        if not content:
            continue
        meta = json.loads(content)
        if meta.get('app') == 'feed/about' and 'display_name' in meta:
            name = meta['display_name']
    reader.close()
    return (feed, name)
def output_chat(stdscr):
    """Collect chat messages and display names from all logs and render them.

    Walks every log file in LOGS_DIR, gathering 'feed/message' events
    (with their timestamps) and 'feed/about' display names, then hands
    everything to pp() for curses output.
    """
    t = gg.TRANSFER()
    lg = log.PCAP()
    pp_list = []    # [event-time, message-dict] pairs for pp() to render
    name_list = {}  # feed -> display_name
    for file in os.listdir(LOGS_DIR):
        lg.open(os.path.join(LOGS_DIR, file), "r")
        for block in lg:
            t.from_cbor(block)
            c = t.event.content
            if c is not None:
                m = json.loads(c)
                # .get() avoids a KeyError on events without an 'app' field
                if m.get('app') == "feed/message":
                    pp_list.append([t.event.time, m])
                if m.get('app') == "feed/about":
                    name_list[m['feed']] = m['display_name']
            else:
                # bug fix: the original referenced an undefined name `n`
                # here, raising NameError for any event without content
                scr_print(stdscr, f"** {t.event.seq}: no content")
        lg.close()
    pp(pp_list, name_list, stdscr)
if __name__ == '__main__':
    # one optional parameter: -new_name
    import_dir = sys.argv[1]  # directory holding the logs to import
    print("Welcome to SneakerNet\n")
    print(f"** importing new events from '{import_dir}'")
    print()
    if not os.path.isdir(import_dir):
        print(f"** directory not found, aborting")
        sys.exit()
    # new_db maps feed -> seq -> [raw event blocks]; new_cnt counts them all
    new_db = {}
    new_cnt = 0
    lg = log.PCAP()
    t = gg.TRANSFER()
    for fn in os.listdir(import_dir):
        fn = os.path.join(import_dir, fn)
        lg.open(fn, 'r')
        for block in lg:
            t.from_cbor(block)
            feed = t.event.feed
            seq = t.event.seq
            if not feed in new_db:
                new_db[feed] = {}
            if not seq in new_db[feed]:
                new_db[feed][seq] = []
            new_db[feed][seq].append(block)
            new_cnt += 1
        lg.close()
    # NOTE(review): this extract ends after collecting new_db; the original
    # script presumably merges new_db into the local logs below — confirm.
#!/usr/bin/python
# Debug utility: dump every event of test.pcap to stdout.
import lib.pcap as pcap
import cbor2
import hashlib

# Read the inventory list; the context manager closes the handle
# (the original left the file open for the process lifetime).
with open("inventory.txt") as inventory_file:
    inventory = inventory_file.read().splitlines()

log = pcap.PCAP('test.pcap')
log.open('r')
for w in log:
    # assumes each record is [metadata-cbor, signature, content-cbor],
    # matching the other readers in this project — TODO confirm
    e = cbor2.loads(w)
    href = hashlib.sha256(e[0]).digest()
    e[0] = cbor2.loads(e[0])
    e[0] = pcap.base64ify(e[0])  # byte arrays -> base64 for pretty printing
    fid = e[0][0]
    seq = e[0][1]
    e[2] = cbor2.loads(e[2])
    print(fid)
    print(seq)
    print(e[2])
    print(e[0])
    print(href.hex())
log.close()  # bug fix: the original never closed the pcap reader
def export():
    # NOTE(review): in this extract export() appears to contain only these
    # two constants, yet the module-level script below references LOGS_DIR;
    # in the original file they were presumably module-level — confirm
    # against the repository before relying on this reconstruction.
    LOGS_DIR = 'logs'
    MY_LOG_FILE = '1.pcap'  # inside logs dir


# ----------------------------------------------------------------------

if __name__ == '__main__':
    # one optional parameter: -new_name
    export_dir = sys.argv[1]  # target directory for the export
    print("Welcome to SneakerNet\n")
    print(f"** exporting new events to '{export_dir}'")
    print()
    if not os.path.isdir(export_dir):
        print(f"** directory not found, aborting")
        sys.exit()
    lg = log.PCAP()
    t = gg.TRANSFER()
    # have_db: feed -> seq -> raw block, for everything stored locally;
    # have_max: feed -> highest local sequence number
    have_db = {}
    have_max = {}
    have_cnt = 0
    for fn in os.listdir(LOGS_DIR):
        lg.open(os.path.join(LOGS_DIR, fn), 'r')
        for block in lg:
            t.from_cbor(block)
            feed = t.event.feed
            seq = t.event.seq
            if not feed in have_db:
                have_db[feed] = {}
                have_max[feed] = 0
            have_db[feed][seq] = block
            if seq > have_max[feed]:
                have_max[feed] = seq
            have_cnt += 1
        lg.close()
    print(f"** found {have_cnt} event(s) in directory '{LOGS_DIR}'")
    # target_db records which (feed, seq) pairs the target already has,
    # so only missing events get exported
    target_db = {}
    target_cnt = 0
    for fn in os.listdir(export_dir):
        fn = os.path.join(export_dir, fn)
        lg.open(fn, 'r')
        for block in lg:
            t.from_cbor(block)
            feed = t.event.feed
            seq = t.event.seq
            if not feed in target_db:
                target_db[feed] = {}
            if not seq in target_db[feed]:
                target_db[feed][seq] = []
            # target_db[feed][seq].append(block)
            target_cnt += 1
        lg.close()
    print(
        f"** found {target_cnt} event(s) in target directory '{export_dir}'"
    )
    # create file with unique file name
    log_fn = None
    while True:
        log_fn = 'x' + str(random.randint(10000000, 19999999))[1:] + '.pcap'
        log_fn = os.path.join(export_dir, log_fn)
        if not os.path.isfile(log_fn):
            break
    lg.open(log_fn, 'w')
    update_cnt = 0
    for feed in have_db:
        # sequence numbers are 1-based, hence i + 1 below
        for i in range(0, have_max[feed]):
            if not feed in target_db or not i + 1 in target_db[feed]:
                if update_cnt == 0:
                    print()
                print(
                    f"** exporting {base64.b64encode(feed).decode('utf8')}/{i+1}"
                )
                lg.write(have_db[feed][i + 1])
                update_cnt += 1
    lg.close()
    print()
    if update_cnt == 0:
        # nothing new for the target: remove the empty file again
        os.unlink(log_fn)
        print("** no events exported")
    else:
        print(f"** exported {update_cnt} event(s) to '{log_fn}'")
def init():
    '''
    Initialises the whole client environment: creates the own feed, then
    replays the client, ISP and per-server logs to restore request/result
    bookkeeping (next_request_ID, awaited results, known servers).
    :return: None
    '''
    global next_request_ID
    global highest_result_ID
    global result_ID_list
    create_feed()
    # This part is currently not working
    logging.info('Initialising from feeds...')
    # Pass 1: replay our own client log to recover outstanding request IDs.
    p = pcap.PCAP(client_log)
    p.open('r')
    for w in p:
        # here we apply our knowledge about the event/pkt's internal struct
        e = cbor2.loads(w)
        href = hashlib.sha256(e[0]).digest()
        e[0] = cbor2.loads(e[0])
        # rewrite the packet's byte arrays for pretty printing:
        e[0] = pcap.base64ify(e[0])
        fid = e[0][0]
        seq = e[0][1]
        if e[2] != None:
            e[2] = cbor2.loads(e[2])
        # print(f"** fid={fid}, seq={seq}, ${len(w)} bytes")
        # print(f" hashref={href.hex()}")
        # print(f" content={e[2]}")
        if isinstance(e[2], dict) and e[2]['type'] == 'request':
            logging.debug(f'from init request ID={e[2]["ID"]}')
            await_result(e[2]['ID'])
            # keep next_request_ID above every ID already used
            next_request_ID = max(int(e[2]["ID"]), next_request_ID)
    p.close()
    # Pass 2: replay the ISP log and consume results for awaited requests.
    p = pcap.PCAP(isp_log)
    p.open('r')
    for w in p:
        # here we apply our knowledge about the event/pkt's internal struct
        e = cbor2.loads(w)
        href = hashlib.sha256(e[0]).digest()
        e[0] = cbor2.loads(e[0])
        # rewrite the packet's byte arrays for pretty printing:
        e[0] = pcap.base64ify(e[0])
        fid = e[0][0]
        seq = e[0][1]
        if e[2] != None:
            e[2] = cbor2.loads(e[2])
        if isinstance(e[2], dict) and e[2]['type'] == 'result':
            if result_ID_list.__contains__(e[2]['ID']):
                logging.debug(f'from init result ID={e[2]["ID"]}')
                logging.debug(f"** fid={fid}, seq={seq}, ${len(w)} bytes")
                logging.debug(f" hashref={href.hex()}")
                logging.debug(f" content={e[2]}")
                read_result(e[2]['ID'])
    p.close()
    # Pass 3: scan every .pcap under the configured location for 'init'
    # events that describe known servers, and rebuild c_server_dict.
    # NOTE(review): the loop variable `log` shadows any module named `log`.
    path = client_config['location']
    for log in os.listdir(path):
        if os.path.isfile(os.path.join(path, log)) and log.endswith(".pcap"):
            p = pcap.PCAP(f'{path}/{log}')
            p.open('r')
            for w in p:
                # here we apply our knowledge about the event/pkt's internal struct
                e = cbor2.loads(w)
                href = hashlib.sha256(e[0]).digest()
                e[0] = cbor2.loads(e[0])
                # rewrite the packet's byte arrays for pretty printing:
                e[0] = pcap.base64ify(e[0])
                fid = e[0][0]
                seq = e[0][1]
                if e[2] != None:
                    e[2] = cbor2.loads(e[2])
                if isinstance(e[2], dict) and e[2]['type'] == 'init':
                    try:
                        server = e[2]['cserver']
                        rep = e[2]['cserver']['replicator']
                        creplicator = replicator.Replicator(
                            rep['name'], rep['source'], rep['destination'])
                        cserver = cServer(server['name'], server['s_c_feed'],
                                          server['c_s_feed'],
                                          server['c_s_key'], 0, [],
                                          creplicator)
                        c_server_dict[server['name']] = cserver
                    except:
                        # malformed init event: best-effort, skip it
                        pass
            p.close()
    logging.info(f'Servers:{c_server_dict}')
    # Pass 4: replay each server's client->server feed to restore the
    # request bookkeeping for that server as well.
    for s in c_server_dict.values():
        p = pcap.PCAP(s.c_s_feed)
        p.open('r')
        for w in p:
            # here we apply our knowledge about the event/pkt's internal struct
            e = cbor2.loads(w)
            href = hashlib.sha256(e[0]).digest()
            e[0] = cbor2.loads(e[0])
            # rewrite the packet's byte arrays for pretty printing:
            e[0] = pcap.base64ify(e[0])
            fid = e[0][0]
            seq = e[0][1]
            if e[2] != None:
                e[2] = cbor2.loads(e[2])
            if isinstance(e[2], dict) and e[2]['type'] == 'request':
                logging.debug(f'from init request ID={e[2]["ID"]}')
                await_result(e[2]['ID'])
                next_request_ID = max(int(e[2]["ID"]), next_request_ID)
        p.close()
    logging.info(f'Highest ID: {next_request_ID}')
    pass
def import_log(stdscr):
    # Curses flavour of the SneakerNet import: read all events from a
    # user-supplied directory and append the missing ones, in strict
    # sequence order, to the per-feed logs under LOGS_DIR.
    import_dir = c_input(stdscr, "enter path: ")
    if not os.path.isdir(import_dir):
        scr_print(stdscr, "directory not found, press ENTER to go back\n")
        return
    # new_db: feed -> seq -> [raw blocks] found in the import directory
    new_db = {}
    new_cnt = 0
    lg = log.PCAP()
    t = gg.TRANSFER()
    for fn in os.listdir(import_dir):
        fn = os.path.join(import_dir, fn)
        lg.open(fn, 'r')
        for block in lg:
            t.from_cbor(block)
            feed = t.event.feed
            seq = t.event.seq
            if not feed in new_db:
                new_db[feed] = {}
            if not seq in new_db[feed]:
                new_db[feed][seq] = []
            new_db[feed][seq].append(block)
            new_cnt += 1
        lg.close()
    scr_print(stdscr, f"** found {new_cnt} event(s) in '{import_dir}'\n")
    # have_fn: feed -> local log file name; have_max: feed -> highest seq
    have_fn = {}
    have_max = {}
    have_cnt = 0
    max_fn_number = 1
    for fn in os.listdir(LOGS_DIR):
        # remember highest file number, if we have to create a new file
        i = int(fn.split('.')[0])
        if max_fn_number < i:
            max_fn_number = i
        lg.open(os.path.join(LOGS_DIR, fn), 'r')
        for block in lg:
            have_cnt += 1
            t.from_cbor(block)
            feed = t.event.feed
            if not feed in have_fn:
                # NOTE(review): stores the bare file name, not joined with
                # LOGS_DIR; the append loop below opens have_fn[feed]
                # directly, which resolves relative to the CWD — confirm.
                have_fn[feed] = fn
            seq = t.event.seq
            if not feed in have_max:
                have_max[feed] = -1
            if seq > have_max[feed]:
                have_max[feed] = seq
        lg.close()
    scr_print(stdscr, f"** found {have_cnt} event(s) in '{LOGS_DIR}'\n")
    # Create empty log files for feeds we have never seen before.
    update_cnt = 0
    for feed in new_db:
        if not feed in have_fn:
            max_fn_number += 1
            have_fn[feed] = os.path.join(LOGS_DIR,
                                         str(max_fn_number) + '.pcap')
            have_max[feed] = 0
            if update_cnt == 0:
                print()
            scr_print(
                stdscr,
                f"** creating {have_fn[feed]} for {base64.b64encode(feed).decode('utf8')}\n"
            )
            lg.open(have_fn[feed], 'w')
            lg.close()
            # NOTE(review): max_fn_number is incremented a second time for
            # each new feed, leaving gaps in the numbered file names —
            # confirm this is intended.
            max_fn_number += 1
            update_cnt += 1
    # Append strictly consecutive new events to each feed's log.
    update_cnt = 0
    for feed in have_fn:
        if not feed in new_db:
            continue
        lg.open(have_fn[feed], 'a')
        # print(f"** testing {have_fn[feed]}, seq={have_max[feed]}")
        # only take the next expected seq; stop at the first gap
        while have_max[feed] + 1 in new_db[feed]:
            have_max[feed] += 1
            if update_cnt == 0:
                print()
            scr_print(
                stdscr,
                f"** import {base64.b64encode(feed).decode('utf8')}/{have_max[feed]}\n"
            )
            lg.write(new_db[feed][have_max[feed]][0])
            update_cnt += 1
        lg.close()
    scr_print(
        stdscr,
        f"** imported {update_cnt} event(s) to the '{LOGS_DIR}' directory\n")
def importPCAP(fname):
    """Wrap *fname* in a pcap.PCAP log object and return it (not opened)."""
    return pcap.PCAP(fname)
def importPCAP(fname):
    """Wrap *fname* in a pcap.PCAP log object and return it.

    The log is not opened here; the caller is responsible for calling
    open()/close() on the returned object.  (Replaces the placeholder
    "Please comment" docstring.)
    """
    log = pcap.PCAP(fname)
    return log
def __init__(self, fn):
    # Wrap the pcap log file `fn` and open it for reading immediately;
    # callers iterate the stored reader via self.pcap.
    self.pcap = pcap.PCAP(fn)
    self.pcap.open('r')
def export(stdscr):
    # Curses flavour of the SneakerNet export: copy every locally stored
    # event that the user-selected target directory does not yet have
    # into a freshly (uniquely) named pcap file there.
    export_dir = c_input(stdscr, "enter path: ")
    scr_print(stdscr, f"** exporting new events to '{export_dir}'\n")
    print()
    if not os.path.isdir(export_dir):
        scr_print(stdscr, "directory not found, press ENTER to go back\n")
        return
    lg = log.PCAP()
    t = gg.TRANSFER()
    # have_db: feed -> seq -> raw block for everything stored locally;
    # have_max: feed -> highest local sequence number
    have_db = {}
    have_max = {}
    have_cnt = 0
    for fn in os.listdir(LOGS_DIR):
        lg.open(os.path.join(LOGS_DIR, fn), 'r')
        for block in lg:
            t.from_cbor(block)
            feed = t.event.feed
            seq = t.event.seq
            if not feed in have_db:
                have_db[feed] = {}
                have_max[feed] = 0
            have_db[feed][seq] = block
            if seq > have_max[feed]:
                have_max[feed] = seq
            have_cnt += 1
        lg.close()
    scr_print(stdscr,
              f"** found {have_cnt} event(s) in directory '{LOGS_DIR}'\n")
    # target_db records which (feed, seq) pairs the target already has,
    # so only missing events are exported
    target_db = {}
    target_cnt = 0
    for fn in os.listdir(export_dir):
        fn = os.path.join(export_dir, fn)
        lg.open(fn, 'r')
        for block in lg:
            t.from_cbor(block)
            feed = t.event.feed
            seq = t.event.seq
            if not feed in target_db:
                target_db[feed] = {}
            if not seq in target_db[feed]:
                target_db[feed][seq] = []
            # target_db[feed][seq].append(block)
            target_cnt += 1
        lg.close()
    scr_print(
        stdscr,
        f"** found {target_cnt} event(s) in target directory '{export_dir}'\n")
    # create file with unique file name
    log_fn = None
    while True:
        log_fn = 'x' + str(random.randint(10000000, 19999999))[1:] + '.pcap'
        log_fn = os.path.join(export_dir, log_fn)
        if not os.path.isfile(log_fn):
            break
    lg.open(log_fn, 'w')
    update_cnt = 0
    for feed in have_db:
        # sequence numbers are 1-based, hence i + 1 below
        for i in range(0, have_max[feed]):
            if not feed in target_db or not i + 1 in target_db[feed]:
                if update_cnt == 0:
                    print()
                scr_print(
                    stdscr,
                    f"** exporting {base64.b64encode(feed).decode('utf8')}/{i + 1}\n"
                )
                lg.write(have_db[feed][i + 1])
                update_cnt += 1
    lg.close()
    print()
    if update_cnt == 0:
        # nothing new for the target: remove the empty file again
        os.unlink(log_fn)
        scr_print(stdscr, "** no events exported\n")
    else:
        stdscr.addstr(f"** exported {update_cnt} event(s) to '{log_fn}'\n")
def importPCAP(fname):
    """Import the pcap file at *fname*.

    Uses the format handled by the pcap.PCAP class from Professor
    Tschudin's demo files; returns the (unopened) PCAP wrapper object.
    """
    pcap_log = pcap.PCAP(fname)
    return pcap_log
def __init__(self, fn, digestmod='sha256'):
    # Open the pcap log at `fn` for reading and remember the hash
    # algorithm name (hashlib-style, default 'sha256') to use for digests.
    self.pcap = pcap.PCAP(fn)
    self.pcap.open('r')
    self.digestmod = digestmod