def start(self):
    """Replay the recorded flow over live socket(s), diffing received bytes.

    For each socket produced by init_socket(), every flow entry is replayed
    in order: SEND entries are transmitted verbatim, EXPECT entries are read
    back one byte at a time and printed as '..' when the byte matches the
    recording, or as a color-highlighted hex pair when it differs.

    Side effects: network I/O on the sockets, progress output on stdout.
    """
    # Color used to highlight received bytes that differ from the recording.
    diff = COLORS[2]
    for s in self.init_socket():
        # Blocking mode so recv() waits for data instead of raising.
        s.setblocking(1)
        # NOTE(review): reconstructed nesting — the flow replay is assumed
        # to run once per socket yielded by init_socket(); confirm against
        # the original layout.
        for n, entry in enumerate(self.flow):
            horizontal_separator()
            if entry.direction is self.EXPECT:
                recvd = 0
                expected = len(entry.data)
                entry_header(n, 'xpct', 1, entry.data)
                entry_header(n, 'recv', 2)
                while recvd < expected:
                    # Byte-at-a-time so each mismatch can be shown in place.
                    buf = s.recv(1)
                    if buf:
                        print(diff(hexlify(buf)) if buf != entry.data[recvd] else '..', end='')
                        stdout.flush()
                        recvd += len(buf)
                    else:
                        # Peer closed before the expected amount arrived.
                        break
                print('')
            elif entry.direction is self.SEND:
                entry_header(n, 'send', 0, entry.data)
                # sendall() instead of send(): plain send() may transmit only
                # part of the buffer, silently truncating the replayed entry.
                s.sendall(entry.data)
        horizontal_separator()
def diff_flows(flows, skip_offset=None, max_entries=None, fix_diff_treshold=5):
    """Print a colorized, entry-by-entry hexdump diff of parallel flows.

    flows -- iterable of Flow objects whose entries are compared in lockstep
    skip_offset -- if given, each flow is filtered by this offset first
    max_entries -- if given, stop after diffing this many entries
    fix_diff_treshold -- forwarded to look_for_fix_diff(); a falsy value
        disables pattern detection ('treshold' spelling kept for API compat)
    """
    if skip_offset is not None:
        flows = (f.filter_by_offset(skip_offset) for f in flows)
    # izip stops at the shortest flow, so only common entries are compared.
    for entry_no, entries in enumerate(izip(*flows)):
        if max_entries is not None and entry_no == max_entries:
            break
        # Per-flow tuple of byte values (ints) for the current entry.
        entries_bytes = tuple(tuple(imap(ord, data))
                              for data in imap(attrgetter('data'), entries))
        lengths = set(imap(len, entries_bytes))
        print '[i] E{entry_no} // {dirs} // Offset: {offsets} // Length: {lens}'.format(
            entry_no=entry_no,
            offsets=sorted(set(e.offset for e in entries)),
            dirs='/'.join(sorted(set(
                COLORS[Flow.DIRECTIONS.index(e.direction)](e.direction)
                for e in entries))),
            lens=sorted(lengths))
        min_len = min(lengths)
        first_data = entries_bytes[0]
        # Offsets at which every flow carries the same byte value.
        common_bytes = [n for n in xrange(min_len)
                        if all(e[n] == first_data[n] for e in entries_bytes[1:])]
        enum_izip_entries_bytes = tuple(enumerate(izip(*entries_bytes)))
        if len(lengths) > 1:
            look_for_length_byte(entries_bytes, enum_izip_entries_bytes)
        # Longest suffix shared by all entries, checked longest-first.
        for match_len in xrange(min_len - 1, 0, -1):
            fd_match = first_data[-match_len:]
            if all(e[-match_len:] == fd_match for e in entries_bytes[1:]):
                print '[i] Common postfix: {0}'.format(':'.join(
                    COLORS[len(Flow.DIRECTIONS)]('{0:02x}'.format(c))
                    for c in fd_match))
                break
        all_same = (len(set(entries_bytes)) == 1)
        if all_same:
            # All entries identical: dump only one representative copy.
            entries = (entries[0],)
        elif fix_diff_treshold:
            look_for_fix_diff(len(entries), enum_izip_entries_bytes, fix_diff_treshold)
        else:
            print('[i] (ignoring patterns)')
        blobs = [entry.data for entry in entries]
        for i, data in enumerate(blobs):
            print ''
            # Two parallel dumps built from one comprehension template: hex
            # (filler '..') and ASCII (filler '.').  Bytes common to every
            # flow are blanked on all dumps after the first (i truthy);
            # differing bytes are colored by the index of the first blob
            # that carries the same value at that offset.
            hexdump, asciidump = ([(empty if (n in common_bytes and i) else COLORS[
                next(bi for bi, bd in enumerate(blobs)
                     if len(bd) >= n + 1 and bd[n] == c)](conv(c)))
                for n, c in enumerate(data)]
                for empty, conv in (('..', hexlify), ('.', asciify)))
            # Python 2 integer division: whole 4-byte groups that fit after
            # the 8-digit offset field and its padding.
            bytes_per_line = (width - (1 + 8 + 2 + 2)) / 17 * 4
            for offset in xrange(0, len(data), bytes_per_line):
                print '{offset:08x} {hex} {ascii}'.format(offset=offset,
                    hex=' '.join(' '.join(padspace(hexdump[do:do + 4], 4))
                                 for do in xrange(offset, offset + bytes_per_line, 4)),
                    ascii=''.join(asciidump[offset:offset + bytes_per_line]))
        if all_same:
            print '(all entries are the same)'
        horizontal_separator()
def diff_flows(flows, skip_offset=None, max_entries=None, fix_diff_treshold=5):
    """Print a colorized, entry-by-entry hexdump diff of parallel flows.

    NOTE(review): this re-formatted definition duplicates an earlier
    diff_flows in this file and, being later, shadows it — one of the two
    copies should be removed once the intended version is confirmed.

    flows -- iterable of Flow objects whose entries are compared in lockstep
    skip_offset -- if given, each flow is filtered by this offset first
    max_entries -- if given, stop after diffing this many entries
    fix_diff_treshold -- forwarded to look_for_fix_diff(); a falsy value
        disables pattern detection ('treshold' spelling kept for API compat)
    """
    if skip_offset is not None:
        flows = (f.filter_by_offset(skip_offset) for f in flows)
    # izip stops at the shortest flow, so only common entries are compared.
    for entry_no, entries in enumerate(izip(*flows)):
        if max_entries is not None and entry_no == max_entries:
            break
        # Per-flow tuple of byte values (ints) for the current entry.
        entries_bytes = tuple(
            tuple(imap(ord, data)) for data in imap(attrgetter('data'), entries))
        lengths = set(imap(len, entries_bytes))
        print '[i] E{entry_no} // {dirs} // Offset: {offsets} // Length: {lens}'.format(
            entry_no=entry_no,
            offsets=sorted(set(e.offset for e in entries)),
            dirs='/'.join(
                sorted(
                    set(COLORS[Flow.DIRECTIONS.index(e.direction)](e.direction)
                        for e in entries))),
            lens=sorted(lengths))
        min_len = min(lengths)
        first_data = entries_bytes[0]
        # Offsets at which every flow carries the same byte value.
        common_bytes = [
            n for n in xrange(min_len)
            if all(e[n] == first_data[n] for e in entries_bytes[1:])
        ]
        enum_izip_entries_bytes = tuple(enumerate(izip(*entries_bytes)))
        if len(lengths) > 1:
            look_for_length_byte(entries_bytes, enum_izip_entries_bytes)
        # Longest suffix shared by all entries, checked longest-first.
        for match_len in xrange(min_len - 1, 0, -1):
            fd_match = first_data[-match_len:]
            if all(e[-match_len:] == fd_match for e in entries_bytes[1:]):
                print '[i] Common postfix: {0}'.format(':'.join(
                    COLORS[len(Flow.DIRECTIONS)]('{0:02x}'.format(c))
                    for c in fd_match))
                break
        all_same = (len(set(entries_bytes)) == 1)
        if all_same:
            # All entries identical: dump only one representative copy.
            entries = (entries[0], )
        elif fix_diff_treshold:
            look_for_fix_diff(len(entries), enum_izip_entries_bytes,
                              fix_diff_treshold)
        else:
            print('[i] (ignoring patterns)')
        for i, entry in enumerate(entries):
            print ''
            # Two parallel dumps built from one comprehension template: hex
            # (filler '..') and ASCII (filler '.').  Bytes common to every
            # flow are blanked on all dumps after the first (i truthy);
            # differing bytes are colored by the index of the first entry
            # that carries the same value at that offset.
            hexdump, asciidump = ([
                (empty if (n in common_bytes and i) else COLORS[next(
                    bi for bi, be in enumerate(entries)
                    if len(be.data) >= n + 1 and be.data[n] == c)](conv(c)))
                for n, c in enumerate(entry.data)
            ] for empty, conv in (('..', hexlify), ('.', asciify)))
            # Python 2 integer division: whole 4-byte groups that fit after
            # the 8-digit offset field and its padding.
            bytes_per_line = (width - (1 + 8 + 2 + 2)) / 17 * 4
            for offset in xrange(0, len(entry.data), bytes_per_line):
                print '{offset:08x} {hex} {ascii}'.format(
                    offset=offset,
                    hex=' '.join(' '.join(padspace(hexdump[do:do + 4], 4))
                                 for do in xrange(offset, offset + bytes_per_line, 4)),
                    ascii=''.join(asciidump[offset:offset + bytes_per_line]))
        if all_same:
            print '(all entries are the same)'
        horizontal_separator()