def test_security_status(self):
    # Decode a single SecurityStatus message (template id 30) from a raw
    # channel buffer.  The buffer is a legacy Python-2-style str literal
    # holding the binary packet bytes.
    schema = SBESchema()
    schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
    msg_factory = SBEMessageFactory(schema)
    parser = SBEParser(msg_factory)
    msg_buffer = 'N\x7f\xcc\x04F\x9a\x95\x89\x02\x89\xf5\x13(\x00\x1e\x00\x1e\x00\x01\x00\x05\x00\x9a\x94x\x89\x02\x89\xf5\x13CT\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\x7f\x06A\x00\x02\x00\x04'
    # Offset 12 presumably skips the binary packet header (sequence number
    # + sending time) — see the CME MDP3 packet-header comment elsewhere in
    # this project; TODO confirm against the channel spec.
    offset = 12
    for message in parser.parse(msg_buffer, offset):
        self.recorded_messages.append(message)
    # Validate that we parsed a security status message
    assert_equals(1, len(self.recorded_messages))
    recorded_message = self.recorded_messages[0]
    assert_equals(30, recorded_message.template_id.value)
    assert_equals('SecurityStatus', recorded_message.name)
    assert_equals(16646, recorded_message.trade_date.value)
    assert_equals(1438206300004062362, recorded_message.transact_time.value)
    assert_equals('Reset Statistics', recorded_message.security_trading_event.value)
    assert_equals('Trading Halt', recorded_message.security_trading_status.value)
    assert_equals('CT', recorded_message.security_group.value)
    assert_equals('', recorded_message.asset.value)
    assert_equals('Group Schedule', recorded_message.halt_reason.value)
    # security_id is not populated in this message
    assert_equals(None, recorded_message.security_id.value)
def test_security_status(self):
    """Decode one SecurityStatus message (template id 30) and verify every field."""
    schema = SBESchema(include_message_size_header=True,
                       use_description_as_message_name=True)
    schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
    parser = SBEParser(MDPMessageFactory(schema))
    raw_packet = binascii.a2b_hex('1409a900bbe7b5d5fe9ad91428001e001e000100080019989cd5fe9ad914455300000000000000000000ffffff7fed4380150001')
    # Offset 12 skips the binary packet header before the first message.
    self.recorded_messages.extend(parser.parse(raw_packet, 12))

    # Exactly one message, and every field decodes to the expected value.
    assert_equals(1, len(self.recorded_messages))
    decoded = self.recorded_messages[0]
    assert_equals(30, decoded.template_id.value)
    assert_equals('SecurityStatus', decoded.name)
    assert_equals(17389, decoded.trade_date.value)
    assert_equals(1502402370000951321, decoded.transact_time.value)
    assert_equals('EndOfEvent', decoded.match_event_indicator.value)
    assert_equals('Pre Open', decoded.security_trading_status.value)
    assert_equals('ES', decoded.security_group.value)
    assert_equals('', decoded.asset.value)
    assert_equals('Group Schedule', decoded.halt_reason.value)
    assert_equals(None, decoded.security_id.value)
    assert_equals('No Cancel', decoded.security_trading_event.value)
def test_security_status_reset_statistics(self):
    """Decode a SecurityStatus message carrying a ResetStatistics trading event.

    Verifies both the human-readable enum values and their enumerant names.
    """
    schema = SBESchema(include_message_size_header=True, use_description_as_message_name=True)
    # Prefer the pre-generated message classes when available; on any ordinary
    # failure fall back to parsing the XML template.  (Fixed from a bare
    # "except:", which would also have swallowed KeyboardInterrupt/SystemExit.)
    try:
        from sbedecoder.generated import __messages__ as generated_messages
        schema.load(generated_messages)
    except Exception:
        schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
    msg_factory = MDPMessageFactory(schema)
    parser = SBEParser(msg_factory)
    msg_buffer = binascii.a2b_hex('5603a9009c16d545349ad91428001e001e000100080003259845349ad914455300000000000000000000ffffff7fed4380150004')
    offset = 12  # skip the binary packet header before the first message
    for message in parser.parse(msg_buffer, offset):
        self.recorded_messages.append(message)
    # Validate that we parsed a single security status message
    assert_equals(1, len(self.recorded_messages))
    recorded_message = self.recorded_messages[0]
    assert_equals(30, recorded_message.template_id.value)
    assert_equals('SecurityStatus', recorded_message.name)
    assert_equals(17389, recorded_message.trade_date.value)
    assert_equals(1502401500001346819, recorded_message.transact_time.value)
    assert_equals('Reset Statistics', recorded_message.security_trading_event.value)
    assert_equals('ResetStatistics', recorded_message.security_trading_event.enumerant)
    assert_equals('Pre Open', recorded_message.security_trading_status.value)
    assert_equals('PreOpen', recorded_message.security_trading_status.enumerant)
    assert_equals('ES', recorded_message.security_group.value)
    assert_equals('', recorded_message.asset.value)
    assert_equals('Group Schedule', recorded_message.halt_reason.value)
    assert_equals('GroupSchedule', recorded_message.halt_reason.enumerant)
    assert_equals(None, recorded_message.security_id.value)
def test_incremental_refresh_multiple_messages(self):
    """A packet holding two MDIncrementalRefreshBook messages decodes into both."""
    schema = SBESchema(include_message_size_header=True,
                       use_description_as_message_name=True)
    schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
    parser = SBEParser(MDPMessageFactory(schema))
    msg_buffer = binascii.a2b_hex('c90fa9008a15428b069bd91458000b00200001000800e7c43d8b069bd91484000020000180b2654d360200008e0000000a610000f62fac003000000007013000000000001800000000000001e44c980a960000002b13144401000000010000000101000058000b002000010008006f203f8b069bd9148400002000018017336b3602000004000000805d0000402d140002000000020131000000000018000000000000016153980a960000002c131444010000000200000001010000')
    # Expected (price, size) of the single book entry of each message, in order.
    expected_entries = [(243225.0, 142), (243275.0, 4)]
    decoded = 0
    # Messages and their groups are consumed inside the parse loop.
    for message in parser.parse(msg_buffer, 12):
        assert_equals('MDIncrementalRefreshBook', message.name)
        want_px, want_size = expected_entries[decoded]
        seen_entries = 0
        for entry in message.no_md_entries:
            if seen_entries == 0:
                assert_equals(want_px, entry.md_entry_px.value)
                assert_equals(want_size, entry.md_entry_size.value)
            seen_entries += 1
        assert_equals(1, seen_entries)
        decoded += 1
    assert_equals(2, decoded)
def test_incremental_refresh_verify_group_attributes(self):
    # Decode an MDIncrementalRefreshBook message (template id 32) and verify
    # every attribute of both repeating groups: the two book entries in
    # no_md_entries and the single entry in no_order_id_entries.
    schema = SBESchema(include_message_size_header=True, use_description_as_message_name=True)
    schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
    msg_factory = MDPMessageFactory(schema)
    parser = SBEParser(msg_factory)
    msg_buffer = binascii.a2b_hex(
        'c30fa90082dd3f8b069bd91478000b0020000100080095ab3d8b069bd914840000200002009bb1203602000002000000805d00003e2d140001000000010030000000000080e8ca113602000002000000805d00003f2d140001000000020130000000000018000000000000019c53980a9600000024131444010000000200000001010000')
    # Offset 12 presumably skips the binary packet header — TODO confirm.
    offset = 12
    msg_count = 0
    for message in parser.parse(msg_buffer, offset):
        msg_count += 1
        if msg_count == 1:
            assert_equals(32, message.template_id.value)
            assert_equals(1502402403112954773, message.transact_time.value)
            # 132 = LastQuoteMsg | EndOfEvent bit flags
            assert_equals(132, message.match_event_indicator.raw_value)
            assert_equals('LastQuoteMsg, EndOfEvent', message.match_event_indicator.value)
            # First repeating group: two price-level book entries.
            n = 0
            for repeating_group in message.no_md_entries:
                if n == 0:
                    assert_equals(243150.0, repeating_group.md_entry_px.value)
                    assert_equals(2, repeating_group.md_entry_size.value)
                    assert_equals(23936, repeating_group.security_id.value)
                    assert_equals(1322302, repeating_group.rpt_seq.value)
                    assert_equals(1, repeating_group.number_of_orders.value)
                    assert_equals(1, repeating_group.md_price_level.value)
                    assert_equals(0, repeating_group.md_update_action.raw_value)
                    assert_equals('New', repeating_group.md_update_action.value)
                    assert_equals('0', repeating_group.md_entry_type.raw_value)
                    assert_equals('Bid', repeating_group.md_entry_type.value)
                elif n == 1:
                    assert_equals(243125.0, repeating_group.md_entry_px.value)
                    assert_equals(2, repeating_group.md_entry_size.value)
                    assert_equals(23936, repeating_group.security_id.value)
                    assert_equals(1322303, repeating_group.rpt_seq.value)
                    assert_equals(1, repeating_group.number_of_orders.value)
                    assert_equals(2, repeating_group.md_price_level.value)
                    assert_equals(1, repeating_group.md_update_action.raw_value)
                    assert_equals('Change', repeating_group.md_update_action.value)
                    assert_equals('0', repeating_group.md_entry_type.raw_value)
                    assert_equals('Bid', repeating_group.md_entry_type.value)
                n += 1
            assert_equals(2, n)
            # Second repeating group: one order-id entry.
            n = 0
            for repeating_group in message.no_order_id_entries:
                if n == 0:
                    assert_equals(644422849436, repeating_group.order_id.value)
                    assert_equals(5437133604, repeating_group.md_order_priority.value)
                    assert_equals(2, repeating_group.md_display_qty.value)
                    assert_equals(1, repeating_group.reference_id.value)
                    assert_equals('Update', repeating_group.order_update_action.value)
                    assert_equals(1, repeating_group.order_update_action.raw_value)
                n += 1
            assert_equals(1, n)
def __init__(self, schema, out_file_handle=sys.stdout, ignore_messages=False):
    """Build the MDP3 decoding pipeline from a schema source.

    :param schema: schema source handed to MDPSchema.parse (template XML)
    :param out_file_handle: stream receiving output (defaults to stdout)
    :param ignore_messages: when True, messages are not decoded
    """
    self.seq_num = 0
    # Parse the schema and wire factory -> parser in one chain.
    parsed_schema = MDPSchema()
    parsed_schema.parse(schema)
    self.mdp_parser = SBEParser(MDPMessageFactory(parsed_schema))
    self.out_file_handle = out_file_handle
    self.ignore_messages = ignore_messages
def test_incremental_refresh_trade_summary(self):
    """Decode an MDIncrementalRefreshTradeSummary (template 42) and verify its fields."""
    schema = SBESchema(include_message_size_header=True,
                       use_description_as_message_name=True)
    schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
    parser = SBEParser(MDPMessageFactory(schema))
    packet = binascii.a2b_hex('2f0aa9007decc6d2059bd91460000b002a000100080085b89fd2059bd91401000020000100f981d336020000020000000a610000fe2aac00020000000100ffffffff000010000000000000023051980a960000000200000000000000ad50980a960000000200000000000000')
    seen_messages = 0
    for message in parser.parse(packet, 12):
        seen_messages += 1
        assert_equals(42, message.template_id.value)
        assert_equals(1502402400015595653, message.transact_time.value)
        assert_equals(1, message.match_event_indicator.raw_value)
        assert_equals('LastTradeMsg', message.match_event_indicator.value)
        # This message carries two repeating groups.
        assert_equals(2, len(message.groups))
        # Single md entry: the trade summary itself.
        for md_entry in message.no_md_entries:
            assert_equals(243450.0, md_entry.md_entry_px.value)
            assert_equals(2, md_entry.md_entry_size.value)
            assert_equals(24842, md_entry.security_id.value)
            assert_equals(11283198, md_entry.rpt_seq.value)
            assert_equals(2, md_entry.number_of_orders.value)
            assert_equals(1, md_entry.aggressor_side.raw_value)
            assert_equals('Buy', md_entry.aggressor_side.value)
            assert_equals(0, md_entry.md_update_action.raw_value)
            assert_equals('New', md_entry.md_update_action.value)
            assert_equals('2', md_entry.md_entry_type.value)
        # Two participating orders, distinguished only by order id.
        order_count = 0
        for order_entry in message.no_order_id_entries:
            order_count += 1
            expected_id = 644422848816 if order_count == 1 else 644422848685
            assert_equals(expected_id, order_entry.order_id.value)
            assert_equals(2, order_entry.last_qty.value)
    assert_equals(1, seen_messages)
def process_file(args, pcap_filename):
    """Decode every UDP payload in a pcap (optionally gzip-compressed) file.

    Per-packet decode errors are reported and skipped so one bad packet
    does not abort the whole file.
    """
    # Read in the schema xml as a dictionary and construct the schema objects.
    mdp_schema = SBESchema()
    mdp_schema.parse(args.schema)
    mdp_parser = SBEParser(SBEMessageFactory(mdp_schema))
    skip_fields = set(args.skip_fields.split(','))
    opener = gzip.open if pcap_filename.endswith('.gz') else open
    with opener(pcap_filename, 'rb') as pcap:
        packet_number = 0
        for ts, packet in dpkt.pcap.Reader(pcap):
            packet_number += 1
            ethernet = dpkt.ethernet.Ethernet(packet)
            # Only IPv4/UDP frames carry MDP3 payloads we can parse.
            if ethernet.type != dpkt.ethernet.ETH_TYPE_IP:
                continue
            ip = ethernet.data
            if ip.p != dpkt.ip.IP_PROTO_UDP:
                continue
            udp = ip.data
            try:
                parse_mdp3_packet(mdp_parser, ts, udp.data, skip_fields)
            except Exception as e:
                print('Error parsing packet #{} - {}'.format(packet_number, e))
def test_incremental_refresh_trade_summary(self):
    # Decode an MDIncrementalRefreshTradeSummary message (template id 42)
    # from a legacy Python-2-style str buffer and verify the md entry and
    # both order-id entries.
    schema = SBESchema()
    schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
    msg_factory = SBEMessageFactory(schema)
    parser = SBEParser(msg_factory)
    msg_buffer = 'rWa\x00\xdc"\xda6Z%\xe6\x13`\x00\x0b\x00*\x00\x01\x00\x05\x00\x05\xd0\xd46Z%\xe6\x13\x01\x00\x00 \x00\x01\x80_\x8f\x9e\x06\x00\x00\x00\x01\x00\x00\x00.\xee\x01\x00\xee\xe4\x1b\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x02\xca\x89>\x0e\xb6\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'
    # Offset 12 presumably skips the binary packet header — TODO confirm.
    offset = 12
    msg_count = 0
    for message in parser.parse(msg_buffer, offset):
        msg_count += 1
        assert_equals(42, message.template_id.value)
        assert_equals(1433874600761282565, message.transact_time.value)
        assert_equals(1, message.match_event_indicator.raw_value)
        assert_equals('LastTradeMsg', message.match_event_indicator.value)
        # This message has two repeating groups
        assert_equals(2, len(message.iterators))
        # We expect only 1 md entry
        for md_entry in message.no_md_entries:
            assert_equals(2843.0, md_entry.md_entry_px.value)
            assert_equals(1, md_entry.md_entry_size.value)
            assert_equals(126510, md_entry.security_id.value)
            assert_equals(1828078, md_entry.rpt_seq.value)
            assert_equals(2, md_entry.number_of_orders.value)
            assert_equals(2, md_entry.aggressor_side.raw_value)
            assert_equals('Sell', md_entry.aggressor_side.value)
            assert_equals(0, md_entry.md_update_action.raw_value)
            assert_equals('New', md_entry.md_update_action.value)
            assert_equals('2', md_entry.md_entry_type.value)
        # We expect two trades in this message
        num_order_id_entries = 0
        for order_id_entry in message.no_order_id_entries:
            num_order_id_entries += 1
            if num_order_id_entries == 1:
                assert_equals(781923027402, order_id_entry.order_id.value)
                assert_equals(1, order_id_entry.last_qty.value)
            else:
                assert_equals(0, order_id_entry.order_id.value)
                assert_equals(1, order_id_entry.last_qty.value)
    assert_equals(1, msg_count)
def test_incremental_refresh_multiple_messages(self):
    # Decode a packet containing two MDIncrementalRefreshBook messages
    # (template id 32): the first with one book entry, the second with
    # none.  The buffer is a legacy Python-2-style str literal.
    schema = SBESchema()
    schema.parse(TestSBEParserLibrary.LOCAL_TEMPLATE_FILENAME)
    msg_factory = SBEMessageFactory(schema)
    parser = SBEParser(msg_factory)
    msg_buffer = 'U[\xe0\x02\xd6\t\xc64Z%\xe6\x138\x00\x0b\x00 \x00\x01\x00\x05\x00yR\xc44Z%\xe6\x13\x04\x00\x00 \x00\x01\x80\x97\x1d\x94\xff\xff\xff\xff\r\x00\x00\x00\xcb\xb5\x00\x00\x1f%\x0e\x00\x07\x00\x00\x00\n\x011\x00\x00\x00\x00\x00\x18\x00\x0b\x00 \x00\x01\x00\x05\x00yR\xc44Z%\xe6\x13\x80\x00\x00 \x00\x00'
    # Offset 12 presumably skips the binary packet header — TODO confirm.
    offset = 12
    msg_count = 0
    for message in parser.parse(msg_buffer, offset):
        msg_count += 1
        if msg_count == 1:
            assert_equals(32, message.template_id.value)
            assert_equals(1433874600726647417, message.transact_time.value)
            assert_equals(4, message.match_event_indicator.raw_value)
            assert_equals('LastQuoteMsg', message.match_event_indicator.value)
            repeating_groups = [x for x in message.no_md_entries]
            assert_equals(1, len(repeating_groups))
            repeating_group = repeating_groups[0]
            assert_equals(-181.0, repeating_group.md_entry_px.value)
            assert_equals(13, repeating_group.md_entry_size.value)
            assert_equals(46539, repeating_group.security_id.value)
            assert_equals(927007, repeating_group.rpt_seq.value)
            assert_equals(7, repeating_group.number_of_orders.value)
            assert_equals(10, repeating_group.md_price_level.value)
            assert_equals(1, repeating_group.md_update_action.raw_value)
            assert_equals('Change', repeating_group.md_update_action.value)
            assert_equals('1', repeating_group.md_entry_type.raw_value)
            assert_equals('Offer', repeating_group.md_entry_type.value)
        elif msg_count == 2:
            assert_equals(32, message.template_id.value)
            assert_equals(1433874600726647417, message.transact_time.value)
            assert_equals(128, message.match_event_indicator.raw_value)
            assert_equals('EndOfEvent', message.match_event_indicator.value)
            # No repeating groups in this message
            repeating_groups = [x for x in message.no_md_entries]
            assert_equals(0, len(repeating_groups))
    assert_equals(2, msg_count)
def process_file(args, pcap_filename, security_id_filter=None, print_data=False):
    """Replay a (possibly gzipped) pcap file through the order-book builder.

    :param args: parsed command-line args providing .schema and .secdef paths
    :param pcap_filename: pcap file to read; a '.gz' suffix enables gzip
    :param security_id_filter: optional filter forwarded to PacketProcessor
    :param print_data: when True, hex-dump each UDP payload before decoding
    """
    mdp_schema = MDPSchema()
    # Prefer the pre-generated message classes; fall back to parsing the XML
    # template on any ordinary failure.  (Fixed from a bare "except:", which
    # would also have swallowed KeyboardInterrupt/SystemExit.)
    try:
        from sbedecoder.generated import __messages__ as generated_messages
        mdp_schema.load(generated_messages)
    except Exception:
        mdp_schema.parse(args.schema)
    msg_factory = MDPMessageFactory(mdp_schema)
    mdp_parser = SBEParser(msg_factory)
    secdef = SecDef()
    secdef.load(args.secdef)
    book_builder = PacketProcessor(mdp_parser, secdef, security_id_filter=security_id_filter)
    console_printer = ConsolePrinter()
    book_builder.orderbook_handler = console_printer
    with gzip.open(pcap_filename, 'rb') if pcap_filename.endswith('.gz') else open(
            pcap_filename, 'rb') as pcap:
        pcap_reader = dpkt.pcap.Reader(pcap)
        packet_number = 0
        for ts, packet in pcap_reader:
            packet_number += 1
            ethernet = dpkt.ethernet.Ethernet(packet)
            # Only IPv4/UDP frames carry payloads the book builder understands.
            if ethernet.type == dpkt.ethernet.ETH_TYPE_IP:
                ip = ethernet.data
                if ip.p == dpkt.ip.IP_PROTO_UDP:
                    udp = ip.data
                    data = udp.data
                    try:
                        if print_data:
                            print('data: {}'.format(binascii.b2a_hex(data)))
                        # Timestamp is forwarded in whole microseconds.
                        book_builder.handle_packet(int(ts * 1000000), data)
                    except Exception as e:
                        # Best-effort: report and keep going on a bad packet.
                        print('Error decoding e:{} message:{}'.format(
                            e, binascii.b2a_hex(data)))
def main(argv=None):
    """Entry point: build the MDP3 decoding pipeline and process the pcap file."""
    args = process_command_line()
    # Read in the schema xml as a dictionary and construct the schema objects.
    schema = MDPSchema()
    schema.parse(args.schema)
    parser = SBEParser(MDPMessageFactory(schema))
    # secdef is optional; only load it when a path was supplied.
    secdef = None
    if args.secdef:
        secdef = mdp.secdef.SecDef()
        secdef.load(args.secdef)
    skip_fields = set(args.skip_fields.split(','))
    process_file(args.pcapfile, parser, secdef, args.pretty, args.print_data,
                 skip_fields)
    return 0  # success
class MDP3Parser:
    """Decodes MDP 3.0 packets with an SBE schema and pretty-prints each message."""

    def __init__(self, schema, out_file_handle=sys.stdout, ignore_messages=False):
        """
        :param schema: schema source handed to MDPSchema.parse (template XML)
        :param out_file_handle: stream that receives the formatted output
        :param ignore_messages: when True, count packets but skip message decoding
        """
        self.seq_num = 0
        # Read in the schema xml as a dictionary and
        # construct the various schema objects
        mdp_schema = MDPSchema()
        mdp_schema.parse(schema)
        msg_factory = MDPMessageFactory(mdp_schema)
        self.mdp_parser = SBEParser(msg_factory)
        self.out_file_handle = out_file_handle
        self.ignore_messages = ignore_messages

    def handle_repeating_groups(self, group_container, msg_version, indent,
                                skip_fields=(), secdef=None):
        """Recursively print every repeating group of *group_container*.

        Fixes: the mutable default argument (skip_fields=[]) is now an
        immutable tuple, and the inner field loop no longer shadows the
        enumerate() target variable.
        """
        for group in group_container.groups:
            if group.since_version > msg_version:
                continue  # group added after this message's schema version
            print(":::{} - num_groups: {}".format(group.name, group.num_groups),
                  file=self.out_file_handle)
            for index, repeating_entry in enumerate(group.repeating_groups):
                group_fields = ""
                for group_field in repeating_entry.fields:
                    if group_field.since_version > msg_version:
                        continue
                    group_fields += "\n" + " " * len(indent)
                    if secdef and group_field.id == '48':
                        # Field id 48: annotate the security id with its
                        # symbol when the secdef lookup knows it.
                        security_id = group_field.value
                        symbol_info = secdef.lookup_security_id(security_id)
                        if symbol_info:
                            symbol = symbol_info[0]
                            group_fields += "security_id: {} [{}]".format(
                                security_id, symbol)
                            continue
                    group_fields += group_field.__str__(raw=True)
                print(":::: {}{}".format(index, group_fields),
                      file=self.out_file_handle)
            # Nested groups are printed one indent level deeper.
            self.handle_repeating_groups(group, msg_version, indent + ':',
                                         skip_fields=skip_fields, secdef=secdef)

    def parse_packet(self, data, skip_fields=(), token_filter=None,
                     enable_trade_only=False, secdef=None):
        """Decode one UDP payload and print its messages to out_file_handle.

        :param data: raw MDP3 packet (12-byte binary header + SBE messages)
        :param skip_fields: field names omitted from the per-message dump
        :param token_filter: optional container of security ids; with
            enable_trade_only set, only matching messages are printed
        :param enable_trade_only: restrict output to trade-related templates
        :param secdef: optional security-definition lookup for symbol names
        """
        seq_num_str = ":packet => sequence_number: {} sending_time: {} size: {}"
        # Parse the packet header:
        # http://www.cmegroup.com/confluence/display/EPICSANDBOX/MDP+3.0+-+Binary+Packet+Header
        global GLOBAL_PACKET_COUNT
        GLOBAL_PACKET_COUNT += 1
        if len(data) == 1:
            # Single-byte payload: presumably a keepalive carrying "0".
            # NOTE(review): assert is stripped under -O — confirm this
            # should not be a real validation/raise.
            assert data.decode() == "0"
            return
        sequence_number = unpack_from("<i", data, offset=0)[0]
        sending_time = unpack_from("<Q", data, offset=4)[0]
        # 32 -> 46 -> MDIncrementalRefreshBook
        # 43 -> 47 -> MDIncrementalRefreshOrderBook
        # 37 -> 37 -> MDIncrementalRefreshVolume
        # 42 -> 48 -> MDIncrementalRefreshTradeSummary
        template_id_filter = (37, 42, 48)
        if self.ignore_messages:
            return
        header_done = False
        for mdp_message in self.mdp_parser.parse(data, offset=12):
            global GLOBAL_MESSAGE_COUNT
            GLOBAL_MESSAGE_COUNT += 1
            template_val = mdp_message.template_id.value
            if enable_trade_only and template_val not in template_id_filter:
                continue
            # Decide whether this message passes the optional token filter.
            checker = False
            if not enable_trade_only:
                checker = True
            if enable_trade_only:
                if token_filter is None:
                    checker = True
                else:
                    for md_entry in mdp_message.no_md_entries:
                        security_id = md_entry.security_id.value
                        if security_id in token_filter:
                            checker = True
            message_fields = ""
            for field in mdp_message.fields:
                if field.name not in skip_fields:
                    message_fields += "\n " + field.__str__(raw=True)
            if not checker:
                continue
            # The packet banner is printed once, before the first message
            # that passes the filters.
            if not header_done:
                print("=" * 90, file=self.out_file_handle)
                print(seq_num_str.format(sequence_number, sending_time, len(data)),
                      file=self.out_file_handle)
                header_done = True
            print("-" * 90, file=self.out_file_handle)
            print("::{} -{}".format(mdp_message, message_fields),
                  file=self.out_file_handle)
            print(" size: {}\n template_id: {}\n".format(
                mdp_message.message_size.value, template_val),
                file=self.out_file_handle)
            try:
                # Code for older version
                _ = mdp_message.iterators
                for iterator in mdp_message.iterators:
                    print(":::{} - num_groups: {}\n".format(
                        iterator.name, iterator.num_groups),
                        file=self.out_file_handle)
                    group_fields = ""
                    for index, group in enumerate(iterator):
                        for group_field in group.fields:
                            group_fields += "\n " + group_field.__str__(raw=True)
                        print(':::: {}{}'.format(index, group_fields),
                              file=self.out_file_handle)
            except Exception:
                # Fixed from a bare "except:" so KeyboardInterrupt/SystemExit
                # are no longer swallowed; any ordinary failure falls back to
                # the group-based traversal.
                self.handle_repeating_groups(mdp_message,
                                             mdp_message.version.value,
                                             indent="::::",
                                             skip_fields=skip_fields,
                                             secdef=secdef)
def mdp_parser(mdp_schema):
    """Return an SBEParser wired to an MDP message factory for *mdp_schema*."""
    return SBEParser(MDPMessageFactory(mdp_schema))