def on_instruments(self, instruments):
    """Persist the deduplicated instrument list and cache it on the context.

    Does nothing when the (deduplicated) list is empty.
    """
    unique = list(set(instruments))
    if not unique:
        return
    self.ctx.db.set_instruments(
        [wc_utils.object_as_dict(inst) for inst in unique])
    # Cache keyed by instrument_id for fast per-instrument lookups.
    self.ctx.inst_infos = {
        inst.instrument_id: wc_utils.object_as_dict(inst)
        for inst in unique
    }
def on_future_account(self, asset, position_details):
    """Aggregate future position details into per-position groups, merge the
    resulting account book into the ledger, then publish and persist it.

    Details are grouped by (instrument_id, exchange_id, direction) via the
    position uid.
    """
    def position_uid(detail):
        return kwf.position.get_uid(
            detail.instrument_id, detail.exchange_id, detail.direction)

    positions = []
    # groupby requires its input sorted by the same key function.
    for _uid, group in groupby(sorted(position_details, key=position_uid),
                               key=position_uid):
        details = list(group)
        head = details[0]
        positions.append({
            "instrument_id": head.instrument_id,
            "exchange_id": head.exchange_id,
            "direction": head.direction,
            "details": [wc_utils.object_as_dict(d) for d in details],
        })

    book_tags = kwf.book.AccountBookTags(
        ledger_category=LedgerCategory.Account,
        account_id=asset.account_id,
        source_id=asset.source_id)
    account_book = kwf.book.AccountBook(
        ctx=self.ctx, avail=asset.avail, tags=book_tags, positions=positions)
    book = self._get_book(book_tags).merge(account_book)
    self.publish(json.dumps(book.message))
    self.ctx.db.dump(book)
def _message_from_order_event(self, event, order):
    """Build an Order message dict for *event*.

    Resolves the client id of the order's destination: from the known
    locations when available, otherwise from the order record in the
    database. Raises ValueError when neither source knows the destination.
    """
    order_dict = wc_utils.object_as_dict(order)
    # IDs are serialized as strings so JSON consumers don't lose precision.
    order_dict["order_id"] = str(order.order_id)
    order_dict["parent_id"] = str(order.parent_id)
    if self.has_location(event.dest):
        client_id = self.get_location(event.dest).name
    else:
        order_info = self.ctx.db.get_order(order.order_id)
        if not order_info:
            raise ValueError(
                "failed to find order dest location info, dest uid: {}, order {}"
                .format(event.dest, order))
        client_id = order_info["client_id"]
    order_dict["client_id"] = client_id
    return {"msg_type": int(MsgType.Order), "data": order_dict}
def on_stock_account(self, asset, positions):
    """Log the incoming stock account snapshot, merge it into the account
    book, then publish and persist the merged result."""
    self.ctx.logger.info("asset: {}".format(asset))
    for position in positions:
        self.ctx.logger.info("pos: {}".format(position))
    book_tags = kwf.book.AccountBookTags(
        ledger_category=LedgerCategory.Account,
        account_id=asset.account_id,
        source_id=asset.source_id)
    snapshot = kwf.book.AccountBook(
        ctx=self.ctx,
        tags=book_tags,
        avail=asset.avail,
        positions=[wc_utils.object_as_dict(p) for p in positions])
    book = self._get_book(book_tags).merge(snapshot)
    self.publish(json.dumps(book.message))
    self.ctx.db.dump(book)
def reader(ctx, session_id, io_type, from_beginning, max_messages, msg, continuous, output):
    """Replay journal frames of a recorded session.

    Joins the session's journals according to *io_type* ('in', 'out' or
    'all'), then iterates frames starting from the session begin time
    (when *from_beginning*) or from now, printing each matching message —
    or writing it to *output* as CSV — until *max_messages* frames were
    emitted, the session ends, or no more data is available (unless
    *continuous*, in which case it keeps polling).

    Raises ValueError when *output* is given together with msg == "all",
    since a single CSV header cannot cover mixed message types.
    """
    pass_ctx_from_parent(ctx)
    session = kfj.find_session(ctx, session_id)
    uname = '{}/{}/{}/{}'.format(
        session['category'], session['group'], session['name'], session['mode'])
    uid = pyyjj.hash_str_32(uname)
    # Widen the context filters so every journal location is collected;
    # uids appearing in frames must be resolvable below.
    ctx.category = '*'
    ctx.group = '*'
    ctx.name = '*'
    ctx.mode = '*'
    locations = kfj.collect_journal_locations(ctx)
    location = locations[uid]
    home = kfj.make_location_from_dict(ctx, location)
    io_device = pyyjj.io_device(home)
    reader = io_device.open_reader_to_subscribe()
    if io_type == 'out' or io_type == 'all':
        for dest in location['readers']:
            dest_id = int(dest, 16)  # reader dests are stored as hex strings
            reader.join(home, dest_id, session['begin_time'])
    if (io_type == 'in' or io_type == 'all') and not (
            home.category == pyyjj.category.SYSTEM
            and home.group == 'master' and home.name == 'master'):
        # Mirror what the session itself was reading: the master's public
        # journal plus the master command journal addressed to this location.
        master_home_uid = pyyjj.hash_str_32('system/master/master/live')
        master_home_location = kfj.make_location_from_dict(
            ctx, locations[master_home_uid])
        reader.join(master_home_location, 0, session['begin_time'])
        master_cmd_uid = pyyjj.hash_str_32(
            'system/master/{:08x}/live'.format(location['uid']))
        master_cmd_location = kfj.make_location_from_dict(
            ctx, locations[master_cmd_uid])
        reader.join(master_cmd_location, location['uid'], session['begin_time'])
    start_time = pyyjj.now_in_nano() if not from_beginning else session["begin_time"]
    msg_count = 0
    output_file = None
    csv_writer = None
    if output:
        if msg == "all":
            raise ValueError(
                "invalid msg {}, please choose from ('quote', 'order', 'trade')"
                .format(msg))
        msg_type = wc_utils.get_msg_type(msg)
        # newline='' is required by the csv module (avoids blank lines on
        # Windows); the handle is closed in the finally block below — the
        # original implementation leaked it.
        output_file = open(output, "w", newline="")
        csv_writer = csv.DictWriter(
            output_file, fieldnames=wc_utils.get_csv_header(msg_type))
        csv_writer.writeheader()
    pp = pprint.PrettyPrinter(indent=4)
    try:
        while True:
            if reader.data_available() and msg_count < max_messages:
                frame = reader.current_frame()
                if frame.dest == home.uid and (
                        frame.msg_type == yjj_msg.RequestReadFrom
                        or frame.msg_type == yjj_msg.RequestReadFromPublic):
                    # The session subscribed to another journal at this
                    # point in time; follow the same subscription.
                    request = pyyjj.get_RequestReadFrom(frame)
                    source_location = kfj.make_location_from_dict(
                        ctx, locations[request.source_id])
                    reader.join(
                        source_location,
                        location['uid'] if frame.msg_type == yjj_msg.RequestReadFrom else 0,
                        request.from_time)
                if frame.dest == home.uid and frame.msg_type == yjj_msg.Deregister:
                    loc = json.loads(frame.data_as_string())
                    reader.disjoin(loc['uid'])
                if frame.msg_type == yjj_msg.SessionEnd:
                    ctx.logger.info("session reach end at %s",
                                    kft.strftime(frame.gen_time))
                    break
                elif frame.gen_time >= start_time and (
                        msg == "all"
                        or wc_utils.get_msg_type(msg) == frame.msg_type):
                    dict_row = wc_utils.flatten_json(
                        wc_utils.object_as_dict(frame.data))
                    if csv_writer:
                        csv_writer.writerow(dict_row)
                    else:
                        pp.pprint(dict_row)
                    msg_count += 1
                reader.next()
            elif msg_count >= max_messages:
                ctx.logger.info("reach max messages {}".format(max_messages))
                break
            elif not reader.data_available():
                if not continuous:
                    ctx.logger.info("no data is available")
                    break
                else:
                    time.sleep(0.1)
    finally:
        if output_file is not None:
            output_file.close()