def setup(ctx, session_id, cmd, instance):
    """Configure a backtest *instance* from a recorded journal session.

    With a session_id, copies the session's begin/end times onto *instance*.
    Without one, prints the command help plus a table of available sessions
    and exits with status -1 so the user can pick one.
    """
    ctx.journal_util_location = pyyjj.location(
        pyyjj.mode.LIVE, pyyjj.category.SYSTEM, 'util', 'journal', ctx.locator)
    if session_id:
        session = kfj.find_session(ctx, session_id)
        instance.set_begin_time(session['begin_time'])
        instance.set_end_time(session['end_time'])
        return
    # No session selected: render the candidates in human-readable form and bail.
    all_sessions = kfj.find_sessions(ctx)
    for col in ('begin_time', 'end_time'):
        all_sessions[col] = all_sessions[col].apply(
            lambda t: kft.strftime(t, kft.SESSION_DATETIME_FORMAT))
    all_sessions['duration'] = all_sessions['duration'].apply(
        lambda t: kft.strftime(t - kft.DURATION_TZ_ADJUST, kft.DURATION_FORMAT))
    click.echo(cmd.get_help(ctx))
    click.echo('please select sessions:')
    click.echo(tabulate(all_sessions.values, headers=all_sessions.columns,
                        tablefmt='simple'))
    sys.exit(-1)
# yarn dev -l trace backtest -g tushare -n 20190808 run -p ../examples/strategy/py/strategy_demo.py -i 1
def setup(ctx, session_id, cmd, instance):
    """Configure a backtest *instance* from a recorded session, including the
    backtest data source taken from ctx.group.

    Without a session_id, prints help plus a session table and exits(-1).
    """
    # ctx.mode = 'live'  # to get live data
    ctx.journal_util_location = pyyjj.location(
        pyyjj.mode.LIVE, pyyjj.category.SYSTEM, 'util', 'journal', ctx.locator)
    if session_id:
        session = kfj.find_session(ctx, session_id)
        instance.set_begin_time(session['begin_time'])
        instance.set_end_time(session['end_time'])
        instance.set_bt_source(ctx.group)
        return
    # No session selected: show the candidates and exit so the user can choose.
    all_sessions = kfj.find_sessions(ctx)
    for col in ('begin_time', 'end_time'):
        all_sessions[col] = all_sessions[col].apply(
            lambda t: kft.strftime(t, kft.SESSION_DATETIME_FORMAT))
    all_sessions['duration'] = all_sessions['duration'].apply(
        lambda t: kft.strftime(t - kft.DURATION_TZ_ADJUST, kft.DURATION_FORMAT))
    click.echo(cmd.get_help(ctx))
    click.echo('please select sessions:')
    click.echo(tabulate(all_sessions.values, headers=all_sessions.columns,
                        tablefmt='simple'))
    sys.exit(-1)
def trace(ctx, session_id, io_type, tablefmt, pager):
    """Render the journal trace of a session as a table, optionally paged."""
    pass_ctx_from_parent(ctx)
    frame = kfj.trace_journal(ctx, session_id, io_type)
    # Convert the nanosecond timestamp columns to human-readable strings.
    for col in ('gen_time', 'trigger_time'):
        frame[col] = frame[col].apply(lambda t: kft.strftime(t))
    rendered = tabulate(frame.values, headers=frame.columns, tablefmt=tablefmt)
    emit = click.echo_via_pager if pager else click.echo
    emit(rendered)
def on_quote(context, quote):
    """Demo quote callback: log the current position, then submit a limit buy
    of 200 at the best ask, and log the incoming quote."""
    position = context.ledger.get_position(quote.instrument_id, exchange)
    # NOTE(review): this callback uses both context.logger and context.log —
    # confirm both attributes exist on the strategy context.
    context.logger.info("position: {}".format(position))
    order_id = context.insert_order(quote.instrument_id, exchange, "15040900",
                                    quote.ask_price[0], 200, PriceType.Limit,
                                    Side.Buy, Offset.Open)
    context.log.info(
        "quote received: [time]{} [instrument_id]{} [last_price]{}".format(
            kft.strftime(quote.data_time), quote.instrument_id,
            quote.last_price))
def sessions(ctx, sortby, ascending, tablefmt, pager):
    """List recorded journal sessions sorted by *sortby*, optionally paged."""
    pass_ctx_from_parent(ctx)
    frame = kfj.find_sessions(ctx).sort_values(by=sortby, ascending=ascending)
    # Humanize the timestamp columns before rendering.
    for col in ('begin_time', 'end_time'):
        frame[col] = frame[col].apply(
            lambda t: kft.strftime(t, kft.SESSION_DATETIME_FORMAT))
    frame['duration'] = frame['duration'].apply(
        lambda t: kft.strftime(t - kft.DURATION_TZ_ADJUST, kft.DURATION_FORMAT))
    rendered = tabulate(frame.values, headers=frame.columns, tablefmt=tablefmt)
    emit = click.echo_via_pager if pager else click.echo
    emit(rendered)
def on_quote(context, quote):
    """Demo quote callback: submit a fixed 601988 limit buy and log the quote."""
    # context.log.info("{} {}".format(quote.data_time, quote.last_price))
    oid = context.insert_limit_order("601988", exchange, "15040900", 75, 200,
                                     Side.Buy, Offset.Open)
    context.log.info(
        "quote received: [time]{} [instrument_id]{} [last_price]{}".format(
            kft.strftime(quote.data_time), quote.instrument_id,
            quote.last_price))
    # context.insert_market_order("000001.SZ", "", "15040900", 20, Side.Buy, Offset.Open)
def setup(ctx, session_id, cmd, instance):
    """Configure *instance* from a recorded session, forcing live mode so the
    journal locator resolves live data.

    Without a session_id, prints help plus a session table and exits(-1).
    """
    ctx.mode = 'live'  # to get live data
    if session_id:
        session = kfj.find_session(ctx, session_id)
        instance.set_begin_time(session['begin_time'])
        instance.set_end_time(session['end_time'])
        return
    # No session selected: show the candidates and exit so the user can choose.
    all_sessions = kfj.find_sessions(ctx)
    for col in ('begin_time', 'end_time'):
        all_sessions[col] = all_sessions[col].apply(
            lambda t: kft.strftime(t, kft.SESSION_DATETIME_FORMAT))
    all_sessions['duration'] = all_sessions['duration'].apply(
        lambda t: kft.strftime(t - kft.DURATION_TZ_ADJUST, kft.DURATION_FORMAT))
    click.echo(cmd.get_help(ctx))
    click.echo('please select sessions:')
    click.echo(tabulate(all_sessions.values, headers=all_sessions.columns,
                        tablefmt='simple'))
    sys.exit(-1)
def on_quote(context, quote):
    """Demo quote callback: log the book position and, while context.count
    orders remain, buy 600 at best ask for the two watched instruments.

    Side effects: submits at most one order per call and decrements
    context.count for each order placed.
    """
    # fix: log message previously read "exexute" — corrected to "execute"
    context.logger.info("execute[on_quote]position: {}".format(
        context.book.get_position(quote.instrument_id, exchange)))
    # Only trade the two watched instruments, and stop once the budget of
    # context.count orders is exhausted.
    if quote.instrument_id in ("600000", "601988") and context.count > 0:
        order_id = context.insert_order(quote.instrument_id, exchange, account,
                                        quote.ask_price[0], 600,
                                        PriceType.Limit, Side.Buy, Offset.Open)
        context.count = context.count - 1
    context.log.info(
        "quote received: [time]{} [instrument_id]{} [last_price]{}".format(
            kft.strftime(quote.data_time), quote.instrument_id,
            quote.last_price))
def read_data_from_journal(self):
    """Dump every available frame from self.reader to stdout, decoding Quote
    frames, then log completion via self.logger."""
    frame_count = 0
    reader = self.reader
    while reader.data_available():
        frame = reader.current_frame()
        frame_count = frame_count + 1
        # Banner separating successive frames in the console dump.
        print('*' * 10 + ' frame-' + str(frame_count) + ' ' + '*' * 10)
        print(frame.msg_type)
        if frame.msg_type == MsgType.Quote:
            # Decode and show the full quote payload for quote frames.
            quote = pywingchun.utils.get_quote(frame)
            print(quote)
        # NOTE(review): gen_time is printed twice (formatted and raw nanos) —
        # confirm the duplicate is intentional; original indentation was lost,
        # so also confirm these prints belong at loop level rather than inside
        # the Quote branch.
        print("gen_time: {}".format(kft.strftime(frame.gen_time)))
        print("gen_time: {}".format(frame.gen_time))
        reader.next()
    self.logger.info("read journal success!")
def on_app_location(self, trigger_time, location):
    """React to an app location appearing on the journal.

    TD locations: mark their persisted orders status-unknown, publish those
    orders, and register a book. STRATEGY locations: register a book only.
    The location is then recorded in the ledger database.
    """
    self.ctx.logger.info("{} {} [{:08x}]".format(kft.strftime(trigger_time), location.uname, location.uid))
    if location.category == pyyjj.category.TD:
        tags = kwb.book.AccountBookTags.make_from_location(location)
        self.ctx.logger.info("mark orders status unknown for {}[{:08x}] with tags: {}".format(location.uname, location.uid, tags))
        # Orders persisted from a previous run can no longer be trusted once
        # the TD reappears; flag them and broadcast the new state.
        orders = self.ctx.db.mark_orders_status_unknown(tags.source_id, tags.account_id)
        for order in orders:
            self.publish(json.dumps({"msg_type": msg.Order, "data": order}, cls=wc_utils.WCEncoder))
        book = self._get_book(location)
        book.subject.subscribe(self.on_book_event)
        self.book_context.add_book(location, book)
    elif location.category == pyyjj.category.STRATEGY:
        book = self._get_book(location)
        book.subject.subscribe(self.on_book_event)
        self.book_context.add_book(location, book)
    # NOTE(review): original indentation was lost; this reconstruction records
    # every location category — confirm add_location was not meant to run only
    # in the STRATEGY branch.
    self.ctx.db.add_location(location)
def on_trade(context, trade):
    """Demo trade callback: log each trade confirmation."""
    # NOTE(review): the "[trade_id]" label is populated with trade.order_id —
    # confirm whether trade.trade_id was intended here.
    context.log.info(
        'trade received: {} [trade_id]{} [volume]{} [price]{}'.format(
            kft.strftime(trade.trade_time), trade.order_id, trade.volume,
            trade.price))
def reader(ctx, session_id, io_type, from_beginning, max_messages, msg,
           continuous, output):
    """Replay journal frames of a recorded session and print them, or write
    them as CSV rows when *output* is given.

    io_type selects which journals to join: 'out' joins the session's own
    reader destinations, 'in' joins the master journals feeding it, 'all'
    joins both. Stops after max_messages matching frames, at SessionEnd, or
    when data runs out (unless *continuous*).
    """
    pass_ctx_from_parent(ctx)
    session = kfj.find_session(ctx, session_id)
    uname = '{}/{}/{}/{}'.format(session['category'], session['group'],
                                 session['name'], session['mode'])
    uid = pyyjj.hash_str_32(uname)
    # Widen the ctx filters so every journal location is collected.
    ctx.category = '*'
    ctx.group = '*'
    ctx.name = '*'
    ctx.mode = '*'
    locations = kfj.collect_journal_locations(ctx)
    location = locations[uid]
    home = kfj.make_location_from_dict(ctx, location)
    io_device = pyyjj.io_device(home)
    reader = io_device.open_reader_to_subscribe()
    if io_type == 'out' or io_type == 'all':
        # Join each destination this location wrote to ('readers' holds hex ids).
        for dest in location['readers']:
            dest_id = int(dest, 16)
            reader.join(home, dest_id, session['begin_time'])
    if (io_type == 'in' or io_type == 'all'
        ) and not (home.category == pyyjj.category.SYSTEM and
                   home.group == 'master' and home.name == 'master'):
        # Inbound side: join the master public journal plus the per-location
        # master command journal (master itself has no inbound side).
        master_home_uid = pyyjj.hash_str_32('system/master/master/live')
        master_home_location = kfj.make_location_from_dict(
            ctx, locations[master_home_uid])
        reader.join(master_home_location, 0, session['begin_time'])
        master_cmd_uid = pyyjj.hash_str_32('system/master/{:08x}/live'.format(
            location['uid']))
        master_cmd_location = kfj.make_location_from_dict(
            ctx, locations[master_cmd_uid])
        reader.join(master_cmd_location, location['uid'], session['begin_time'])
    # Frames older than start_time are skipped unless --from-beginning.
    start_time = pyyjj.now_in_nano(
    ) if not from_beginning else session["begin_time"]
    msg_count = 0
    if output:
        # CSV export needs a concrete message type for its header row.
        if msg == "all":
            raise ValueError(
                "invalid msg {}, please choose from ('quote', 'order', 'trade')"
                .format(msg))
        msg_type = wc_utils.get_msg_type(msg)
        fieldnames = wc_utils.get_csv_header(msg_type)
        csv_writer = csv.DictWriter(
            open(output, "w"), fieldnames=wc_utils.get_csv_header(msg_type))
        csv_writer.writeheader()
    pp = pprint.PrettyPrinter(indent=4)
    while True:
        if reader.data_available() and msg_count < max_messages:
            frame = reader.current_frame()
            # Control frames addressed to this location: honor dynamic
            # subscribe/unsubscribe requests so the replay mirrors live joins.
            if frame.dest == home.uid and (
                    frame.msg_type == yjj_msg.RequestReadFrom or
                    frame.msg_type == yjj_msg.RequestReadFromPublic):
                request = pyyjj.get_RequestReadFrom(frame)
                source_location = kfj.make_location_from_dict(
                    ctx, locations[request.source_id])
                reader.join(
                    source_location,
                    location['uid'] if frame.msg_type == yjj_msg.RequestReadFrom
                    else 0, request.from_time)
            if frame.dest == home.uid and frame.msg_type == yjj_msg.Deregister:
                loc = json.loads(frame.data_as_string())
                reader.disjoin(loc['uid'])
            if frame.msg_type == yjj_msg.SessionEnd:
                ctx.logger.info("session reach end at %s",
                                kft.strftime(frame.gen_time))
                break
            elif frame.gen_time >= start_time and (
                    msg == "all" or
                    wc_utils.get_msg_type(msg) == frame.msg_type):
                # Matching data frame: flatten to a dict row for CSV or pprint.
                dict_row = wc_utils.flatten_json(
                    wc_utils.object_as_dict(frame.data))
                if output:
                    csv_writer.writerow(dict_row)
                else:
                    pp.pprint(dict_row)
                msg_count += 1
            reader.next()
        elif msg_count >= max_messages:
            ctx.logger.info("reach max messages {}".format(max_messages))
            break
        elif not reader.data_available():
            if not continuous:
                ctx.logger.info("no data is available")
                break
            else:
                # Continuous mode: poll for new frames.
                time.sleep(0.1)
def reader(ctx, session_id, io_type, from_beginning, max_messages, msg,
           continuous, output, script):
    """Replay journal frames of a recorded session through a frame handler.

    The handler is chosen by priority: *output* writes CSV rows (header taken
    from the first frame), *script* imports a user module and uses its
    on_frame function, otherwise frames are pretty-printed. io_type selects
    which journals to join ('out', 'in', or 'all'). Stops after max_messages
    matching frames, at SessionEnd, or when data runs out (unless
    *continuous*).

    Raises:
        ValueError: when CSV output is requested for a msg name that is not a
            registered message type.
    """
    pass_ctx_from_parent(ctx)
    session = kfj.find_session(ctx, session_id)
    uname = '{}/{}/{}/{}'.format(session['category'], session['group'],
                                 session['name'], session['mode'])
    uid = pyyjj.hash_str_32(uname)
    # Widen the ctx filters so every journal location is collected.
    ctx.category = '*'
    ctx.group = '*'
    ctx.name = '*'
    ctx.mode = '*'
    locations = kfj.collect_journal_locations(ctx)
    location = locations[uid]
    home = kfj.make_location_from_dict(ctx, location)
    io_device = pyyjj.io_device(home)
    reader = io_device.open_reader_to_subscribe()
    if io_type == 'out' or io_type == 'all':
        # Join each destination this location wrote to ('readers' holds hex ids).
        for dest in location['readers']:
            dest_id = int(dest, 16)
            reader.join(home, dest_id, session['begin_time'])
    if (io_type == 'in' or io_type == 'all'
        ) and not (home.category == pyyjj.category.SYSTEM and
                   home.group == 'master' and home.name == 'master'):
        # Inbound side: join the master public journal plus the per-location
        # master command journal (master itself has no inbound side).
        master_home_uid = pyyjj.hash_str_32('system/master/master/live')
        master_home_location = kfj.make_location_from_dict(
            ctx, locations[master_home_uid])
        reader.join(master_home_location, 0, session['begin_time'])
        master_cmd_uid = pyyjj.hash_str_32('system/master/{:08x}/live'.format(
            location['uid']))
        master_cmd_location = kfj.make_location_from_dict(
            ctx, locations[master_cmd_uid])
        reader.join(master_cmd_location, location['uid'],
                    session['begin_time'])
    # Frames older than start_time are skipped unless --from-beginning.
    start_time = pyyjj.now_in_nano(
    ) if not from_beginning else session["begin_time"]
    msg_count = 0
    msg_type_to_read = None if msg == "all" else \
        kungfu.msg.Registry.meta_from_name(msg)["id"]
    if output:
        if msg not in kungfu.msg.Registry.type_names():
            # fix: the format string has two placeholders but was given only
            # one argument, which raised IndexError instead of this ValueError.
            raise ValueError("invalid msg {}, please choose from {}".format(
                msg, kungfu.msg.Registry.type_names()))
        csv_writer = None

        def handle(frame):
            # Lazily create the writer so the header reflects the first
            # frame's flattened keys.
            # NOTE(review): the output file handle is never closed explicitly;
            # it lives until process exit — consider holding and closing it.
            data_as_dict = frame["data"]
            dict_row = kungfu.msg.utils.flatten_json(data_as_dict)
            nonlocal csv_writer
            if csv_writer is None:
                csv_writer = csv.DictWriter(open(output, "w"),
                                            fieldnames=dict_row.keys())
                csv_writer.writeheader()
            csv_writer.writerow(dict_row)
        frame_handler = handle
    elif script:
        # fix: renamed 'dir' — it shadowed the builtin of the same name.
        script_dir = os.path.dirname(script)
        name_no_ext = os.path.split(os.path.basename(script))
        sys.path.append(os.path.relpath(script_dir))
        impl = importlib.import_module(os.path.splitext(name_no_ext[1])[0])
        # Fall back to a no-op when the script defines no on_frame.
        frame_handler = getattr(impl, 'on_frame', lambda frame: None)
    else:
        pp = pprint.PrettyPrinter(indent=4)
        frame_handler = pp.pprint
    while True:
        if reader.data_available() and msg_count < max_messages:
            frame = reader.current_frame()
            # Control frames addressed to this location: honor dynamic
            # subscribe/unsubscribe requests so the replay mirrors live joins.
            if frame.dest == home.uid and (
                    frame.msg_type == yjj_msg.RequestReadFrom or
                    frame.msg_type == yjj_msg.RequestReadFromPublic):
                request = pyyjj.get_RequestReadFrom(frame)
                source_location = kfj.make_location_from_dict(
                    ctx, locations[request.source_id])
                reader.join(
                    source_location,
                    location['uid'] if frame.msg_type == yjj_msg.RequestReadFrom
                    else 0, request.from_time)
            if frame.dest == home.uid and frame.msg_type == yjj_msg.Deregister:
                loc = json.loads(frame.data_as_string())
                reader.disjoin(loc['uid'])
            if frame.msg_type == yjj_msg.SessionEnd:
                ctx.logger.info("session reach end at %s",
                                kft.strftime(frame.gen_time))
                break
            elif frame.gen_time >= start_time and (
                    msg == "all" or msg_type_to_read == frame.msg_type):
                # A handler failure should not abort the replay; log and go on.
                try:
                    frame_handler(frame.as_dict())
                except Exception:
                    exc_type, exc_obj, exc_tb = sys.exc_info()
                    ctx.logger.error(
                        'error [%s] %s', exc_type,
                        traceback.format_exception(exc_type, exc_obj, exc_tb))
                msg_count += 1
            reader.next()
        elif msg_count >= max_messages:
            ctx.logger.info("reach max messages {}".format(max_messages))
            break
        elif not reader.data_available():
            if not continuous:
                ctx.logger.info("no data is available")
                break
            else:
                # Continuous mode: poll for new frames.
                time.sleep(0.1)